using System;
using System.Collections.Generic;
using Unity.Collections;
using Unity.XR.CoreUtils.Collections;
using UnityEngine.XR.ARSubsystems;
namespace UnityEngine.XR.ARFoundation
{
/// <summary>
/// Manages the life cycle of the <see cref="XRCameraSubsystem"/>. Add one of these to a Camera in your scene
/// if you want camera texture and light estimation information to be available.
/// </summary>
/// <remarks>
/// Related information: <a href="xref:arfoundation-camera-components">Camera components</a>
/// </remarks>
[DefaultExecutionOrder(ARUpdateOrder.k_CameraManager)]
[DisallowMultipleComponent]
[RequireComponent(typeof(Camera))]
[AddComponentMenu("XR/AR Foundation/AR Camera Manager")]
[HelpURL("features/camera/camera-components")]
public sealed class ARCameraManager :
    SubsystemLifecycleManager<XRCameraSubsystem, XRCameraSubsystemDescriptor, XRCameraSubsystem.Provider>,
    ISerializationCallbackReceiver
{
    // Reused across frames to avoid per-frame list allocations when raising frameReceived.
    static List<Texture2D> s_Textures = new();
    static List<int> s_PropertyIds = new();

    ISwapchainStrategy m_SwapchainStrategy = new NoSwapchainStrategy();

    // The Camera this component is attached to, cached in Awake.
    Camera m_Camera;

    bool m_PreRenderInvertCullingValue;

    // Lazily created wrapper for the camera grain texture descriptor.
    IUpdatableTexture m_CameraGrainUpdatableTexture;

    /// <summary>
    /// An event which fires each time a new camera frame is received.
    /// </summary>
    public event Action<ARCameraFrameEventArgs> frameReceived;

    [SerializeField]
    [HideInInspector]
    CameraFocusMode m_FocusMode = CameraFocusMode.Auto;

    [SerializeField]
    [HideInInspector]
#pragma warning disable CS0618
    // If a user has an old project from 2019 lying around, OnAfterDeserialize will auto-upgrade them to the new API.
    LightEstimationMode m_LightEstimationMode = LightEstimationMode.Disabled;
#pragma warning restore CS0618

    [SerializeField]
    [Tooltip("When enabled, auto focus will be requested on the (physical) AR camera.")]
    bool m_AutoFocus = true;

    /// <summary>
    /// Get or set whether autofocus is requested.
    /// </summary>
    public bool autoFocusRequested
    {
        get => subsystem?.autoFocusRequested ?? m_AutoFocus;
        set
        {
            m_AutoFocus = value;
            if (enabled && subsystem != null)
                subsystem.autoFocusRequested = value;
        }
    }

    /// <summary>
    /// Get the current focus mode in use by the subsystem.
    /// </summary>
    /// <value><see langword="true"/> if autofocus is enabled. <see langword="false"/> if fixed focus is enabled
    /// or if there is no loaded <see cref="XRCameraSubsystem"/>.</value>
    public bool autoFocusEnabled => subsystem?.autoFocusEnabled ?? false;

    [SerializeField]
    [Tooltip("When enabled, Image Stabilization will be requested on the AR camera.")]
    bool m_ImageStabilization = false;

    /// <summary>
    /// Get or set whether Image Stabilization is requested.
    /// </summary>
    public bool imageStabilizationRequested
    {
        get => subsystem?.imageStabilizationRequested ?? m_ImageStabilization;
        set
        {
            m_ImageStabilization = value;
            if (enabled && subsystem != null)
                subsystem.imageStabilizationRequested = value;
        }
    }

    /// <summary>
    /// Get whether Image Stabilization is enabled.
    /// </summary>
    /// <value><see langword="true"/> if EIS is enabled. <see langword="false"/> if EIS is not enabled
    /// or if there is no loaded <see cref="XRCameraSubsystem"/>.</value>
    public bool imageStabilizationEnabled => subsystem?.imageStabilizationEnabled ?? false;

    /// <summary>
    /// Get or set the requested camera torch mode.
    /// </summary>
    public XRCameraTorchMode requestedCameraTorchMode
    {
        get => subsystem?.requestedCameraTorchMode ?? XRCameraTorchMode.Off;
        set
        {
            if (subsystem != null)
                subsystem.requestedCameraTorchMode = value;
        }
    }

    /// <summary>
    /// Gets the current camera torch mode.
    /// </summary>
    /// <value>The camera torch mode.</value>
    public XRCameraTorchMode currentCameraTorchMode
    {
        get => subsystem?.currentCameraTorchMode ?? XRCameraTorchMode.Off;
    }

    /// <summary>
    /// Get whether the current session configuration allows the camera torch to be turned on or off.
    /// </summary>
    /// <returns><see langword="true"/> if camera torch mode is supported. Otherwise, <see langword="false"/>.</returns>
    public bool DoesCurrentCameraSupportTorch()
    {
        if (subsystem == null)
            return false;

        return subsystem.DoesCurrentCameraSupportTorch();
    }

    [SerializeField]
    [Tooltip("The light estimation mode for the AR camera.")]
    LightEstimation m_LightEstimation = LightEstimation.None;

    /// <summary>
    /// Get or set the requested <see cref="LightEstimation"/> for the camera.
    /// </summary>
    /// <value>The light estimation mode for the camera.</value>
    public LightEstimation requestedLightEstimation
    {
        get => subsystem?.requestedLightEstimation.ToLightEstimation() ?? m_LightEstimation;
        set
        {
            m_LightEstimation = value;
            if (enabled && subsystem != null)
                subsystem.requestedLightEstimation = value.ToFeature();
        }
    }

    /// <summary>
    /// Get the current light estimation mode used by the subsystem, or <see cref="LightEstimation.None"/>
    /// if there is no subsystem.
    /// </summary>
    public LightEstimation currentLightEstimation => subsystem?.currentLightEstimation.ToLightEstimation() ?? LightEstimation.None;

    [SerializeField]
    [Tooltip("The requested camera facing direction")]
    CameraFacingDirection m_FacingDirection = CameraFacingDirection.World;

    /// <summary>
    /// Get or set the requested camera facing direction.
    /// </summary>
    public CameraFacingDirection requestedFacingDirection
    {
        get => subsystem?.requestedCamera.ToCameraFacingDirection() ?? m_FacingDirection;
        set
        {
            m_FacingDirection = value;
            if (enabled && subsystem != null)
                subsystem.requestedCamera = value.ToFeature();
        }
    }

    /// <summary>
    /// The current camera facing direction. This should usually match <see cref="requestedFacingDirection"/>
    /// but might be different if the platform cannot service the requested camera facing direction, or it might
    /// take a few frames for the requested facing direction to become active.
    /// </summary>
    public CameraFacingDirection currentFacingDirection => subsystem?.currentCamera.ToCameraFacingDirection() ?? CameraFacingDirection.None;

    [SerializeField]
    [Tooltip("The requested background rendering mode. Using mode 'Any' allows the platform provider to determine the rendering mode.")]
    CameraBackgroundRenderingMode m_RenderMode = CameraBackgroundRenderingMode.Any;

    /// <summary>
    /// The current requested <see cref="CameraBackgroundRenderingMode"/>. When set, this value is converted to an
    /// <see cref="XRSupportedCameraBackgroundRenderingMode"/> and passed to
    /// <see cref="XRCameraSubsystem.requestedCameraBackgroundRenderingMode"/>
    /// if the camera subsystem is non-null.
    /// </summary>
    public CameraBackgroundRenderingMode requestedBackgroundRenderingMode
    {
        get => subsystem?.requestedCameraBackgroundRenderingMode.ToBackgroundRenderingMode() ?? m_RenderMode;
        set
        {
            m_RenderMode = value;
            if (enabled && subsystem != null)
                subsystem.requestedCameraBackgroundRenderingMode = value.ToXRSupportedCameraBackgroundRenderingMode();
        }
    }

    /// <summary>
    /// The current <see cref="XRCameraBackgroundRenderingMode"/> of the <see cref="subsystem"/>, or
    /// <see cref="XRCameraBackgroundRenderingMode.None"/> if the subsystem is <see langword="null"/>.
    /// </summary>
    /// <value>The current camera background rendering mode.</value>
    public XRCameraBackgroundRenderingMode currentRenderingMode => subsystem?.currentCameraBackgroundRenderingMode ?? XRCameraBackgroundRenderingMode.None;

    /// <summary>
    /// Indicates whether camera permission has been granted.
    /// </summary>
    /// <value><see langword="true"/> if permission has been granted. Otherwise, <see langword="false"/>.</value>
    public bool permissionGranted => subsystem is { permissionGranted: true };

    /// <summary>
    /// The Material used in background rendering.
    /// </summary>
    /// <value>The Material used in background rendering.</value>
    public Material cameraMaterial => subsystem?.cameraMaterial;

    /// <summary>
    /// Part of the [ISerializationCallbackReceiver](https://docs.unity3d.com/ScriptReference/ISerializationCallbackReceiver.html)
    /// interface. Invoked before serialization.
    /// </summary>
    public void OnBeforeSerialize() { }

    /// <summary>
    /// Part of the [ISerializationCallbackReceiver](https://docs.unity3d.com/ScriptReference/ISerializationCallbackReceiver.html)
    /// interface. Invoked after deserialization.
    /// </summary>
    public void OnAfterDeserialize()
    {
        // (CameraFocusMode)(-1) is used as a sentinel meaning "already upgraded".
        if (m_FocusMode != (CameraFocusMode)(-1))
        {
            m_AutoFocus = m_FocusMode == CameraFocusMode.Auto;
            m_FocusMode = (CameraFocusMode)(-1);
        }

#pragma warning disable CS0618
        if (m_LightEstimationMode != (LightEstimationMode)(-1))
        {
            m_LightEstimation = m_LightEstimationMode.ToLightEstimation();
            m_LightEstimationMode = (LightEstimationMode)(-1);
        }
#pragma warning restore CS0618
    }

    /// <summary>
    /// Tries to get camera intrinsics. Camera intrinsics refers to properties of a physical camera which might be
    /// useful when performing additional computer vision processing on the camera image.
    /// </summary>
    /// <param name="cameraIntrinsics">The camera intrinsics to be populated if the camera supports intrinsics.</param>
    /// <returns>
    /// <see langword="true"/> if <paramref name="cameraIntrinsics"/> was populated. Otherwise,
    /// <see langword="false"/>.
    /// </returns>
    /// <remarks>
    /// > [!NOTE]
    /// > The intrinsics may change each frame. You should call this each frame that you need intrinsics
    /// > in order to ensure you are using the intrinsics for the current frame.
    /// </remarks>
    public bool TryGetIntrinsics(out XRCameraIntrinsics cameraIntrinsics)
    {
        if (subsystem == null)
        {
            cameraIntrinsics = default;
            return false;
        }

        return subsystem.TryGetIntrinsics(out cameraIntrinsics);
    }

    /// <summary>
    /// Get the camera configurations currently supported for the implementation.
    /// </summary>
    /// <param name="allocator">The allocation strategy to use for the returned data.</param>
    /// <returns>The supported camera configurations.</returns>
    public NativeArray<XRCameraConfiguration> GetConfigurations(Allocator allocator)
        => subsystem?.GetConfigurations(allocator) ?? new NativeArray<XRCameraConfiguration>(0, allocator);

    /// <summary>
    /// The current camera configuration.
    /// </summary>
    /// <value>The current camera configuration, if it exists. Otherwise, <see langword="null"/>.</value>
    /// <exception cref="System.NotSupportedException">Thrown when setting the current configuration if the
    /// implementation does not support camera configurations.</exception>
    /// <exception cref="System.ArgumentNullException">Thrown when setting the current configuration if the given
    /// configuration is <see langword="null"/>.</exception>
    /// <exception cref="System.ArgumentException">Thrown when setting the current configuration if the given
    /// configuration is not a supported camera configuration.</exception>
    /// <exception cref="System.InvalidOperationException">Thrown when setting the current configuration if the
    /// implementation is unable to set the current camera configuration.</exception>
    public XRCameraConfiguration? currentConfiguration
    {
        get => subsystem?.currentConfiguration;
        set
        {
            if (subsystem != null)
                subsystem.currentConfiguration = value;
        }
    }

    /// <summary>
    /// Attempts to acquire the latest camera image. This provides direct access to the raw pixel data, as well as
    /// to utilities to convert to RGB and Grayscale formats.
    /// </summary>
    /// <param name="cpuImage">A valid `XRCpuImage` if this method returns `true`.</param>
    /// <returns><see langword="true"/> if the latest camera image was successfully acquired. Otherwise,
    /// <see langword="false"/>.</returns>
    /// <remarks>The <see cref="XRCpuImage"/> must be disposed to avoid resource leaks.</remarks>
    public bool TryAcquireLatestCpuImage(out XRCpuImage cpuImage)
    {
        if (subsystem == null)
        {
            cpuImage = default;
            return false;
        }

        return subsystem.TryAcquireLatestCpuImage(out cpuImage);
    }

    void Awake()
    {
        m_Camera = GetComponent<Camera>();
    }

    /// <inheritdoc/>
    protected override void OnBeforeStart()
    {
        // Push the serialized settings to the subsystem before it starts.
        subsystem.requestedCameraBackgroundRenderingMode = m_RenderMode.ToXRSupportedCameraBackgroundRenderingMode();
        subsystem.autoFocusRequested = m_AutoFocus;
        subsystem.imageStabilizationRequested = m_ImageStabilization;
        subsystem.requestedLightEstimation = m_LightEstimation.ToFeature();
        subsystem.requestedCamera = m_FacingDirection.ToFeature();
    }

    /// <inheritdoc/>
    protected override void OnDisable()
    {
        base.OnDisable();
        m_SwapchainStrategy.DestroyTextures();
    }

    void Update()
    {
        if (subsystem == null)
            return;

        // Mirror the subsystem's requested state back into the serialized fields so the
        // Inspector stays in sync with runtime changes.
        m_RenderMode = subsystem.requestedCameraBackgroundRenderingMode.ToBackgroundRenderingMode();
        m_FacingDirection = subsystem.requestedCamera.ToCameraFacingDirection();
        m_LightEstimation = subsystem.requestedLightEstimation.ToLightEstimation();
        m_AutoFocus = subsystem.autoFocusRequested;
        m_ImageStabilization = subsystem.imageStabilizationRequested;

        var cameraParams = new XRCameraParams
        {
            zNear = m_Camera.nearClipPlane,
            zFar = m_Camera.farClipPlane,
            screenWidth = Screen.width,
            screenHeight = Screen.height,
            screenOrientation = Screen.orientation
        };

        if (!subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
            return;

        if (m_SwapchainStrategy.TryUpdateTexturesForFrame(
            subsystem.GetTextureDescriptors(Allocator.Temp), out var textureInfos))
            InvokeFrameReceivedEvent(frame, textureInfos);
    }

    /// <summary>
    /// Invoke the camera frame received event packing the frame information into the event argument.
    /// </summary>
    /// <param name="frame">The camera frame raising the event.</param>
    /// <param name="updatableTextures">The updated camera textures for this frame.</param>
    void InvokeFrameReceivedEvent(XRCameraFrame frame, ReadOnlyListSpan<IUpdatableTexture> updatableTextures)
    {
        if (frameReceived == null)
            return;

        // Each property is optional; only populate the ones the frame actually provides.
        var lightEstimation = new ARLightEstimationData();

        if (frame.TryGetAverageBrightness(out var averageBrightness))
            lightEstimation.averageBrightness = averageBrightness;

        if (frame.TryGetAverageIntensityInLumens(out var averageIntensityInLumens))
            lightEstimation.averageIntensityInLumens = averageIntensityInLumens;

        if (frame.TryGetAverageColorTemperature(out var averageColorTemperature))
            lightEstimation.averageColorTemperature = averageColorTemperature;

        if (frame.TryGetColorCorrection(out var colorCorrection))
            lightEstimation.colorCorrection = colorCorrection;

        if (frame.TryGetMainLightDirection(out var mainLightDirection))
            lightEstimation.mainLightDirection = mainLightDirection;

        if (frame.TryGetMainLightIntensityLumens(out var mainLightIntensityLumens))
            lightEstimation.mainLightIntensityLumens = mainLightIntensityLumens;

        if (frame.TryGetMainLightColor(out var mainLightColor))
            lightEstimation.mainLightColor = mainLightColor;

        if (frame.TryGetAmbientSphericalHarmonics(out var ambientSphericalHarmonics))
            lightEstimation.ambientSphericalHarmonics = ambientSphericalHarmonics;

        var eventArgs = new ARCameraFrameEventArgs();
        eventArgs.lightEstimation = lightEstimation;

        if (frame.TryGetTimestamp(out var timestampNs))
            eventArgs.timestampNs = timestampNs;

        if (frame.TryGetProjectionMatrix(out var projectionMatrix))
            eventArgs.projectionMatrix = projectionMatrix;

        if (frame.TryGetDisplayMatrix(out var displayMatrix))
            eventArgs.displayMatrix = displayMatrix;

        if (frame.TryGetExposureDuration(out var exposureDuration))
            eventArgs.exposureDuration = exposureDuration;

        if (frame.TryGetExposureOffset(out var exposureOffset))
            eventArgs.exposureOffset = exposureOffset;

        if (frame.TryGetCameraGrain(out var cameraGrain))
        {
            if (m_CameraGrainUpdatableTexture == null)
            {
                m_CameraGrainUpdatableTexture = UpdatableTextureFactory.Create(cameraGrain);
            }
            else
            {
                // always succeeds for Texture2D
                m_CameraGrainUpdatableTexture.TryUpdateFromDescriptor(cameraGrain);
            }

            eventArgs.cameraGrainTexture = m_CameraGrainUpdatableTexture.texture;
        }

        if (frame.TryGetNoiseIntensity(out var noiseIntensity))
            eventArgs.noiseIntensity = noiseIntensity;

        if (frame.TryGetExifData(out XRCameraFrameExifData exifData))
            eventArgs.exifData = exifData;

        s_Textures.Clear();
        s_PropertyIds.Clear();
        foreach (var updatableTexture in updatableTextures)
        {
            DebugAssert.That(updatableTexture.descriptor.textureType == XRTextureType.Texture2D)?.
                WithMessage($"Camera Texture needs to be a Texture 2D, but instead is {updatableTexture.descriptor.textureType.ToString()}.");

            s_Textures.Add((Texture2D)updatableTexture.texture);
            s_PropertyIds.Add(updatableTexture.descriptor.propertyNameId);
        }

        eventArgs.textures = s_Textures;
        eventArgs.propertyNameIds = s_PropertyIds;
        eventArgs.shaderKeywords = subsystem.GetShaderKeywords2();

        frameReceived(eventArgs);
    }
}
}