using System;
using System.Collections.Generic;
namespace UnityEngine.XR.ARSubsystems
{
/// <summary>
/// A set of flags that represent features available in AR.
/// </summary>
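/// <example>
/// Flags combine with the bitwise OR operator; an illustrative sketch using the
/// extension methods defined in <see cref="FeatureExtensions"/> below:
/// <code>
/// var desired = Feature.WorldFacingCamera | Feature.PlaneTracking;
/// bool hasPlanes = desired.Any(Feature.PlaneTracking); // true
/// </code>
/// </example>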
[Flags]
public enum Feature : ulong
{
/// <summary>
/// No features are selected.
/// </summary>
None = 0,
/// <summary>
/// The world-facing camera. On a phone, this is the rear camera.
/// </summary>
WorldFacingCamera = 1 << 0,
/// <summary>
/// The user-facing camera. On a phone, this is the front camera.
/// </summary>
UserFacingCamera = 1 << 1,
/// <summary>
/// Either camera (<see cref="WorldFacingCamera"/> or <see cref="UserFacingCamera"/>).
/// </summary>
AnyCamera = WorldFacingCamera | UserFacingCamera,
/// <summary>
/// Rotation-only tracking (that is, 3 degrees of freedom without positional tracking).
/// </summary>
RotationOnly = 1 << 2,
/// <summary>
/// Both position and rotation tracking (that is, 6 degrees of freedom).
/// </summary>
PositionAndRotation = 1 << 3,
/// <summary>
/// Any tracking mode (<see cref="RotationOnly"/> or <see cref="PositionAndRotation"/>).
/// </summary>
AnyTrackingMode = RotationOnly | PositionAndRotation,
/// <summary>
/// Face detection. See <see cref="XRFaceSubsystem"/>.
/// </summary>
FaceTracking = 1 << 4,
/// <summary>
/// Plane detection. See <see cref="XRPlaneSubsystem"/>.
/// </summary>
PlaneTracking = 1 << 5,
/// <summary>
/// Image detection. See <see cref="XRImageTrackingSubsystem"/>.
/// </summary>
ImageTracking = 1 << 6,
/// <summary>
/// 3D object detection. See <see cref="XRObjectTrackingSubsystem"/>.
/// </summary>
ObjectTracking = 1 << 7,
/// <summary>
/// Environment probes. See <see cref="XREnvironmentProbeSubsystem"/>.
/// </summary>
EnvironmentProbes = 1 << 8,
/// <summary>
/// 2D human body tracking. See <see cref="XRHumanBodySubsystem"/>.
/// </summary>
Body2D = 1 << 9,
/// <summary>
/// 3D human body tracking. See <see cref="XRHumanBodySubsystem"/>.
/// </summary>
Body3D = 1 << 10,
/// <summary>
/// Estimate scale when performing 3D human body tracking. See <see cref="XRHumanBodySubsystem"/>.
/// </summary>
Body3DScaleEstimation = 1 << 11,
/// <summary>
/// People occlusion with stencil texture enabled. See <see cref="XROcclusionSubsystem"/>.
/// </summary>
PeopleOcclusionStencil = 1 << 12,
/// <summary>
/// People occlusion with depth texture enabled. See <see cref="XROcclusionSubsystem"/>.
/// </summary>
PeopleOcclusionDepth = 1 << 13,
/// <summary>
/// Collaborative session.
/// </summary>
Collaboration = 1 << 14,
/// <summary>
/// Auto focus enabled.
/// </summary>
AutoFocus = 1 << 15,
/// <summary>
/// Light estimation for ambient intensity.
/// </summary>
LightEstimationAmbientIntensity = 1 << 16,
/// <summary>
/// Light estimation for ambient color.
/// </summary>
LightEstimationAmbientColor = 1 << 17,
/// <summary>
/// Light estimation for ambient spherical harmonics.
/// </summary>
LightEstimationAmbientSphericalHarmonics = 1 << 18,
/// <summary>
/// Light estimation for the main light's direction.
/// </summary>
LightEstimationMainLightDirection = 1 << 19,
/// <summary>
/// Light estimation for the main light's intensity.
/// </summary>
LightEstimationMainLightIntensity = 1 << 20,
/// <summary>
/// A value with all light estimation-related bits set.
/// </summary>
AnyLightEstimation = LightEstimationAmbientIntensity | LightEstimationAmbientColor | LightEstimationAmbientSphericalHarmonics | LightEstimationMainLightDirection | LightEstimationMainLightIntensity,
/// <summary>
/// Instant and tracked raycasts.
/// </summary>
Raycast = 1 << 21,
/// <summary>
/// A feature that describes real-time meshing capability.
/// </summary>
Meshing = 1 << 22,
/// <summary>
/// A feature that describes classification for <see cref="Meshing"/>.
/// </summary>
MeshClassification = 1 << 23,
/// <summary>
/// A feature that describes the ability to surface point clouds.
/// </summary>
PointCloud = 1 << 24,
/// <summary>
/// A feature that allows environment depth images to be captured.
/// </summary>
EnvironmentDepth = 1 << 25,
/// <summary>
/// A feature that applies temporal smoothing to environment depth images.
/// </summary>
/// <seealso cref="EnvironmentDepth"/>
EnvironmentDepthTemporalSmoothing = 1 << 26,
/// <summary>
/// Image Stabilization enabled.
/// </summary>
ImageStabilization = 1 << 27,
/// <summary>
/// Camera torch mode enabled.
/// </summary>
CameraTorch = 1 << 28,
}
/// <summary>
/// Extension methods for <see cref="Feature"/> flags.
/// </summary>
public static class FeatureExtensions
{
/// <summary>
/// Tests whether any of the features in <paramref name="features"/> are present in <paramref name="self"/>.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to test against.</param>
/// <returns><c>true</c> if any of the features in <paramref name="features"/> are also in <paramref name="self"/>,
/// otherwise <c>false</c>.</returns>
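/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.PlaneTracking | Feature.Raycast;
/// caps.Any(Feature.Raycast | Feature.Meshing); // true: Raycast is present
/// caps.Any(Feature.FaceTracking);              // false: no overlap
/// </code>
/// </example>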
public static bool Any(this Feature self, Feature features) => (self & features) != Feature.None;
/// <summary>
/// Tests whether all the features in <paramref name="features"/> are present in <paramref name="self"/>.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to test against.</param>
/// <returns><c>true</c> if all the features in <paramref name="features"/> are also in <paramref name="self"/>,
/// otherwise <c>false</c>.</returns>
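/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.PlaneTracking | Feature.Raycast;
/// caps.All(Feature.PlaneTracking);                   // true
/// caps.All(Feature.PlaneTracking | Feature.Meshing); // false: Meshing is missing
/// </code>
/// </example>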
public static bool All(this Feature self, Feature features) => (self & features) == features;
/// <summary>
/// Tests whether <paramref name="self"/> and <paramref name="features"/> have no features in common.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to test against.</param>
/// <returns><c>true</c> if none of the features in <paramref name="features"/> are in <paramref name="self"/>,
/// otherwise <c>false</c>.</returns>
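/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.ImageTracking;
/// caps.None(Feature.FaceTracking | Feature.Meshing); // true: no overlap
/// caps.None(Feature.ImageTracking);                  // false
/// </code>
/// </example>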
public static bool None(this Feature self, Feature features) => (self & features) == Feature.None;
/// <summary>
/// Computes the union of <paramref name="self"/> and <paramref name="features"/> (that is,
/// the set of features in <paramref name="self"/> or <paramref name="features"/> or both).
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to union with <paramref name="self"/>.</param>
/// <returns>The union of <paramref name="self"/> and <paramref name="features"/>
/// (that is, the set of features in <paramref name="self"/> or <paramref name="features"/> or both).</returns>
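/// <example>
/// An illustrative sketch; equivalent to the bitwise OR operator:
/// <code>
/// var merged = Feature.PlaneTracking.Union(Feature.Raycast); // PlaneTracking | Raycast
/// </code>
/// </example>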
public static Feature Union(this Feature self, Feature features) => self | features;
/// <summary>
/// Computes the intersection of <paramref name="self"/> and <paramref name="features"/>
/// (that is, the set of features present in both <paramref name="self"/> and <paramref name="features"/>).
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to intersect with <paramref name="self"/>.</param>
/// <returns>The intersection of <paramref name="self"/> and <paramref name="features"/>
/// (that is, the set of features common to both <paramref name="self"/> and <paramref name="features"/>).</returns>
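/// <example>
/// An illustrative sketch; equivalent to the bitwise AND operator:
/// <code>
/// var caps = Feature.PlaneTracking | Feature.Raycast;
/// var common = caps.Intersection(Feature.Raycast | Feature.Meshing); // Raycast
/// </code>
/// </example>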
public static Feature Intersection(this Feature self, Feature features) => self & features;
/// <summary>
/// Computes the set difference (that is, removes all flags in <paramref name="features"/> from <paramref name="self"/>).
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features to remove from <paramref name="self"/>.</param>
/// <returns>The set difference of <paramref name="self"/> and <paramref name="features"/>
/// (that is, all members of <paramref name="self"/> which do not belong to <paramref name="features"/>).</returns>
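/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.PlaneTracking | Feature.Raycast;
/// var remaining = caps.SetDifference(Feature.Raycast); // PlaneTracking
/// </code>
/// </example>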
public static Feature SetDifference(this Feature self, Feature features) => self & ~features;
/// <summary>
/// Computes the symmetric difference (that is, the set of all features that belong to exactly one of
/// <paramref name="self"/> and <paramref name="features"/>, present in one but not both).
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="features">The features with which to compute the symmetric difference.</param>
/// <returns>The symmetric difference of <paramref name="self"/> and <paramref name="features"/>
/// (that is, the features that belong to <paramref name="self"/> or <paramref name="features"/>, but not both).</returns>
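/// <example>
/// An illustrative sketch; equivalent to the bitwise XOR operator:
/// <code>
/// var a = Feature.PlaneTracking | Feature.Raycast;
/// var b = Feature.Raycast | Feature.Meshing;
/// var exclusive = a.SymmetricDifference(b); // PlaneTracking | Meshing
/// </code>
/// </example>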
public static Feature SymmetricDifference(this Feature self, Feature features) => self ^ features;
/// <summary>
/// Sets or removes one or more flags.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <param name="feature">The <see cref="Feature"/> flag(s) to set or remove.</param>
/// <param name="enabled">If <c>true</c>, the flag(s) in <paramref name="feature"/> will be set. If <c>false</c>, they will be removed.</param>
/// <returns><paramref name="self"/> with the flag(s) in <paramref name="feature"/> set or removed according to the value of <paramref name="enabled"/>.</returns>
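/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.PlaneTracking;
/// caps = caps.SetEnabled(Feature.Raycast, true);        // PlaneTracking | Raycast
/// caps = caps.SetEnabled(Feature.PlaneTracking, false); // Raycast
/// </code>
/// </example>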
public static Feature SetEnabled(this Feature self, Feature feature, bool enabled) => enabled ? self | feature : self & ~feature;
/// <summary>
/// Filters just the camera-related flags.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <returns>The camera-related flags from <paramref name="self"/>.</returns>
/// <seealso cref="Feature.AnyCamera"/>
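/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.UserFacingCamera | Feature.FaceTracking;
/// caps.Cameras(); // UserFacingCamera
/// </code>
/// </example>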
public static Feature Cameras(this Feature self) => self.Intersection(Feature.AnyCamera);
/// <summary>
/// Filters just the tracking-related flags.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <returns>The tracking-related flags from <paramref name="self"/>.</returns>
/// <seealso cref="Feature.AnyTrackingMode"/>
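/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.PositionAndRotation | Feature.PlaneTracking;
/// caps.TrackingModes(); // PositionAndRotation
/// </code>
/// </example>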
public static Feature TrackingModes(this Feature self) => self.Intersection(Feature.AnyTrackingMode);
/// <summary>
/// Filters just the light estimation-related flags.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <returns>The light estimation-related flags from <paramref name="self"/>.</returns>
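/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.LightEstimationAmbientIntensity | Feature.PlaneTracking;
/// caps.LightEstimation(); // LightEstimationAmbientIntensity
/// </code>
/// </example>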
public static Feature LightEstimation(this Feature self) => self.Intersection(Feature.AnyLightEstimation);
/// <summary>
/// Removes all camera- and tracking-related flags.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <returns><paramref name="self"/> with camera- and tracking-related bits removed.</returns>
/// <seealso cref="Feature.AnyCamera"/>
/// <seealso cref="Feature.AnyTrackingMode"/>
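/// <example>
/// An illustrative sketch:
/// <code>
/// var caps = Feature.WorldFacingCamera | Feature.PositionAndRotation | Feature.PlaneTracking;
/// caps.WithoutCameraOrTracking(); // PlaneTracking
/// </code>
/// </example>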
public static Feature WithoutCameraOrTracking(this Feature self) => self.SetDifference(Feature.AnyCamera.Union(Feature.AnyTrackingMode));
// Isolates the lowest set bit of self: (self - 1) flips the lowest set bit and all
// bits below it, so (self ^ (self - 1)) yields a mask of the lowest set bit and the
// bits below it, and ANDing with self keeps only the lowest set bit itself.
static Feature LowestBit(this Feature self)
{
return self & (self ^ (self - 1));
}
/// <summary>
/// Generates a single string representing the list of enabled features, separated by
/// <paramref name="separator"/> (", " by default).
/// </summary>
/// <remarks>
/// This method makes several heap allocations, generating garbage. It is intended for debugging purposes and
/// should not be called frequently in a production application.
/// </remarks>
/// <param name="features">The <see cref="Feature"/> being extended.</param>
/// <param name="separator">The string separator to insert between elements of the list, or ", " if omitted.</param>
/// <returns>A string of feature names separated by <paramref name="separator"/>. If none of the features are enabled, returns "(None)".</returns>
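/// <example>
/// An illustrative sketch:
/// <code>
/// (Feature.PlaneTracking | Feature.Raycast).ToStringList();      // "Plane Tracking, Raycast"
/// (Feature.PlaneTracking | Feature.Raycast).ToStringList(" + "); // "Plane Tracking + Raycast"
/// Feature.None.ToStringList();                                   // "(None)"
/// </code>
/// </example>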
public static string ToStringList(this Feature features, string separator = ", ")
{
var names = new List<string>();
while (features != 0)
{
var feature = features.LowestBit();
switch (feature)
{
case Feature.WorldFacingCamera:
names.Add("World Facing Camera");
break;
case Feature.UserFacingCamera:
names.Add("User Facing Camera");
break;
case Feature.RotationOnly:
names.Add("Rotation Only");
break;
case Feature.PositionAndRotation:
names.Add("Rotation and Orientation");
break;
case Feature.FaceTracking:
names.Add("Face Tracking");
break;
case Feature.PlaneTracking:
names.Add("Plane Tracking");
break;
case Feature.ImageTracking:
names.Add("Image Tracking");
break;
case Feature.ObjectTracking:
names.Add("Object Tracking");
break;
case Feature.EnvironmentProbes:
names.Add("Environment Probes");
break;
case Feature.Body2D:
names.Add("2D Body Tracking");
break;
case Feature.Body3D:
names.Add("3D Body Tracking");
break;
case Feature.Body3DScaleEstimation:
names.Add("3D Body Scale Estimation");
break;
case Feature.PeopleOcclusionStencil:
names.Add("Human Occlusion Stencil");
break;
case Feature.PeopleOcclusionDepth:
names.Add("Human Occlusion Depth");
break;
case Feature.Collaboration:
names.Add("Collaboration");
break;
case Feature.AutoFocus:
names.Add("Auto-Focus");
break;
case Feature.LightEstimationAmbientIntensity:
names.Add("Light Estimation (Ambient Intensity)");
break;
case Feature.LightEstimationAmbientColor:
names.Add("Light Estimation (Ambient Color)");
break;
case Feature.LightEstimationAmbientSphericalHarmonics:
names.Add("Light Estimation (Spherical Harmonics)");
break;
case Feature.LightEstimationMainLightDirection:
names.Add("Light Estimation (Main Light Direction)");
break;
case Feature.LightEstimationMainLightIntensity:
names.Add("Light Estimation (Main Light Intensity)");
break;
case Feature.Raycast:
names.Add("Raycast");
break;
case Feature.Meshing:
names.Add("Meshing");
break;
case Feature.MeshClassification:
names.Add("Mesh Classification");
break;
case Feature.PointCloud:
names.Add("Point Cloud");
break;
case Feature.EnvironmentDepth:
names.Add("Environment Depth");
break;
case Feature.EnvironmentDepthTemporalSmoothing:
names.Add("Environment Depth Temporal Smoothing");
break;
case Feature.ImageStabilization:
names.Add("Image Stabilization");
break;
case Feature.CameraTorch:
names.Add("Camera Torch");
break;
default:
names.Add(feature.ToString());
break;
}
features &= (features - 1);
}
return names.Count > 0 ? string.Join(separator, names) : "(None)";
}
/// <summary>
/// Calculates the number of enabled features in <paramref name="self"/>.
/// </summary>
/// <param name="self">The <see cref="Feature"/> being extended.</param>
/// <returns>The number of enabled flags.</returns>
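/// <example>
/// An illustrative sketch:
/// <code>
/// (Feature.PlaneTracking | Feature.Raycast).Count(); // 2
/// Feature.None.Count();                              // 0
/// </code>
/// </example>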
public static int Count(this Feature self)
{
int count = 0;
ulong features = (ulong)self;
while (features != 0)
{
++count;
// set lowest bit to zero
features &= (features - 1);
}
return count;
}
}
}