Initialer Upload neues Unity-Projekt

This commit is contained in:
Daniel Ocks
2025-07-21 09:11:14 +02:00
commit eeca72985b
14558 changed files with 1508140 additions and 0 deletions

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: fb27ef9cdbfb48d4b8f2169b0088fd79
timeCreated: 1669731265

View File

@ -0,0 +1,539 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
/// <summary>
/// Static helpers for creating and looking up <see cref="OVRTask{TResult}"/> handles by id.
/// </summary>
internal static class OVRTask
{
    /// <summary>Creates a new pending task keyed directly by <paramref name="id"/>.</summary>
    internal static OVRTask<TResult> FromGuid<TResult>(Guid id)
    {
        return Create<TResult>(id);
    }

    /// <summary>Creates a new pending task keyed by a Guid derived from a 64-bit request id.</summary>
    internal static OVRTask<TResult> FromRequest<TResult>(ulong id)
    {
        return Create<TResult>(GetId(id));
    }

    /// <summary>Creates a task that is already completed with <paramref name="result"/>.</summary>
    internal static OVRTask<TResult> FromResult<TResult>(TResult result)
    {
        var completed = Create<TResult>(Guid.NewGuid());
        completed.SetResult(result);
        return completed;
    }

    /// <summary>Wraps an existing task id without marking it pending again.</summary>
    internal static OVRTask<TResult> GetExisting<TResult>(Guid id) => Get<TResult>(id);

    /// <summary>Wraps an existing task identified by a 64-bit request id.</summary>
    internal static OVRTask<TResult> GetExisting<TResult>(ulong id) => Get<TResult>(GetId(id));

    /// <summary>Completes the existing task identified by <paramref name="id"/>.</summary>
    internal static void SetResult<TResult>(Guid id, TResult result)
    {
        GetExisting<TResult>(id).SetResult(result);
    }

    /// <summary>Completes the existing task identified by a 64-bit request id.</summary>
    internal static void SetResult<TResult>(ulong id, TResult result)
    {
        GetExisting<TResult>(id).SetResult(result);
    }

    // Bare handle construction; does not touch any task state.
    private static OVRTask<TResult> Get<TResult>(Guid id) => new OVRTask<TResult>(id);

    // Constructs a handle and registers it as pending so it can later be completed.
    private static OVRTask<TResult> Create<TResult>(Guid id)
    {
        var pendingTask = Get<TResult>(id);
        pendingTask.AddToPending();
        return pendingTask;
    }

    /// <summary>
    /// Deterministically maps a 64-bit request id onto a Guid by writing two salted
    /// 64-bit words directly into the Guid's 16 bytes of storage.
    /// </summary>
    internal static unsafe Guid GetId(ulong value)
    {
        const ulong salt1 = 0x319642b2d24d8ec3;
        const ulong salt2 = 0x96de1b173f119089;
        var id = default(Guid);
        *(ulong*)&id = unchecked(value + salt1);
        *((ulong*)&id + 1) = salt2;
        return id;
    }
}
/// <summary>
/// Represents an awaitable task.
/// </summary>
/// <remarks>
/// This is a task-like object which supports the <c>await</c> pattern. Typically, you do not need to
/// create or use this object directly. Instead, you can either :
/// <para>- <c>await</c> a method which returns an object of type <see cref="OVRTask{TResult}"/>,
/// which will eventually return a <typeparamref name="TResult"/></para>
/// <para>- poll the <see cref="IsCompleted"/> property and then call <see cref="GetResult"/></para>
/// <para>- pass a delegate by calling <see cref="ContinueWith(Action{TResult})"/>. Note that an additional state <c>object</c> can get passed in and added as a parameter of the callback, see <see cref="ContinueWith{T}"/></para>
/// Requires the main thread to complete the await contract - blocking can result in an infinite loop.
/// </remarks>
/// <typeparam name="TResult">The type of result being awaited.</typeparam>
public readonly struct OVRTask<TResult> : IEquatable<OVRTask<TResult>>, IDisposable
{
    #region static

    // All per-task state lives in these static maps keyed by the task's Guid, so the
    // struct itself remains a tiny, freely copyable handle (just _id). Note: the maps
    // are per-closed-generic-type (one set per TResult) and are not synchronized —
    // presumably main-thread only, as the class doc requires; verify before using
    // from other threads.

    // Ids of tasks that have been created but not yet completed.
    private static readonly HashSet<Guid> Pending = new HashSet<Guid>();

    // Results of completed tasks waiting to be consumed by GetResult().
    private static readonly Dictionary<Guid, TResult> Results = new Dictionary<Guid, TResult>();

    // Continuations registered via the awaiter (the await pattern).
    private static readonly Dictionary<Guid, Action> Continuations = new Dictionary<Guid, Action>();

    // Invokes the ContinueWith callback registered for a given task id.
    private delegate void CallbackInvoker(Guid guid, TResult result);

    // Removes the ContinueWith callback registered for a given task id.
    private delegate bool CallbackRemover(Guid guid);

    // Maps task id -> the invoker for whichever callback storage (Callback or
    // CallbackWithState<T>) holds that task's delegate.
    private static readonly Dictionary<Guid, CallbackInvoker>
        CallbackInvokers = new Dictionary<Guid, CallbackInvoker>();

    private static readonly Dictionary<Guid, CallbackRemover>
        CallbackRemovers = new Dictionary<Guid, CallbackRemover>();

    // One clearer per callback storage type that has ever been used (HashSet dedupes).
    private static readonly HashSet<Action> CallbackClearers = new HashSet<Action>();

    // Removes per-task internal data held by InternalData<T> for some T.
    private delegate bool InternalDataRemover(Guid guid);

    private static readonly Dictionary<Guid, InternalDataRemover> InternalDataRemovers =
        new Dictionary<Guid, InternalDataRemover>();

    private static readonly HashSet<Action> InternalDataClearers = new HashSet<Action>();

    // Removes the incremental-result subscriber registered for a given task id.
    private static readonly Dictionary<Guid, Action<Guid>> SubscriberRemovers =
        new Dictionary<Guid, Action<Guid>>();

    private static readonly HashSet<Action> SubscriberClearers = new HashSet<Action>();

    #endregion

    // The only instance state: the id used to key into the static maps above.
    private readonly Guid _id;

    internal OVRTask(Guid id)
    {
        _id = id;
    }

    // Marks this task as awaiting completion.
    internal void AddToPending() => Pending.Add(_id);

    // True while the task has been created but SetResult has not yet run.
    internal bool IsPending => Pending.Contains(_id);

    // Attaches arbitrary typed data to this task (e.g. bookkeeping for the producer).
    internal void SetInternalData<T>(T data) => InternalData<T>.Set(_id, data);

    internal bool TryGetInternalData<T>(out T data) => InternalData<T>.TryGet(_id, out data);

    // Completes the task: tears down per-task registrations, then either invokes the
    // registered ContinueWith callback or stores the result and fires the awaiter's
    // continuation. Order matters: each map entry is removed BEFORE its delegate runs
    // so re-entrant use of the same id cannot observe stale registrations.
    internal void SetResult(TResult result)
    {
        // Means no one was awaiting this result.
        if (!Pending.Remove(_id)) return;

        if (InternalDataRemovers.TryGetValue(_id, out var internalDataRemover))
        {
            InternalDataRemovers.Remove(_id);
            internalDataRemover(_id);
        }

        if (SubscriberRemovers.TryGetValue(_id, out var subscriberRemover))
        {
            SubscriberRemovers.Remove(_id);
            subscriberRemover(_id);
        }

        if (CallbackInvokers.TryGetValue(_id, out var invoker))
        {
            // ContinueWith path: the callback consumes the result directly; nothing
            // is stored in Results.
            CallbackInvokers.Remove(_id);
            invoker(_id, result);
        }
        else
        {
            // Add to the results so that GetResult can retrieve it later.
            Results.Add(_id, result);
            if (Continuations.TryGetValue(_id, out var continuation))
            {
                // await path: resume the state machine, which will call GetResult().
                Continuations.Remove(_id);
                continuation();
            }
        }
    }

    // Per-(task, T) side-channel storage. Static generic nesting gives one Data map
    // per T; the Remover/Clearer delegates are registered into the outer maps so the
    // non-generic code paths (SetResult/Dispose) can purge entries without knowing T.
    private static class InternalData<T>
    {
        private static readonly Dictionary<Guid, T> Data = new Dictionary<Guid, T>();

        public static bool TryGet(Guid taskId, out T data)
        {
            return Data.TryGetValue(taskId, out data);
        }

        public static void Set(Guid taskId, T data)
        {
            Data[taskId] = data;
            InternalDataRemovers.Add(taskId, Remover);
            InternalDataClearers.Add(Clearer);
        }

        // Cached delegates to avoid a fresh allocation per registration.
        private static readonly InternalDataRemover Remover = Remove;
        private static readonly Action Clearer = Clear;
        private static bool Remove(Guid taskId) => Data.Remove(taskId);
        private static void Clear() => Data.Clear();
    }

    // Per-(task, T) incremental-result subscribers, same static-generic pattern as
    // InternalData<T>. T is the incremental result type, typically not TResult.
    static class IncrementalResultSubscriber<T>
    {
        static readonly Dictionary<Guid, Action<T>> Subscribers = new Dictionary<Guid, Action<T>>();

        public static void Set(Guid taskId, Action<T> subscriber)
        {
            Subscribers[taskId] = subscriber;
            SubscriberRemovers[taskId] = Remover;
            SubscriberClearers.Add(Clearer);
        }

        // Forwards a partial result to the subscriber, if one is registered.
        public static void Notify(Guid taskId, T result)
        {
            if (Subscribers.TryGetValue(taskId, out var subscriber))
            {
                subscriber(result);
            }
        }

        static readonly Action<Guid> Remover = Remove;
        static void Remove(Guid id) => Subscribers.Remove(id);
        static readonly Action Clearer = Clear;
        static void Clear() => Subscribers.Clear();
    }

    /// <summary>
    /// Sets the delegate to be invoked when an incremental result is available before the task is complete.
    /// </summary>
    /// <remarks>
    /// Some tasks may provide incremental results before the task is complete. In this case, you can use
    /// <see cref="SetIncrementalResultCallback{TIncrementalResult}"/> to receive those results as they become available.
    ///
    /// For example, the task may provide a list of results over some period of time and may be able to provide
    /// partial results as they become available, before the task completes.
    /// </remarks>
    /// <param name="onIncrementalResultAvailable">Invoked whenever <see cref="NotifyIncrementalResult{TIncrementalResult}"/>
    /// is called.</param>
    /// <typeparam name="TIncrementalResult">The type of the incremental result. This is typically different than the
    /// <typeparamref name="TResult"/>.</typeparam>
    /// <exception cref="System.ArgumentNullException">Thrown when <paramref name="onIncrementalResultAvailable"/> is `null`.</exception>
    internal void SetIncrementalResultCallback<TIncrementalResult>(
        Action<TIncrementalResult> onIncrementalResultAvailable)
    {
        if (onIncrementalResultAvailable == null)
            throw new ArgumentNullException(nameof(onIncrementalResultAvailable));
        IncrementalResultSubscriber<TIncrementalResult>.Set(_id, onIncrementalResultAvailable);
    }

    /// <summary>
    /// Notifies a subscriber of an incremental result associated with an ongoing task.
    /// </summary>
    /// <remarks>
    /// Use this to provide partial results that may be available before the task fully completes.
    /// </remarks>
    /// <typeparam name="TIncrementalResult">The type of the result, usually different from <typeparamref name="TResult"/>.</typeparam>
    internal void NotifyIncrementalResult<TIncrementalResult>(TIncrementalResult incrementalResult)
        => IncrementalResultSubscriber<TIncrementalResult>.Notify(_id, incrementalResult);

    #region Polling Implementation

    /// <summary>
    /// Indicates whether the task has completed.
    /// </summary>
    /// <remarks>
    /// Choose only one pattern out of the three proposed way of awaiting for the task completion:
    /// Polling,<c>async/await</c> or <see cref="ContinueWith(Action{TResult})"/>
    /// as all three patterns will end up calling the <see cref="GetResult"/> which can only be called once.
    /// </remarks>
    /// <returns><c>True</c> if the task has completed. <see cref="GetResult"/> can be called.</returns>
    public bool IsCompleted => !IsPending;

    /// <summary>
    /// Gets the result of the Task.
    /// </summary>
    /// <remarks>
    /// This method should only be called once <see cref="IsCompleted"/> is true.
    /// Calling it multiple times leads to undefined behavior.
    /// Do not use in conjunction with any other methods (<c>await</c> or using <see cref="ContinueWith"/>).
    /// </remarks>
    /// <returns>Returns the result of type <typeparamref name="TResult"/>.</returns>
    /// <exception cref="InvalidOperationException">Thrown when the task doesn't have any available result. This could
    /// happen if the method is called before <see cref="IsCompleted"/> is true, after the task has been disposed of
    /// or if this method has already been called once.</exception>
    public TResult GetResult()
    {
        if (!Results.TryGetValue(_id, out var value))
        {
            throw new InvalidOperationException($"Task {_id} doesn't have any available result.");
        }

        // Consuming the result removes it, which is why GetResult is single-use.
        Results.Remove(_id);
        return value;
    }

    #endregion

    #region Awaiter Contract Implementation

    /// <summary>
    /// Definition of an awaiter that satisfies the await contract.
    /// </summary>
    /// <remarks>
    /// This allows an <see cref="OVRTask{T}"/> to be awaited using the <c>await</c> keyword.
    /// Typically, you should not use this struct; instead, it is used by the compiler by
    /// automatically calling the <see cref="GetAwaiter"/> method when using the <c>await</c> keyword.
    /// </remarks>
    public readonly struct Awaiter : INotifyCompletion
    {
        private readonly OVRTask<TResult> _task;

        internal Awaiter(OVRTask<TResult> task)
        {
            _task = task;
        }

        // Members below satisfy the compiler's awaiter pattern; they simply forward
        // to the underlying task.
        public bool IsCompleted => _task.IsCompleted;
        public void OnCompleted(Action continuation) => _task.WithContinuation(continuation);
        public TResult GetResult() => _task.GetResult();
    }

    /// <summary>
    /// Gets an awaiter that satisfies the await contract.
    /// </summary>
    /// <remarks>
    /// This allows an <see cref="OVRTask{T}"/> to be awaited using the <c>await</c> keyword.
    /// Typically, you should not call this directly; instead, it is invoked by the compiler, e.g.,
    /// <example>
    /// <code><![CDATA[
    /// // Something that returns an OVRTask<T>
    /// var task = GetResultAsync();
    ///
    /// // compiler uses GetAwaiter here
    /// var result = await task;
    /// ]]></code>
    /// Or, more commonly:
    /// <code><![CDATA[
    /// var result = await GetResultAsync();
    /// ]]></code>
    /// </example>
    /// Requires the main thread to complete the await contract - blocking can result in an infinite loop.
    /// </remarks>
    /// <returns>Returns an Awaiter-like object that satisfies the await pattern.</returns>
    public Awaiter GetAwaiter() => new Awaiter(this);

    // Registers the await-pattern continuation; rejects mixing with ContinueWith.
    private void WithContinuation(Action continuation)
    {
        ValidateDelegateAndThrow(continuation, nameof(continuation));
        Continuations[_id] = continuation;
    }

    #endregion

    #region Delegate Implementation

    // Storage for stateless ContinueWith callbacks. Wraps the user delegate and
    // registers the static Invoker/Remover/Clearer into the outer maps so SetResult
    // and Dispose can reach it without knowing which storage type was used.
    readonly struct Callback
    {
        private static readonly Dictionary<Guid, Callback> Callbacks = new Dictionary<Guid, Callback>();

        readonly Action<TResult> _delegate;

        static void Invoke(Guid taskId, TResult result)
        {
            if (Callbacks.TryGetValue(taskId, out var callback))
            {
                // Remove before invoking so a re-registered id is not clobbered.
                Callbacks.Remove(taskId);
                callback.Invoke(result);
            }
        }

        static bool Remove(Guid taskId) => Callbacks.Remove(taskId);
        static void Clear() => Callbacks.Clear();
        void Invoke(TResult result) => _delegate(result);
        Callback(Action<TResult> @delegate) => _delegate = @delegate;

        // Cached delegates shared by every registration of this storage type.
        public static readonly CallbackInvoker Invoker = Invoke;
        public static readonly CallbackRemover Remover = Remove;
        public static readonly Action Clearer = Clear;

        public static void Add(Guid taskId, Action<TResult> @delegate)
        {
            Callbacks.Add(taskId, new Callback(@delegate));
            CallbackInvokers.Add(taskId, Invoker);
            CallbackRemovers.Add(taskId, Remover);
            CallbackClearers.Add(Clearer);
        }
    }

    // Storage for ContinueWith callbacks carrying extra user state of type T; one
    // Callbacks map per closed T, same pattern as Callback above.
    readonly struct CallbackWithState<T>
    {
        private static readonly Dictionary<Guid, CallbackWithState<T>> Callbacks =
            new Dictionary<Guid, CallbackWithState<T>>();

        readonly T _data;
        readonly Action<TResult, T> _delegate;

        static void Invoke(Guid taskId, TResult result)
        {
            if (Callbacks.TryGetValue(taskId, out var callback))
            {
                Callbacks.Remove(taskId);
                callback.Invoke(result);
            }
        }

        CallbackWithState(T data, Action<TResult, T> @delegate)
        {
            _data = data;
            _delegate = @delegate;
        }

        private static readonly CallbackInvoker Invoker = Invoke;
        private static readonly CallbackRemover Remover = Remove;
        private static readonly Action Clearer = Clear;
        private static void Clear() => Callbacks.Clear();
        private static bool Remove(Guid taskId) => Callbacks.Remove(taskId);

        // The stored state is passed back as the second callback argument.
        private void Invoke(TResult result) => _delegate(result, _data);

        public static void Add(Guid taskId, T data, Action<TResult, T> callback)
        {
            Callbacks.Add(taskId, new CallbackWithState<T>(data, callback));
            CallbackInvokers.Add(taskId, Invoker);
            CallbackRemovers.Add(taskId, Remover);
            CallbackClearers.Add(Clearer);
        }
    }

    /// <summary>
    /// Registers a delegate that will get called on completion of the task.
    /// </summary>
    /// <remarks>
    /// The delegate will be invoked with the <typeparamref name="TResult"/> result as parameter.
    /// Do not use in conjunction with any other methods (<c>await</c> or calling <see cref="GetResult"/>).
    /// </remarks>
    /// <param name="onCompleted">A delegate to be invoked when this task completes. If the task is already complete,
    /// <paramref name="onCompleted"/> is invoked immediately.</param>
    /// <seealso cref="ContinueWith{T}"/>
    /// <exception cref="ArgumentNullException">Thrown if <paramref name="onCompleted"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown if there already is a delegate or a continuation registered to this task.</exception>
    public void ContinueWith(Action<TResult> onCompleted)
    {
        ValidateDelegateAndThrow(onCompleted, nameof(onCompleted));
        if (IsCompleted)
        {
            // Already done: consume the stored result synchronously.
            onCompleted.Invoke(GetResult());
        }
        else
        {
            Callback.Add(_id, onCompleted);
        }
    }

    /// <summary>
    /// Registers a delegate that will get called on completion of the task.
    /// </summary>
    /// <remarks>
    /// The delegate will be invoked with <paramref name="state"/> and the <typeparamref name="TResult"/> result as
    /// parameters.
    /// Do not use in conjunction with any other methods (<c>await</c> or calling <see cref="GetResult"/>).
    /// </remarks>
    /// <param name="onCompleted">A delegate to be invoked when this task completes. If the task is already complete,
    /// <paramref name="onCompleted"/> is invoked immediately.</param>
    /// <param name="state">An <c>object</c> to store and pass to <paramref name="onCompleted"/>.</param>
    /// <seealso cref="ContinueWith(Action{TResult})"/>
    /// <exception cref="ArgumentNullException">Thrown if <paramref name="onCompleted"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown if there already is a delegate or a continuation registered to this task.</exception>
    public void ContinueWith<T>(Action<TResult, T> onCompleted, T state)
    {
        ValidateDelegateAndThrow(onCompleted, nameof(onCompleted));
        if (IsCompleted)
        {
            onCompleted.Invoke(GetResult(), state);
        }
        else
        {
            CallbackWithState<T>.Add(_id, state, onCompleted);
        }
    }

    // Enforces "exactly one consumption pattern per task": null check, then reject if
    // an await continuation or a ContinueWith callback is already registered.
    void ValidateDelegateAndThrow(object @delegate, string paramName)
    {
        if (@delegate == null)
            throw new ArgumentNullException(paramName);

        if (Continuations.ContainsKey(_id))
            throw new InvalidOperationException($"Task {_id} is already being used by an await call.");

        if (CallbackInvokers.ContainsKey(_id))
            throw new InvalidOperationException($"Task {_id} is already being used with ContinueWith.");
    }

    #endregion

    #region IDisposable Implementation

    /// <summary>
    /// Disposes of the task.
    /// </summary>
    /// <remarks>
    /// Invalidate this object but does not cancel the task.
    /// In the case where the result will not actually be consumed, it must be called to prevent a memory leak.
    /// You can not call <see cref="GetResult"/> nor use <c>await</c> on a disposed task.
    /// </remarks>
    public void Dispose()
    {
        // Purge every static map that may hold state for this id, including the
        // typed side-channel storages reachable only through their remover delegates.
        Results.Remove(_id);
        Continuations.Remove(_id);
        Pending.Remove(_id);
        CallbackInvokers.Remove(_id);

        if (CallbackRemovers.TryGetValue(_id, out var remover))
        {
            CallbackRemovers.Remove(_id);
            remover(_id);
        }

        if (InternalDataRemovers.TryGetValue(_id, out var internalDataRemover))
        {
            InternalDataRemovers.Remove(_id);
            internalDataRemover(_id);
        }

        if (SubscriberRemovers.TryGetValue(_id, out var subscriberRemover))
        {
            SubscriberRemovers.Remove(_id);
            subscriberRemover(_id);
        }
    }

    #endregion

    #region IEquatable Implementation

    // Tasks are equal iff they share the same id; all other state is keyed off the id.
    public bool Equals(OVRTask<TResult> other) => _id == other._id;
    public override bool Equals(object obj) => obj is OVRTask<TResult> other && Equals(other);
    public static bool operator ==(OVRTask<TResult> lhs, OVRTask<TResult> rhs) => lhs.Equals(rhs);
    public static bool operator !=(OVRTask<TResult> lhs, OVRTask<TResult> rhs) => !lhs.Equals(rhs);
    public override int GetHashCode() => _id.GetHashCode();
    public override string ToString() => _id.ToString();

    #endregion
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 7bc71515869d46e08e210173388da05c
timeCreated: 1669731273

View File

@ -0,0 +1,69 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Simple audio/visual test behaviour: every <see cref="period"/> seconds it toggles
/// the renderer's material color between green and red and plays the attached AudioSource.
/// </summary>
public class OVRAudioSourceTest : MonoBehaviour
{
    /// <summary>Interval in seconds between color toggles / audio plays.</summary>
    public float period = 2.0f;

    private float nextActionTime;

    // Cached in Start to avoid re-resolving components via GetComponent every
    // activation period (a per-frame lookup anti-pattern in Unity).
    private Material material;
    private AudioSource audioSource;

    // Start is called before the first frame update
    void Start()
    {
        // Instantiate a copy of the material so color changes don't affect other
        // renderers sharing the template material.
        Material templateMaterial = GetComponent<Renderer>().material;
        material = Instantiate<Material>(templateMaterial);
        material.color = Color.green;
        GetComponent<Renderer>().material = material;

        // Resolve once; report a missing AudioSource a single time instead of
        // logging the same error every period.
        audioSource = GetComponent<AudioSource>();
        if (audioSource == null)
        {
            Debug.LogError("Unable to find AudioSource");
        }

        nextActionTime = Time.time + period;
    }

    // Update is called once per frame
    void Update()
    {
        if (Time.time > nextActionTime)
        {
            nextActionTime = Time.time + period;

            // Toggle between the two test colors.
            material.color = material.color == Color.green ? Color.red : Color.green;

            if (audioSource != null)
            {
                audioSource.Play();
            }
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 358b12a21a8aa9540b435051f334fe9b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,51 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// If there is a game object under the main camera which should not be cloned under Mixed Reality Capture,
// attaching this component would auto destroy that after the MRC camera get cloned
// If there is a game object under the main camera which should not be cloned under Mixed Reality Capture,
// attaching this component would auto destroy that after the MRC camera get cloned
public class OVRAutoDestroyInMRC : MonoBehaviour
{
    // Use this for initialization
    void Start()
    {
        // Walk up the hierarchy; if any ancestor is a cloned MRC camera rig,
        // destroy this object so it does not render in the capture.
        for (Transform p = transform.parent; p != null; p = p.parent)
        {
            // Ordinal comparison: "OculusMRC_" is a fixed internal identifier, so the
            // culture-sensitive default of StartsWith would be incorrect (and slower)
            // under some locales (CA1310).
            if (p.gameObject.name.StartsWith("OculusMRC_", System.StringComparison.Ordinal))
            {
                Destroy(gameObject);
                break;
            }
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 42a68265e2d624d49ae7fced6a7e4d91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,55 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_chromatic_aberration")]
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_chromatic_aberration")]
public class OVRChromaticAberration : MonoBehaviour
{
    /// <summary>
    /// The button that will toggle chromatic aberration correction.
    /// </summary>
    public OVRInput.RawButton toggleButton = OVRInput.RawButton.X;

    // Current correction state; pushed to OVRManager whenever it changes.
    private bool _chromaticEnabled = false;

    void Start()
    {
        // Enable/Disable Chromatic Aberration Correction.
        // NOTE: Enabling Chromatic Aberration for mobile has a large performance cost.
        OVRManager.instance.chromatic = _chromaticEnabled;
    }

    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
        if (!OVRInput.GetDown(toggleButton))
        {
            return;
        }

        // Flip the correction state and apply it.
        _chromaticEnabled = !_chromaticEnabled;
        OVRManager.instance.chromatic = _chromaticEnabled;
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3b56515a831f2fb44bc7ae02679aeebc
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,432 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Simple helper script that conditionally enables rendering of a controller if it is connected.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_controller_helper")]
public class OVRControllerHelper : MonoBehaviour
{
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchQuestAndRiftSLeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchQuestAndRiftSRightController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Rift Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchRiftLeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Rift Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchRiftRightController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchQuest2LeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchQuest2RightController;
/// <summary>
/// The root GameObject that represents the Meta Touch Pro Controller model (Left).
/// </summary>
public GameObject m_modelMetaTouchProLeftController;
/// <summary>
/// The root GameObject that represents the Meta Touch Pro Controller model (Right).
/// </summary>
public GameObject m_modelMetaTouchProRightController;
/// <summary>
/// The root GameObject that represents the Meta Quest Plus Controller model (Left).
/// </summary>
public GameObject m_modelMetaTouchPlusLeftController;
/// <summary>
/// The root GameObject that represents the Meta Quest Plus Controller model (Right).
/// </summary>
public GameObject m_modelMetaTouchPlusRightController;
/// <summary>
/// The controller that determines whether or not to enable rendering of the controller model.
/// </summary>
public OVRInput.Controller m_controller;
/// <summary>
/// Determines if the controller should be hidden based on held state.
/// </summary>
public OVRInput.InputDeviceShowState m_showState = OVRInput.InputDeviceShowState.ControllerInHandOrNoHand;
/// <summary>
/// If controller driven hand poses is on, and the mode is Natural, controllers will be hidden unless this is true.
/// </summary>
public bool showWhenHandsArePoweredByNaturalControllerPoses = false;
/// <summary>
/// The animator component that contains the controller animation controller for animating buttons and triggers.
/// </summary>
private Animator m_animator;
private GameObject m_activeController;
private bool m_controllerModelsInitialized = false;
private bool m_hasInputFocus = true;
private bool m_hasInputFocusPrev = false;
private enum ControllerType
{
QuestAndRiftS = 1,
Rift = 2,
Quest2 = 3,
TouchPro = 4,
TouchPlus = 5,
}
private ControllerType activeControllerType = ControllerType.Rift;
private bool m_prevControllerConnected = false;
private bool m_prevControllerConnectedCached = false;
private OVRInput.ControllerInHandState m_prevControllerInHandState = OVRInput.ControllerInHandState.NoHand;
void Start()
{
    // Controller models can only be set up once the OVRManager singleton has
    // initialized; otherwise Update() retries on later frames.
    if (!OVRManager.OVRManagerinitialized)
    {
        return;
    }

    InitializeControllerModels();
}
// One-time setup: determines which controller model family to show for the current
// headset and interaction profile, hides every model until a controller connects,
// and subscribes to input-focus events. Safe to call repeatedly; only the first
// call does work.
void InitializeControllerModels()
{
    if (m_controllerModelsInitialized)
        return;

    OVRPlugin.SystemHeadset headset = OVRPlugin.GetSystemHeadsetType();
    OVRPlugin.Hand controllerHand = m_controller == OVRInput.Controller.LTouch
        ? OVRPlugin.Hand.HandLeft
        : OVRPlugin.Hand.HandRight;
    OVRPlugin.InteractionProfile profile = OVRPlugin.GetCurrentInteractionProfile(controllerHand);

    // If multimodality is enabled, then overwrite the value if we find the controllers to be unheld
    if (OVRPlugin.IsMultimodalHandsControllersSupported())
    {
        OVRPlugin.InteractionProfile detachedProfile =
            OVRPlugin.GetCurrentDetachedInteractionProfile(controllerHand);
        if (detachedProfile != OVRPlugin.InteractionProfile.None)
        {
            profile = detachedProfile;
        }
    }

    // Map headset (and, where relevant, interaction profile) to a model family.
    // Native and Link variants of the same headset are grouped with fall-through
    // labels; the previous version duplicated the Quest 2 branch verbatim.
    switch (headset)
    {
        case OVRPlugin.SystemHeadset.Rift_CV1:
            activeControllerType = ControllerType.Rift;
            break;
        case OVRPlugin.SystemHeadset.Oculus_Quest_2:
        case OVRPlugin.SystemHeadset.Oculus_Link_Quest_2:
            // Quest 2 headsets may be paired with Touch Pro controllers.
            activeControllerType = profile == OVRPlugin.InteractionProfile.TouchPro
                ? ControllerType.TouchPro
                : ControllerType.Quest2;
            break;
        case OVRPlugin.SystemHeadset.Meta_Quest_Pro:
        case OVRPlugin.SystemHeadset.Meta_Link_Quest_Pro:
            activeControllerType = ControllerType.TouchPro;
            break;
        case OVRPlugin.SystemHeadset.Meta_Quest_3:
        case OVRPlugin.SystemHeadset.Meta_Link_Quest_3:
            // Quest 3 ships with Touch Plus but also supports Touch Pro.
            activeControllerType = profile == OVRPlugin.InteractionProfile.TouchPro
                ? ControllerType.TouchPro
                : ControllerType.TouchPlus;
            break;
        default:
            activeControllerType = ControllerType.QuestAndRiftS;
            break;
    }

    Debug.LogFormat("OVRControllerHelper: Active controller type: {0} for product {1} (headset {2}, hand {3})",
        activeControllerType, OVRPlugin.productName, headset, controllerHand);

    // Hide all controller models until controller get connected
    m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
    m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
    m_modelOculusTouchRiftLeftController.SetActive(false);
    m_modelOculusTouchRiftRightController.SetActive(false);
    m_modelOculusTouchQuest2LeftController.SetActive(false);
    m_modelOculusTouchQuest2RightController.SetActive(false);
    m_modelMetaTouchProLeftController.SetActive(false);
    m_modelMetaTouchProRightController.SetActive(false);
    m_modelMetaTouchPlusLeftController.SetActive(false);
    m_modelMetaTouchPlusRightController.SetActive(false);

    OVRManager.InputFocusAcquired += InputFocusAquired;
    OVRManager.InputFocusLost += InputFocusLost;

    m_controllerModelsInitialized = true;
}
/// <summary>
/// Per-frame driver: lazily initializes the controller models, re-selects the
/// visible model when connection / in-hand / focus state changes, applies the
/// show-state policy, and feeds button/axis values into the model's Animator.
/// </summary>
void Update()
{
    // Defer all work until OVRManager has finished initializing.
    if (!m_controllerModelsInitialized)
    {
        if (!OVRManager.OVRManagerinitialized)
        {
            return;
        }

        InitializeControllerModels();
    }

    OVRInput.Hand handOfController = (m_controller == OVRInput.Controller.LTouch)
        ? OVRInput.Hand.HandLeft
        : OVRInput.Hand.HandRight;
    OVRInput.ControllerInHandState controllerInHandState = OVRInput.GetControllerIsInHandState(handOfController);
    bool controllerConnected = OVRInput.IsControllerConnected(m_controller);

    // Only re-select the visible model when something relevant actually changed
    // (or on the very first frame, when no previous value has been cached).
    bool selectionStateChanged = (controllerConnected != m_prevControllerConnected) ||
                                 !m_prevControllerConnectedCached ||
                                 (controllerInHandState != m_prevControllerInHandState) ||
                                 (m_hasInputFocus != m_hasInputFocusPrev);
    if (selectionStateChanged)
    {
        SelectActiveControllerModel(controllerConnected);

        m_prevControllerConnected = controllerConnected;
        m_prevControllerConnectedCached = true;
        m_prevControllerInHandState = controllerInHandState;
        m_hasInputFocusPrev = m_hasInputFocus;
    }

    bool shouldSetControllerActive = ShouldShowController(controllerConnected, controllerInHandState);

    if (m_activeController != null)
    {
        m_activeController.SetActive(shouldSetControllerActive);
    }

    UpdateControllerAnimation();
}

// Activates the left/right model pair matching activeControllerType, hides all
// other models, and caches the matching Animator / active GameObject.
private void SelectActiveControllerModel(bool controllerConnected)
{
    GameObject leftModel;
    GameObject rightModel;
    switch (activeControllerType)
    {
        case ControllerType.Rift:
            leftModel = m_modelOculusTouchRiftLeftController;
            rightModel = m_modelOculusTouchRiftRightController;
            break;
        case ControllerType.Quest2:
            leftModel = m_modelOculusTouchQuest2LeftController;
            rightModel = m_modelOculusTouchQuest2RightController;
            break;
        case ControllerType.QuestAndRiftS:
            leftModel = m_modelOculusTouchQuestAndRiftSLeftController;
            rightModel = m_modelOculusTouchQuestAndRiftSRightController;
            break;
        case ControllerType.TouchPro:
            leftModel = m_modelMetaTouchProLeftController;
            rightModel = m_modelMetaTouchProRightController;
            break;
        default: // ControllerType.TouchPlus (and any future/unknown type, as before)
            leftModel = m_modelMetaTouchPlusLeftController;
            rightModel = m_modelMetaTouchPlusRightController;
            break;
    }

    // Hide everything, then show only the connected hand of the selected pair.
    m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
    m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
    m_modelOculusTouchRiftLeftController.SetActive(false);
    m_modelOculusTouchRiftRightController.SetActive(false);
    m_modelOculusTouchQuest2LeftController.SetActive(false);
    m_modelOculusTouchQuest2RightController.SetActive(false);
    m_modelMetaTouchProLeftController.SetActive(false);
    m_modelMetaTouchProRightController.SetActive(false);
    m_modelMetaTouchPlusLeftController.SetActive(false);
    m_modelMetaTouchPlusRightController.SetActive(false);

    leftModel.SetActive(controllerConnected && (m_controller == OVRInput.Controller.LTouch));
    rightModel.SetActive(controllerConnected && (m_controller == OVRInput.Controller.RTouch));

    bool isLeftHand = m_controller == OVRInput.Controller.LTouch;
    m_animator = isLeftHand ? leftModel.GetComponent<Animator>() : rightModel.GetComponent<Animator>();
    m_activeController = isLeftHand ? leftModel : rightModel;
}

// Applies the m_showState policy and the controller-driven-hand-poses override
// on top of the basic "has focus and is connected" visibility.
private bool ShouldShowController(bool controllerConnected, OVRInput.ControllerInHandState controllerInHandState)
{
    bool show = m_hasInputFocus && controllerConnected;

    switch (m_showState)
    {
        case OVRInput.InputDeviceShowState.Always:
            // intentionally blank
            break;
        case OVRInput.InputDeviceShowState.ControllerInHandOrNoHand:
            if (controllerInHandState == OVRInput.ControllerInHandState.ControllerNotInHand)
            {
                show = false;
            }
            break;
        case OVRInput.InputDeviceShowState.ControllerInHand:
            if (controllerInHandState != OVRInput.ControllerInHandState.ControllerInHand)
            {
                show = false;
            }
            break;
        case OVRInput.InputDeviceShowState.ControllerNotInHand:
            if (controllerInHandState != OVRInput.ControllerInHandState.ControllerNotInHand)
            {
                show = false;
            }
            break;
        case OVRInput.InputDeviceShowState.NoHand:
            if (controllerInHandState != OVRInput.ControllerInHandState.NoHand)
            {
                show = false;
            }
            break;
    }

    if (!showWhenHandsArePoweredByNaturalControllerPoses &&
        OVRPlugin.IsControllerDrivenHandPosesEnabled() &&
        OVRPlugin.AreControllerDrivenHandPosesNatural())
    {
        show = false;
    }

    return show;
}

// Pushes the current button/trigger/thumbstick state into the active model's
// Animator parameters. No-op when no animator has been selected yet.
private void UpdateControllerAnimation()
{
    if (m_animator == null)
    {
        return;
    }

    m_animator.SetFloat("Button 1", OVRInput.Get(OVRInput.Button.One, m_controller) ? 1.0f : 0.0f);
    m_animator.SetFloat("Button 2", OVRInput.Get(OVRInput.Button.Two, m_controller) ? 1.0f : 0.0f);
    m_animator.SetFloat("Button 3", OVRInput.Get(OVRInput.Button.Start, m_controller) ? 1.0f : 0.0f);

    // Read the thumbstick once instead of twice per frame.
    Vector2 thumbstick = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick, m_controller);
    m_animator.SetFloat("Joy X", thumbstick.x);
    m_animator.SetFloat("Joy Y", thumbstick.y);

    m_animator.SetFloat("Trigger", OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger, m_controller));
    m_animator.SetFloat("Grip", OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller));
}
/// <summary>
/// Handler subscribed to <c>OVRManager.InputFocusAcquired</c>; records that the
/// app has input focus again. (Method name spelling kept for compatibility with
/// existing subscriptions.)
/// </summary>
public void InputFocusAquired() => m_hasInputFocus = true;
/// <summary>
/// Handler subscribed to <c>OVRManager.InputFocusLost</c>; records that the app
/// no longer has input focus.
/// </summary>
public void InputFocusLost() => m_hasInputFocus = false;
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: aed62bf3ae2456c408f247f96808ce96
timeCreated: 1486166271
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,207 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
/// <summary>
/// Diagnostic behaviour that polls OVRInput every frame and renders the current
/// controller state (battery, framerate, pose, axes, and button/touch states)
/// into an optional UI <c>Text</c> element.
/// </summary>
public class OVRControllerTest : MonoBehaviour
{
    /// <summary>
    /// Tracks a boolean signal over time and remembers whether it changed within
    /// the last <c>displayTimeout</c> seconds, so brief presses stay visible in
    /// the read-out for a moment.
    /// </summary>
    public class BoolMonitor
    {
        public delegate bool BoolGenerator();

        private string m_name = "";
        private BoolGenerator m_generator;
        private bool m_prevValue = false;
        private bool m_currentValue = false;
        private bool m_currentValueRecentlyChanged = false;
        private float m_displayTimeout = 0.0f; // seconds the "recently changed" highlight persists
        private float m_displayTimer = 0.0f;   // countdown for the current highlight

        public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
        {
            m_name = name;
            m_generator = generator;
            m_displayTimeout = displayTimeout;
        }

        /// <summary>
        /// Samples the generator and starts/advances the highlight timer.
        /// Call once per frame.
        /// </summary>
        public void Update()
        {
            m_prevValue = m_currentValue;
            m_currentValue = m_generator();

            if (m_currentValue != m_prevValue)
            {
                m_currentValueRecentlyChanged = true;
                m_displayTimer = m_displayTimeout;
            }

            if (m_displayTimer > 0.0f)
            {
                m_displayTimer -= Time.deltaTime;

                if (m_displayTimer <= 0.0f)
                {
                    m_currentValueRecentlyChanged = false;
                    m_displayTimer = 0.0f;
                }
            }
        }

        /// <summary>
        /// Appends "name: value" to the builder; values that changed recently are
        /// wrapped in asterisks (e.g. "*True*") to highlight them.
        /// </summary>
        public void AppendToStringBuilder(ref StringBuilder sb)
        {
            sb.Append(m_name);

            if (m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *True*\n");
            else if (m_currentValue)
                sb.Append(": True \n");
            else if (!m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *False*\n");
            else if (!m_currentValue)
                sb.Append(": False \n");
        }
    }

    // Optional output target; when null the state is still computed but not shown.
    public Text uiText;
    private List<BoolMonitor> monitors;
    private StringBuilder data;

    void Start()
    {
        if (uiText != null)
        {
            // Asterisk highlights must render literally, not as markup.
            uiText.supportRichText = false;
        }

        data = new StringBuilder(2048);

        monitors = new List<BoolMonitor>()
        {
            // virtual
            new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
            new BoolMonitor("OneDown", () => OVRInput.GetDown(OVRInput.Button.One)),
            new BoolMonitor("OneUp", () => OVRInput.GetUp(OVRInput.Button.One)),
            new BoolMonitor("One (Touch)", () => OVRInput.Get(OVRInput.Touch.One)),
            new BoolMonitor("OneDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.One)),
            new BoolMonitor("OneUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.One)),
            new BoolMonitor("Two", () => OVRInput.Get(OVRInput.Button.Two)),
            new BoolMonitor("TwoDown", () => OVRInput.GetDown(OVRInput.Button.Two)),
            new BoolMonitor("TwoUp", () => OVRInput.GetUp(OVRInput.Button.Two)),
            new BoolMonitor("PrimaryIndexTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTrigger (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerDown (Touch)",
                () => OVRInput.GetDown(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryHandTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("PrimaryHandTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("PrimaryHandTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("Up", () => OVRInput.Get(OVRInput.Button.Up)),
            new BoolMonitor("Down", () => OVRInput.Get(OVRInput.Button.Down)),
            new BoolMonitor("Left", () => OVRInput.Get(OVRInput.Button.Left)),
            new BoolMonitor("Right", () => OVRInput.Get(OVRInput.Button.Right)),
            // raw
            new BoolMonitor("Start", () => OVRInput.Get(OVRInput.RawButton.Start)),
            new BoolMonitor("StartDown", () => OVRInput.GetDown(OVRInput.RawButton.Start)),
            new BoolMonitor("StartUp", () => OVRInput.GetUp(OVRInput.RawButton.Start)),
            new BoolMonitor("Back", () => OVRInput.Get(OVRInput.RawButton.Back)),
            new BoolMonitor("BackDown", () => OVRInput.GetDown(OVRInput.RawButton.Back)),
            new BoolMonitor("BackUp", () => OVRInput.GetUp(OVRInput.RawButton.Back)),
            new BoolMonitor("A", () => OVRInput.Get(OVRInput.RawButton.A)),
            new BoolMonitor("ADown", () => OVRInput.GetDown(OVRInput.RawButton.A)),
            new BoolMonitor("AUp", () => OVRInput.GetUp(OVRInput.RawButton.A)),
        };
    }

    // Connected-controller set as of the previous frame, used to flag changes.
    static string prevConnected = "";

    static BoolMonitor controllers = new BoolMonitor("Controllers Changed",
        () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });

    void Update()
    {
        OVRInput.Controller activeController = OVRInput.GetActiveController();

        // Reuse the builder each frame; Clear() is the idiomatic reset.
        data.Clear();

        byte battery = OVRInput.GetControllerBatteryPercentRemaining();
        data.AppendFormat("Battery: {0}\n", battery);

        float framerate = OVRPlugin.GetAppFramerate();
        data.AppendFormat("Framerate: {0:F2}\n", framerate);

        string activeControllerName = activeController.ToString();
        data.AppendFormat("Active: {0}\n", activeControllerName);

        string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
        data.AppendFormat("Connected: {0}\n", connectedControllerNames);

        data.AppendFormat("PrevConnected: {0}\n", prevConnected);

        controllers.Update();
        controllers.AppendToStringBuilder(ref data);

        prevConnected = connectedControllerNames;

        Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
        data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);

        Vector3 angVel = OVRInput.GetLocalControllerAngularVelocity(activeController);
        data.AppendFormat("AngVel: ({0:F2}, {1:F2}, {2:F2})\n", angVel.x, angVel.y, angVel.z);

#pragma warning disable CS0618 // Type or member is obsolete
        Vector3 angAcc = OVRInput.GetLocalControllerAngularAcceleration(activeController);
        data.AppendFormat("AngAcc: ({0:F2}, {1:F2}, {2:F2})\n", angAcc.x, angAcc.y, angAcc.z);
#pragma warning restore CS0618 // Type or member is obsolete

        Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
        data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);

        Vector3 vel = OVRInput.GetLocalControllerVelocity(activeController);
        data.AppendFormat("Vel: ({0:F2}, {1:F2}, {2:F2})\n", vel.x, vel.y, vel.z);

#pragma warning disable CS0618 // Type or member is obsolete
        Vector3 acc = OVRInput.GetLocalControllerAcceleration(activeController);
        data.AppendFormat("Acc: ({0:F2}, {1:F2}, {2:F2})\n", acc.x, acc.y, acc.z);
#pragma warning restore CS0618 // Type or member is obsolete

        float indexTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
        data.AppendFormat("PrimaryIndexTriggerAxis1D: ({0:F2})\n", indexTrigger);

        float handTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger);
        data.AppendFormat("PrimaryHandTriggerAxis1D: ({0:F2})\n", handTrigger);

        for (int i = 0; i < monitors.Count; i++)
        {
            monitors[i].Update();
            monitors[i].AppendToStringBuilder(ref data);
        }

        if (uiText != null)
        {
            uiText.text = data.ToString();
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e0a6abd1cb88e9245bd78dac49d7fd6e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,300 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
using System.IO;
/// <summary>
/// Helper script to capture a cubemap and save it into a PNG or JPG file.
/// </summary>
/// <description>
/// How it works:
/// 1) This script can be attached to an existing game object; you can also use the prefab Assets\OVR\Prefabs\OVRCubemapCaptureProbe.
/// There are 2 ways to trigger a capture if you attached this script to a game object.
/// * Automatic capturing: if [autoTriggerAfterLaunch] is true, an automatic capture will be triggered after [autoTriggerDelay] seconds.
/// * Keyboard trigger: press key [triggeredByKey] to trigger a capture.
/// 2) If you'd like to trigger the screen capture from your own code, call the static function [OVRCubemapCapture.TriggerCubemapCapture] with the proper arguments.
/// </description>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_cubemap_capture")]
public class OVRCubemapCapture : MonoBehaviour
{
    /// <summary>
    /// Enable the automatic screenshot trigger, which will capture a cubemap after autoTriggerDelay (seconds)
    /// </summary>
    public bool autoTriggerAfterLaunch = true;

    public float autoTriggerDelay = 1.0f;
    private float autoTriggerElapse = 0.0f;

    /// <summary>
    /// Trigger cubemap screenshot if user pressed key triggeredByKey
    /// </summary>
    public KeyCode triggeredByKey = KeyCode.F8;

    /// <summary>
    /// The complete file path for saving the cubemap screenshot, including the filename and extension
    /// if pathName is blank, screenshots will be saved into %USERPROFILE%\Documents\OVR_ScreenShot360
    /// </summary>
    public string pathName;

    /// <summary>
    /// The cube face resolution
    /// </summary>
    public int cubemapSize = 2048;

    // Update is called once per frame
    void Update()
    {
        // Trigger after autoTriggerDelay
        if (autoTriggerAfterLaunch)
        {
            autoTriggerElapse += Time.deltaTime;
            if (autoTriggerElapse >= autoTriggerDelay)
            {
                autoTriggerAfterLaunch = false; // fire only once
                TriggerCubemapCapture(transform.position, cubemapSize, pathName);
            }
        }

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        // Trigger by press triggeredByKey
        if (Input.GetKeyDown(triggeredByKey))
        {
            TriggerCubemapCapture(transform.position, cubemapSize, pathName);
        }
#endif
    }

    /// <summary>
    /// Generate unity cubemap at specific location and save into JPG/PNG
    /// </summary>
    /// <description>
    /// Default save folder: your app's persistentDataPath
    /// Default file name: using current time OVR_hh_mm_ss.png
    /// Note1: this will take a few seconds to finish
    /// Note2: if you only want to specify path not filename, please end [pathName] with "/"
    /// </description>
    public static void TriggerCubemapCapture(Vector3 capturePos, int cubemapSize = 2048, string pathName = null)
    {
        // Temporary, hidden camera used only for the capture; destroyed afterwards.
        GameObject ownerObj = new GameObject("CubemapCamera", typeof(Camera));
        ownerObj.hideFlags = HideFlags.HideAndDontSave;
        ownerObj.transform.position = capturePos;
        ownerObj.transform.rotation = Quaternion.identity;

        Camera camComponent = ownerObj.GetComponent<Camera>();
        camComponent.farClipPlane = 10000.0f;
        camComponent.enabled = false;

        Cubemap cubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
        RenderIntoCubemap(camComponent, cubemap);
        SaveCubemapCapture(cubemap, pathName);
        DestroyImmediate(cubemap);
        DestroyImmediate(ownerObj);
    }

    /// <summary>
    /// Renders the six cubemap faces from the camera's position into <paramref name="outCubemap"/>.
    /// Camera state (FOV, aspect, rotation, target texture) and RenderTexture.active are
    /// backed up and restored around the capture.
    /// </summary>
    public static void RenderIntoCubemap(Camera ownerCamera, Cubemap outCubemap)
    {
        int width = (int)outCubemap.width;
        int height = (int)outCubemap.height;

        CubemapFace[] faces = new CubemapFace[]
        {
            CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY,
            CubemapFace.PositiveZ, CubemapFace.NegativeZ
        };

        // Euler angles that orient the camera toward each face, in the same order as `faces`.
        Vector3[] faceAngles = new Vector3[]
        {
            new Vector3(0.0f, 90.0f, 0.0f), new Vector3(0.0f, -90.0f, 0.0f), new Vector3(-90.0f, 0.0f, 0.0f),
            new Vector3(90.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 180.0f, 0.0f)
        };

        // Backup states
        RenderTexture backupRenderTex = RenderTexture.active;
        float backupFieldOfView = ownerCamera.fieldOfView;
        float backupAspect = ownerCamera.aspect;
        Quaternion backupRot = ownerCamera.transform.rotation;
        // Back up the camera's own render target so it can be restored afterwards
        // (previously RenderTexture.active's backup was wrongly assigned to it).
        RenderTexture backupTargetTexture = ownerCamera.targetTexture;

        // Enable 8X MSAA
        RenderTexture faceTexture = new RenderTexture(width, height, 24);
        faceTexture.antiAliasing = 8;
        faceTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
        faceTexture.hideFlags = HideFlags.HideAndDontSave;

        // For intermediate saving
        Texture2D swapTex = new Texture2D(width, height, TextureFormat.RGB24, false);
        swapTex.hideFlags = HideFlags.HideAndDontSave;

        // Capture 6 Directions
        ownerCamera.targetTexture = faceTexture;
        ownerCamera.fieldOfView = 90;
        ownerCamera.aspect = 1.0f;

        Color[] mirroredPixels = new Color[swapTex.height * swapTex.width];
        for (int i = 0; i < faces.Length; i++)
        {
            ownerCamera.transform.eulerAngles = faceAngles[i];
            ownerCamera.Render();
            RenderTexture.active = faceTexture;
            swapTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);

            // Mirror vertically to meet the standard of unity cubemap
            Color[] OrignalPixels = swapTex.GetPixels();
            for (int y1 = 0; y1 < height; y1++)
            {
                for (int x1 = 0; x1 < width; x1++)
                {
                    mirroredPixels[y1 * width + x1] = OrignalPixels[((height - 1 - y1) * width) + x1];
                }
            }

            outCubemap.SetPixels(mirroredPixels, faces[i]);
        }

        outCubemap.SmoothEdges();

        // Restore states
        RenderTexture.active = backupRenderTex;
        ownerCamera.fieldOfView = backupFieldOfView;
        ownerCamera.aspect = backupAspect;
        ownerCamera.transform.rotation = backupRot;
        ownerCamera.targetTexture = backupTargetTexture;

        DestroyImmediate(swapTex);
        DestroyImmediate(faceTexture);
    }

    /// <summary>
    /// Save unity cubemap into NPOT 6x1 cubemap/texture atlas in the following format PX NX PY NY PZ NZ
    /// </summary>
    /// <description>
    /// Supported format: PNG/JPG
    /// Default file name: using current time OVR_hh_mm_ss.png
    /// </description>
    /// <returns>True on success; false when the extension is unsupported, the
    /// directory cannot be created, or the file cannot be written.</returns>
    public static bool SaveCubemapCapture(Cubemap cubemap, string pathName = null)
    {
        string fileName;
        string dirName;
        int width = cubemap.width;
        int height = cubemap.height;
        int x = 0;
        int y = 0;
        bool saveToPNG = true;

        if (string.IsNullOrEmpty(pathName))
        {
            dirName = Application.persistentDataPath + "/OVR_ScreenShot360/";
            fileName = null;
        }
        else
        {
            dirName = Path.GetDirectoryName(pathName);
            fileName = Path.GetFileName(pathName);

            // Ensure the directory name ends with exactly one separator.
            // Note: the original check used '||', which is always true, and it
            // indexed dirName[-1] when pathName had no directory component.
            if (string.IsNullOrEmpty(dirName))
                dirName = "./";
            else if (dirName[dirName.Length - 1] != '/' && dirName[dirName.Length - 1] != '\\')
                dirName += "/";
        }

        if (string.IsNullOrEmpty(fileName))
            fileName = "OVR_" + System.DateTime.Now.ToString("hh_mm_ss") + ".png";

        string extName = Path.GetExtension(fileName);
        if (extName == ".png")
        {
            saveToPNG = true;
        }
        else if (extName == ".jpg")
        {
            saveToPNG = false;
        }
        else
        {
            Debug.LogError("Unsupported file format" + extName);
            return false;
        }

        // Validate path
        try
        {
            System.IO.Directory.CreateDirectory(dirName);
        }
        catch (System.Exception e)
        {
            Debug.LogError("Failed to create path " + dirName + " since " + e.ToString());
            return false;
        }

        // Create the new texture
        Texture2D tex = new Texture2D(width * 6, height, TextureFormat.RGB24, false);
        if (tex == null)
        {
            Debug.LogError("[OVRScreenshotWizard] Failed creating the texture!");
            return false;
        }

        // Merge all the cubemap faces into the texture
        // Reference cubemap format: http://docs.unity3d.com/Manual/class-Cubemap.html
        CubemapFace[] faces = new CubemapFace[]
        {
            CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY,
            CubemapFace.PositiveZ, CubemapFace.NegativeZ
        };
        for (int i = 0; i < faces.Length; i++)
        {
            // get the pixels from the cubemap
            Color[] srcPixels = null;
            Color[] pixels = cubemap.GetPixels(faces[i]);
            // if desired, flip them as they are ordered left to right, bottom to top
            srcPixels = new Color[pixels.Length];
            for (int y1 = 0; y1 < height; y1++)
            {
                for (int x1 = 0; x1 < width; x1++)
                {
                    srcPixels[y1 * width + x1] = pixels[((height - 1 - y1) * width) + x1];
                }
            }

            // Copy them to the dest texture
            tex.SetPixels(x, y, width, height, srcPixels);
            x += width;
        }

        try
        {
            // Encode the texture and save it to disk
            byte[] bytes = saveToPNG ? tex.EncodeToPNG() : tex.EncodeToJPG();
            System.IO.File.WriteAllBytes(dirName + fileName, bytes);
            Debug.Log("Cubemap file created " + dirName + fileName);
        }
        catch (System.Exception e)
        {
            Debug.LogError("Failed to save cubemap file since " + e.ToString());
            return false;
        }

        DestroyImmediate(tex);
        return true;
    }
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7a489178b0acf0147846b3873447beaf
timeCreated: 1464728890
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,30 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// Abstract base for cursor visuals. Implementations decide how to place the
/// cursor, given either a pointing ray or explicit start/destination points
/// plus a surface normal.
/// </summary>
abstract public class OVRCursor : MonoBehaviour
{
    /// <summary>Updates the cursor from the given pointing-ray transform.</summary>
    public abstract void SetCursorRay(Transform ray);

    /// <summary>Updates the cursor from a start point, destination point, and surface normal.</summary>
    public abstract void SetCursorStartDest(Vector3 start, Vector3 dest, Vector3 normal);
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f2233ce673fcb9f41bd0753f867b7f70
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,106 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using UnityEngine;
using UnityEngine.Assertions;
/// <summary>
/// OVR Component to drive blend shapes on a <c>SkinnedMeshRenderer</c> based on Face Tracking provided by <c>OVRFaceExpressions</c>.
/// </summary>
/// <remarks>
/// See <see cref="OVRFace"/> for more information.
/// This specialization of <see cref="OVRFace"/> provides mapping based on an array, configurable from the editor
/// This component comes with a custom editor that supports attempting to auto populate the mapping array based on string matching
/// See <see cref="OVRCustomFaceEditor"/> for more information.
/// </remarks>
[RequireComponent(typeof(SkinnedMeshRenderer))]
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_custom_face")]
public class OVRCustomFace : OVRFace
{
    /// <summary>
    /// Per-blend-shape mapping: element i is the face expression that drives
    /// blend shape i of the skinned mesh renderer's shared mesh.
    /// </summary>
    public OVRFaceExpressions.FaceExpression[] Mappings
    {
        get => _mappings;
        set => _mappings = value;
    }

    // Serialized backing store for Mappings; edited via the custom inspector.
    [SerializeField]
    [Tooltip("The mapping between Face Expressions to the blendshapes available " +
"on the shared mesh of the skinned mesh renderer")]
    internal OVRFaceExpressions.FaceExpression[] _mappings;

    // Which naming scheme the editor used to auto-populate the mapping
    // (hidden in the inspector; see RetargetingType below).
    [SerializeField, HideInInspector]
    internal RetargetingType retargetingType;

    /// <summary>Accessor for the serialized retargeting type.</summary>
    protected RetargetingType RetargetingValue
    {
        get => retargetingType;
        set => retargetingType = value;
    }

    // When true, the same face expression may be mapped to multiple blend shapes.
    [SerializeField]
    [Tooltip("Allow duplicates when mapping blendshapes to Face Expressions")]
    internal bool _allowDuplicateMapping = true;

    /// <summary>Accessor for the serialized duplicate-mapping flag.</summary>
    protected bool AllowDuplicateMapping
    {
        get => _allowDuplicateMapping;
        set => _allowDuplicateMapping = value;
    }

    /// <inheritdoc/>
    protected override void Start()
    {
        base.Start();
        // The mapping must have exactly one entry per blend shape on the mesh;
        // a mismatch means the mesh changed after the mapping was generated.
        Assert.IsNotNull(_mappings);
        Assert.AreEqual(_mappings.Length, RetrieveSkinnedMeshRenderer().sharedMesh.blendShapeCount,
            "Mapping out of sync with shared mesh.");
    }

    /// <inheritdoc/>
    protected internal override OVRFaceExpressions.FaceExpression GetFaceExpression(int blendShapeIndex)
    {
        Assert.IsTrue(blendShapeIndex < _mappings.Length && blendShapeIndex >= 0);
        return _mappings[blendShapeIndex];
    }

    /// <summary>
    /// Allows the user to define their own blend shape name and face expression pair mappings.
    /// By default it will just return the Oculus version.
    /// </summary>
    /// <returns>Two arrays, each relating a blend shape name with a face expression pair.</returns>
    protected internal virtual (string[], OVRFaceExpressions.FaceExpression[])
        GetCustomBlendShapeNameAndExpressionPairs()
    {
        // Default pairing: every FaceExpression enum member paired with its own name.
        string[] oculusBlendShapeNames = Enum.GetNames(typeof(OVRFaceExpressions.FaceExpression));
        OVRFaceExpressions.FaceExpression[] oculusFaceExpressions =
            (OVRFaceExpressions.FaceExpression[])Enum.GetValues(typeof(OVRFaceExpressions.FaceExpression));
        return (oculusBlendShapeNames, oculusFaceExpressions);
    }

    // Naming scheme used when auto-generating the mapping in the editor.
    public enum RetargetingType
    {
        OculusFace = 0,
        Custom = 1,
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8261b39cb0902434f972a40b33f9acf2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,235 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using UnityEngine;
using UnityEngine.Assertions;
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_custom_face_extensions")]
public static class OVRCustomFaceExtensions
{
/// <summary>
/// Finds the best-matching face expression for every blend shape on a mesh,
/// by comparing tokenized names.
/// </summary>
/// <remarks>
/// Candidate expression names and blend shape names are broken into tokens and
/// matched token-by-token; match quality is the total character count of the
/// matching tokens, with a minimum of more than 2 characters required so that
/// short suffixes (L/R, LB/RB, ...) alone cannot match. A Levenshtein-style
/// comparison could tolerate typos while staying order-insensitive.
/// </remarks>
/// <param name="skinnedMesh">The mesh to find a mapping for.</param>
/// <param name="blendShapeNames">Array of blend shape names</param>
/// <param name="faceExpressions">Array of FaceExpression id for mapping to them</param>
/// <param name="allowDuplicateMapping">Whether to allow duplicate mapping or not</param>
/// <returns>An array of <see cref="OVRFaceExpressions.FaceExpression"/>, one per
/// blend shape on <paramref name="skinnedMesh"/>, holding the closest match found.</returns>
public static OVRFaceExpressions.FaceExpression[] AutoGenerateMapping(
    Mesh skinnedMesh,
    string[] blendShapeNames,
    OVRFaceExpressions.FaceExpression[] faceExpressions,
    bool allowDuplicateMapping)
{
    Assert.AreEqual(blendShapeNames.Length, faceExpressions.Length);

    // Tokenize every candidate name once, up front.
    var tokenizedNames = new HashSet<string>[blendShapeNames.Length];
    for (int nameIndex = 0; nameIndex < blendShapeNames.Length; ++nameIndex)
    {
        tokenizedNames[nameIndex] = TokenizeString(blendShapeNames[nameIndex]);
    }

    var alreadyMapped = new HashSet<OVRFaceExpressions.FaceExpression>();
    var mapping = new OVRFaceExpressions.FaceExpression[skinnedMesh.blendShapeCount];
    for (int shapeIndex = 0; shapeIndex < mapping.Length; ++shapeIndex)
    {
        string shapeName = skinnedMesh.GetBlendShapeName(shapeIndex);
        var candidate = FindBestMatch(
            tokenizedNames,
            shapeName,
            faceExpressions,
            OVRFaceExpressions.FaceExpression.Invalid);

        // When duplicates are disallowed, a reused expression is rejected —
        // except for lips-toward shapes, which are exempt from the rule.
        bool rejectedDuplicate = !allowDuplicateMapping &&
                                 alreadyMapped.Contains(candidate) &&
                                 !IsLipsToward(shapeName);
        if (rejectedDuplicate)
        {
            mapping[shapeIndex] = OVRFaceExpressions.FaceExpression.Invalid;
        }
        else
        {
            mapping[shapeIndex] = candidate;
            alreadyMapped.Add(candidate);
        }
    }

    return mapping;
}
private static OVRFaceExpressions.FaceExpression FindBestMatch(HashSet<string>[] tokenizedOptions,
string searchString, OVRFaceExpressions.FaceExpression[] expressions,
OVRFaceExpressions.FaceExpression fallback)
{
searchString = searchString.Substring(searchString.LastIndexOf('.') + 1); //remove model name prefix if present
HashSet<string> blendShapeTokens = TokenizeString(searchString);
OVRFaceExpressions.FaceExpression bestMatch = fallback;
// require more than two characters to match in an expression, to avoid just matching L/ LB/ R/RB
int bestMatchCount = 2;
for (int j = 0; j < tokenizedOptions.Length; ++j)
{
int thisMatchCount = 0;
HashSet<string> thisSet = tokenizedOptions[j];
// Currently we only allow exact matches, using Levenshtein distance for fuzzy matches
// would allow for handling of common typos and other slight mismatches
foreach (string matchingToken in blendShapeTokens.Intersect(thisSet))
{
thisMatchCount += matchingToken.Length;
}
if (thisMatchCount > bestMatchCount)
{
bestMatchCount = thisMatchCount;
bestMatch = expressions[j];
}
}
return bestMatch;
}
private static bool IsLipsToward(string blendshapeName)
{
blendshapeName = blendshapeName.Substring(blendshapeName.IndexOf('.') + 1);
return blendshapeName == "lipsToward_LB" ||
blendshapeName == "lipsToward_RB" ||
blendshapeName == "lipsToward_LT" ||
blendshapeName == "lipsToward_RT";
}
internal static HashSet<string> TokenizeString(string s)
{
var separators = new char[] { ' ', '_', '-', ',', '.', ';' };
// add both the camel case and non-camel case split versions since the
// camel case split doesn't handle all caps
//(it's fundamentally ambigous without natural language comprehension)
// duplicates don't matter as we later will hash them and they should match
var splitTokens = SplitCamelCase(s).Split(separators).Concat(s.Split(separators));
var hashCodes = new HashSet<string>();
foreach (string token in splitTokens)
{
string lowerCaseToken = token.ToLowerInvariant();
// give a chance for synonyms to mach with low weight
if (lowerCaseToken == "left" || lowerCaseToken == "l")
{
hashCodes.Add("L");
}
if (lowerCaseToken == "right" || lowerCaseToken == "r")
{
hashCodes.Add("R");
}
hashCodes.Add(lowerCaseToken);
}
return hashCodes;
}
private static string SplitCamelCase(string input) => System.Text.RegularExpressions.Regex
.Replace(input, "([A-Z])", " $1", System.Text.RegularExpressions.RegexOptions.Compiled).Trim();
public static void AutoMapBlendshapes(this OVRCustomFace customFace)
{
var type = customFace.retargetingType;
var renderer = customFace.GetComponent<SkinnedMeshRenderer>();
try
{
OVRFaceExpressions.FaceExpression[] generatedMapping;
switch (type)
{
case OVRCustomFace.RetargetingType.OculusFace:
generatedMapping = OculusFaceAutoGenerateMapping(renderer.sharedMesh,
customFace._allowDuplicateMapping);
break;
case OVRCustomFace.RetargetingType.Custom:
generatedMapping = CustomAutoGeneratedMapping(customFace,
renderer.sharedMesh,
customFace._allowDuplicateMapping);
break;
default:
throw new InvalidEnumArgumentException($"Invalid {nameof(OVRCustomFace.RetargetingType)}");
}
if (generatedMapping != null)
{
Assert.AreEqual(generatedMapping.Length, renderer.sharedMesh.blendShapeCount);
if (customFace._mappings == null || customFace._mappings.Length != renderer.sharedMesh.blendShapeCount)
{
customFace._mappings =
new OVRFaceExpressions.FaceExpression[renderer.sharedMesh.blendShapeCount];
}
for (int i = 0; i < renderer.sharedMesh.blendShapeCount; ++i)
{
customFace._mappings[i] = generatedMapping[i];
}
}
}
catch (Exception e)
{
Debug.LogError($"Auto Map Face Error: {e.Message}");
}
}
public static void ClearBlendshapes(this OVRCustomFace customFace)
{
var renderer = customFace.GetComponent<SkinnedMeshRenderer>();
for (int i = 0; i < renderer.sharedMesh.blendShapeCount; ++i)
{
customFace._mappings[i] = OVRFaceExpressions.FaceExpression.Invalid;
}
}
internal static OVRFaceExpressions.FaceExpression[] OculusFaceAutoGenerateMapping(Mesh sharedMesh,
bool allowDuplicateMapping)
{
string[] oculusBlendShapeNames = Enum.GetNames(typeof(OVRFaceExpressions.FaceExpression));
OVRFaceExpressions.FaceExpression[] oculusFaceExpressions =
(OVRFaceExpressions.FaceExpression[])Enum.GetValues(typeof(OVRFaceExpressions.FaceExpression));
return AutoGenerateMapping(sharedMesh,
oculusBlendShapeNames, oculusFaceExpressions, allowDuplicateMapping);
}
internal static OVRFaceExpressions.FaceExpression[] CustomAutoGeneratedMapping(OVRCustomFace customFace,
Mesh sharedMesh,
bool allowDuplicateMapping)
{
string[] customBlendShapeNames;
OVRFaceExpressions.FaceExpression[] customFaceExpressions;
(customBlendShapeNames, customFaceExpressions) = customFace.GetCustomBlendShapeNameAndExpressionPairs();
return AutoGenerateMapping(sharedMesh,
customBlendShapeNames, customFaceExpressions, allowDuplicateMapping);
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2ba24b6fa2950c548a28fffacb177271
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,88 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// An <see cref="OVRSkeleton"/> whose bone transforms are user-assigned via the serialized
/// <see cref="CustomBones"/> list instead of being created at runtime.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_custom_skeleton")]
public class OVRCustomSkeleton : OVRSkeleton, ISerializationCallbackReceiver
{
    // Serialized name must stay "_customBones_V2" for asset compatibility.
    [HideInInspector][SerializeField] private List<Transform> _customBones_V2;

    /// <summary>One transform slot per <c>BoneId</c>; entries may be null until assigned.</summary>
    public List<Transform> CustomBones => _customBones_V2;

    /// <summary>
    /// List of skeleton structures to be retargeted to the supported format for body tracking.
    /// </summary>
    public enum RetargetingType
    {
        /// <summary>The default skeleton structure of the Oculus tracking system</summary>
        OculusSkeleton
    }

    [SerializeField, HideInInspector]
    internal RetargetingType retargetingType = RetargetingType.OculusSkeleton;

    /// <summary>Returns the user-assigned transform backing <paramref name="boneId"/>.</summary>
    protected override Transform GetBoneTransform(BoneId boneId)
    {
        return _customBones_V2[(int)boneId];
    }

#if UNITY_EDITOR
    // Raised when the bone list was resized during deserialization; consumed in OnValidate,
    // the earliest point where dirtying the object is safe.
    private bool _markDirtyOnValidate;

    private void OnValidate()
    {
        if (_markDirtyOnValidate)
        {
            UnityEditor.PrefabUtility.RecordPrefabInstancePropertyModifications(this);
            UnityEditor.EditorUtility.SetDirty(this);
            _markDirtyOnValidate = false;
        }
    }
#endif

    void ISerializationCallbackReceiver.OnBeforeSerialize()
    {
    }

    void ISerializationCallbackReceiver.OnAfterDeserialize()
    {
        EnsureBoneCapacity();
    }

    // Pads the serialized bone list with null entries until it holds one slot per BoneId.
    private void EnsureBoneCapacity()
    {
        if (_customBones_V2.Count == (int)BoneId.Max) return;
        for (var i = _customBones_V2.Count; i < (int)BoneId.Max; ++i)
        {
            _customBones_V2.Add(null);
        }
#if UNITY_EDITOR
        _markDirtyOnValidate = true;
#endif
    }

    internal override void SetSkeletonType(SkeletonType skeletonType)
    {
        base.SetSkeletonType(skeletonType);
        if (_customBones_V2 == null)
        {
            _customBones_V2 = new List<Transform>();
        }
        EnsureBoneCapacity();
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 674a40251fe8ad841b18517ac5209957
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -80
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,443 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//-------------------------------------------------------------------------------------
/// <summary>
/// Shows debug information on a heads-up display.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_debug_info")]
public class OVRDebugInfo : MonoBehaviour
{
#region GameObjects for Debug Information UIs
// Root canvas object and one child object per displayed statistic.
GameObject debugUIManager;
GameObject debugUIObject;
GameObject riftPresent;
GameObject fps;
GameObject ipd;
GameObject fov;
GameObject height;
GameObject depth;
GameObject resolutionEyeTexture;
GameObject latencies;
// Shared handle to the most recently created text box (see ComponentComposition).
GameObject texts;
#endregion
#region Debug strings
string strRiftPresent = null; // "VR DISABLED"
string strFPS = null; // "FPS: 0";
string strIPD = null; // "IPD: 0.000";
string strFOV = null; // "FOV: 0.0f";
string strHeight = null; // "Height: 0.0f";
string strDepth = null; // "Depth: 0.0f";
string strResolutionEyeTexture = null; // "Resolution : {0} x {1}"
string strLatencies = null; // "R: {0:F3} TW: {1:F3} PP: {2:F3} RE: {3:F3} TWE: {4:F3}"
#endregion
/// <summary>
/// Variables for FPS (frame count accumulated over updateInterval seconds)
/// </summary>
float updateInterval = 0.5f;
float accum = 0.0f;
int frames = 0;
float timeLeft = 0.0f;
/// <summary>
/// Managing for UI initialization
/// </summary>
bool initUIComponent = false;
bool isInited = false;
/// <summary>
/// UIs Y offset
/// </summary>
float offsetY = 55.0f;
/// <summary>
/// Managing for rift detection UI
/// </summary>
float riftPresentTimeout = 0.0f;
/// <summary>
/// Turn on / off VR variables
/// </summary>
bool showVRVars = false;
#region MonoBehaviour handler
/// <summary>
/// Initialization: builds the world-space canvas that hosts the debug HUD.
/// </summary>
void Awake()
{
// Create canvas for using new GUI
debugUIManager = new GameObject();
debugUIManager.name = "DebugUIManager";
// NOTE(review): assumes a GameObject named "LeftEyeAnchor" exists in the scene
// (e.g. from an OVRCameraRig); GameObject.Find returns null otherwise and this throws.
debugUIManager.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
RectTransform rectTransform = debugUIManager.AddComponent<RectTransform>();
rectTransform.sizeDelta = new Vector2(100f, 100f);
rectTransform.localScale = new Vector3(0.001f, 0.001f, 0.001f);
rectTransform.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
rectTransform.localEulerAngles = Vector3.zero;
Canvas canvas = debugUIManager.AddComponent<Canvas>();
canvas.renderMode = RenderMode.WorldSpace;
canvas.pixelPerfect = false;
}
/// <summary>
/// Updating VR variables and managing UI present.
/// Space toggles the HUD (legacy input manager only).
/// </summary>
void Update()
{
if (initUIComponent && !isInited)
{
InitUIComponents();
}
// TODO: add equivalent handling for the new Unity Input System.
#if ENABLE_LEGACY_INPUT_MANAGER
if (Input.GetKeyDown(KeyCode.Space) && riftPresentTimeout < 0.0f)
{
initUIComponent = true;
showVRVars ^= true;
}
#endif
UpdateDeviceDetection();
// Presenting VR variables
if (showVRVars)
{
debugUIManager.SetActive(true);
UpdateVariable();
UpdateStrings();
}
else
{
debugUIManager.SetActive(false);
}
}
/// <summary>
/// Initialize isInited value on OnDestroy
/// </summary>
void OnDestroy()
{
isInited = false;
}
#endregion
#region Private Functions
/// <summary>
/// Initialize UI GameObjects. Only rows whose corresponding string has been
/// populated by a preceding UpdateVariable() call are created.
/// </summary>
void InitUIComponents()
{
float posY = 0.0f;
int fontSize = 20;
debugUIObject = new GameObject();
debugUIObject.name = "DebugInfo";
debugUIObject.transform.parent = GameObject.Find("DebugUIManager").transform;
debugUIObject.transform.localPosition = new Vector3(0.0f, 100.0f, 0.0f);
debugUIObject.transform.localEulerAngles = Vector3.zero;
debugUIObject.transform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
// Print out for FPS
if (!string.IsNullOrEmpty(strFPS))
{
fps = VariableObjectManager(fps, "FPS", posY -= offsetY, strFPS, fontSize);
}
// Print out for IPD
if (!string.IsNullOrEmpty(strIPD))
{
ipd = VariableObjectManager(ipd, "IPD", posY -= offsetY, strIPD, fontSize);
}
// Print out for FOV
if (!string.IsNullOrEmpty(strFOV))
{
fov = VariableObjectManager(fov, "FOV", posY -= offsetY, strFOV, fontSize);
}
// Print out for Height
if (!string.IsNullOrEmpty(strHeight))
{
height = VariableObjectManager(height, "Height", posY -= offsetY, strHeight, fontSize);
}
// Print out for Depth
if (!string.IsNullOrEmpty(strDepth))
{
depth = VariableObjectManager(depth, "Depth", posY -= offsetY, strDepth, fontSize);
}
// Print out for Resolution of Eye Texture
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
{
resolutionEyeTexture = VariableObjectManager(resolutionEyeTexture, "Resolution", posY -= offsetY,
strResolutionEyeTexture, fontSize);
}
// Print out for Latency
if (!string.IsNullOrEmpty(strLatencies))
{
latencies = VariableObjectManager(latencies, "Latency", posY -= offsetY, strLatencies, 17);
posY = 0.0f;
}
initUIComponent = false;
isInited = true;
}
/// <summary>
/// Update VR Variables (refreshes all str* fields from OVRManager/OVRDisplay).
/// </summary>
void UpdateVariable()
{
UpdateIPD();
UpdateEyeHeightOffset();
UpdateEyeDepthOffset();
UpdateFOV();
UpdateResolutionEyeTexture();
UpdateLatencyValues();
UpdateFPS();
}
/// <summary>
/// Update Strings: copies the str* fields into the Text components created earlier.
/// </summary>
void UpdateStrings()
{
if (debugUIObject == null)
return;
if (!string.IsNullOrEmpty(strFPS))
fps.GetComponentInChildren<Text>().text = strFPS;
if (!string.IsNullOrEmpty(strIPD))
ipd.GetComponentInChildren<Text>().text = strIPD;
if (!string.IsNullOrEmpty(strFOV))
fov.GetComponentInChildren<Text>().text = strFOV;
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
resolutionEyeTexture.GetComponentInChildren<Text>().text = strResolutionEyeTexture;
if (!string.IsNullOrEmpty(strLatencies))
{
latencies.GetComponentInChildren<Text>().text = strLatencies;
latencies.GetComponentInChildren<Text>().fontSize = 14;
}
if (!string.IsNullOrEmpty(strHeight))
height.GetComponentInChildren<Text>().text = strHeight;
if (!string.IsNullOrEmpty(strDepth))
depth.GetComponentInChildren<Text>().text = strDepth;
}
/// <summary>
/// It's for rift present GUI.
/// NOTE(review): not called from anywhere in this class - possibly dead code
/// or invoked externally; verify before removing.
/// </summary>
void RiftPresentGUI(GameObject guiMainOBj)
{
riftPresent = ComponentComposition(riftPresent);
riftPresent.transform.SetParent(guiMainOBj.transform);
riftPresent.name = "RiftPresent";
RectTransform rectTransform = riftPresent.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, 0.0f, 0.0f);
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
rectTransform.localEulerAngles = Vector3.zero;
Text text = riftPresent.GetComponentInChildren<Text>();
text.text = strRiftPresent;
text.fontSize = 20;
}
/// <summary>
/// Updates the device detection (counts down the rift-present UI timeout).
/// </summary>
void UpdateDeviceDetection()
{
if (riftPresentTimeout >= 0.0f)
{
riftPresentTimeout -= Time.deltaTime;
}
}
/// <summary>
/// Object Manager for Variables: (re)creates one HUD row and positions it.
/// </summary>
/// <returns> gameobject for each Variable </returns>
GameObject VariableObjectManager(GameObject gameObject, string name, float posY, string str, int fontSize)
{
gameObject = ComponentComposition(gameObject);
gameObject.name = name;
gameObject.transform.SetParent(debugUIObject.transform);
RectTransform rectTransform = gameObject.GetComponent<RectTransform>();
// NOTE(review): posY is decremented by offsetY here AND at every call site
// ("posY -= offsetY" in InitUIComponents), so each row is offset twice per entry.
// Confirm this double offset is intended before changing either site.
rectTransform.localPosition = new Vector3(0.0f, posY -= offsetY, 0.0f);
Text text = gameObject.GetComponentInChildren<Text>();
text.text = str;
text.fontSize = fontSize;
gameObject.transform.localEulerAngles = Vector3.zero;
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
return gameObject;
}
/// <summary>
/// Component composition: builds a background image panel with a centered text child.
/// NOTE(review): the incoming GO parameter is immediately replaced by a new GameObject,
/// so the argument's previous value is discarded (the old object is never destroyed).
/// This method also overwrites the shared 'texts' field on every call.
/// </summary>
/// <returns> Composed gameobject. </returns>
GameObject ComponentComposition(GameObject GO)
{
GO = new GameObject();
GO.AddComponent<RectTransform>();
GO.AddComponent<CanvasRenderer>();
GO.AddComponent<Image>();
GO.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
GO.GetComponent<Image>().color = new Color(7f / 255f, 45f / 255f, 71f / 255f, 200f / 255f);
texts = new GameObject();
texts.AddComponent<RectTransform>();
texts.AddComponent<CanvasRenderer>();
texts.AddComponent<Text>();
texts.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
texts.GetComponent<Text>().font = Resources.GetBuiltinResource(typeof(Font), "Arial.ttf") as Font;
texts.GetComponent<Text>().alignment = TextAnchor.MiddleCenter;
texts.transform.SetParent(GO.transform);
texts.name = "TextBox";
return GO;
}
#endregion
#region Debugging variables handler
/// <summary>
/// Updates the IPD (interpupillary distance), displayed in millimeters.
/// </summary>
void UpdateIPD()
{
strIPD = System.String.Format("IPD (mm): {0:F4}", OVRManager.profile.ipd * 1000.0f);
}
/// <summary>
/// Updates the eye height offset.
/// </summary>
void UpdateEyeHeightOffset()
{
float eyeHeight = OVRManager.profile.eyeHeight;
strHeight = System.String.Format("Eye Height (m): {0:F3}", eyeHeight);
}
/// <summary>
/// Updates the eye depth offset.
/// </summary>
void UpdateEyeDepthOffset()
{
float eyeDepth = OVRManager.profile.eyeDepth;
strDepth = System.String.Format("Eye Depth (m): {0:F3}", eyeDepth);
}
/// <summary>
/// Updates the FOV (vertical, in degrees, from the left eye's render description).
/// </summary>
void UpdateFOV()
{
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
strFOV = System.String.Format("FOV (deg): {0:F3}", eyeDesc.fov.y);
}
/// <summary>
/// Updates resolution of eye texture, scaled by the current render viewport scale.
/// </summary>
void UpdateResolutionEyeTexture()
{
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.RightEye);
float scale = UnityEngine.XR.XRSettings.renderViewportScale;
float w = (int)(scale * (float)(leftEyeDesc.resolution.x + rightEyeDesc.resolution.x));
float h = (int)(scale * (float)Mathf.Max(leftEyeDesc.resolution.y, rightEyeDesc.resolution.y));
strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
}
/// <summary>
/// Updates latency values. On Android (outside the editor) this whole body is
/// compiled out, so strLatencies keeps its previous value there.
/// </summary>
void UpdateLatencyValues()
{
#if !UNITY_ANDROID || UNITY_EDITOR
OVRDisplay.LatencyData latency = OVRManager.display.latency;
if (latency.render < 0.000001f && latency.timeWarp < 0.000001f && latency.postPresent < 0.000001f)
strLatencies = System.String.Format("Latency values are not available.");
else
strLatencies = System.String.Format(
"Render: {0:F3} TimeWarp: {1:F3} Post-Present: {2:F3}\nRender Error: {3:F3} TimeWarp Error: {4:F3}",
latency.render,
latency.timeWarp,
latency.postPresent,
latency.renderError,
latency.timeWarpError);
#endif
}
/// <summary>
/// Updates the FPS, averaged over updateInterval seconds of unscaled time.
/// </summary>
void UpdateFPS()
{
timeLeft -= Time.unscaledDeltaTime;
accum += Time.unscaledDeltaTime;
++frames;
// Interval ended - update GUI text and start new interval
if (timeLeft <= 0.0)
{
// display two fractional digits (f2 format)
float fps = frames / accum;
strFPS = System.String.Format("FPS: {0:F2}", fps);
timeLeft += updateInterval;
accum = 0.0f;
frames = 0;
}
}
#endregion
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b71d1996d67004241a3b69960856ffcb
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,124 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Runtime.InteropServices;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using static OVRPlugin;
//-------------------------------------------------------------------------------------
/// <summary>
/// Collection of helper methods to facilitate data deserialization
/// </summary>
internal static class OVRDeserialize
{
    /// <summary>
    /// Reinterprets a raw byte buffer as a struct of type <typeparamref name="T"/>.
    /// </summary>
    /// <param name="bytes">Buffer holding the marshaled representation of a <typeparamref name="T"/>.</param>
    /// <returns>The deserialized structure.</returns>
    /// <exception cref="ArgumentNullException">Thrown if <paramref name="bytes"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown if <paramref name="bytes"/> is smaller than the
    /// marshaled size of <typeparamref name="T"/>. Without this guard, PtrToStructure would read
    /// past the end of the pinned buffer.</exception>
    public static T ByteArrayToStructure<T>(byte[] bytes) where T : struct
    {
        if (bytes == null)
        {
            throw new ArgumentNullException(nameof(bytes));
        }
        if (bytes.Length < Marshal.SizeOf<T>())
        {
            throw new ArgumentException(
                $"Buffer of {bytes.Length} byte(s) is too small to hold a {typeof(T)} ({Marshal.SizeOf<T>()} bytes).",
                nameof(bytes));
        }
        // Pin the managed array so its address is stable while we marshal from it.
        GCHandle handle = GCHandle.Alloc(bytes, GCHandleType.Pinned);
        try
        {
            // Generic overload avoids the boxing and cast of the non-generic PtrToStructure.
            return Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
        }
        finally
        {
            // Always unpin the buffer, even if marshaling throws.
            handle.Free();
        }
    }

    /// <summary>Payload for a display refresh rate change event.</summary>
    public struct DisplayRefreshRateChangedData
    {
        public float FromRefreshRate;
        public float ToRefreshRate;
    }

    /// <summary>Payload carrying the request id of a space query results event.</summary>
    public struct SpaceQueryResultsData
    {
        public UInt64 RequestId;
    }

    /// <summary>Payload for completion of a space query request.</summary>
    public struct SpaceQueryCompleteData
    {
        public UInt64 RequestId;
        public int Result;
    }

    /// <summary>Payload for completion of a scene capture request.</summary>
    public struct SceneCaptureCompleteData
    {
        public UInt64 RequestId;
        public int Result;
    }

    /// <summary>Payload for completion of a spatial anchor creation request.</summary>
    public struct SpatialAnchorCreateCompleteData
    {
        public UInt64 RequestId;
        public int Result;
        public UInt64 Space;
        public Guid Uuid;
    }

    /// <summary>Payload for completion of a space component enable/disable request.</summary>
    public struct SpaceSetComponentStatusCompleteData
    {
        public UInt64 RequestId;
        public int Result;
        public UInt64 Space;
        public Guid Uuid;
        public OVRPlugin.SpaceComponentType ComponentType;
        public int Enabled;
    }

    /// <summary>Payload for completion of a space save request.</summary>
    public struct SpaceSaveCompleteData
    {
        public UInt64 RequestId;
        public UInt64 Space;
        public int Result;
        public Guid Uuid;
    }

    /// <summary>Payload for completion of a space erase request.</summary>
    public struct SpaceEraseCompleteData
    {
        public UInt64 RequestId;
        public int Result;
        public Guid Uuid;
        public OVRPlugin.SpaceStorageLocation Location;
    }

    /// <summary>Payload carrying the result of a space share request.</summary>
    public struct SpaceShareResultData
    {
        public UInt64 RequestId;
        public int Result;
    }

    /// <summary>Payload carrying the result of a space list save request.</summary>
    public struct SpaceListSaveResultData
    {
        public UInt64 RequestId;
        public int Result;
    }
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 0905396e50b244dabd2dc877e7715704
timeCreated: 1652862802

View File

@ -0,0 +1,335 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Unity.Collections;
/// <summary>
/// Wraps an IEnumerable so it can be iterated without a managed enumerator allocation
/// when the underlying collection supports it (lists, hash sets, queues).
/// </summary>
/// <typeparam name="T">The type of item contained by the collection.</typeparam>
/// <seealso cref="OVRExtensions.ToNonAlloc{T}"/>
internal readonly struct OVREnumerable<T> : IEnumerable<T>
{
    readonly IEnumerable<T> _source;

    public OVREnumerable(IEnumerable<T> enumerable) => _source = enumerable;

    public Enumerator GetEnumerator() => new Enumerator(_source);

    IEnumerator<T> IEnumerable<T>.GetEnumerator() => GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

    /// <summary>
    /// Struct enumerator that dispatches to the cheapest available iteration strategy,
    /// chosen once at construction time.
    /// </summary>
    public struct Enumerator : IEnumerator<T>
    {
        enum Kind
        {
            None,
            List,
            Set,
            Queue,
            Enumerable,
        }

        readonly Kind _kind;

        // List strategy: index-based iteration, no enumerator object at all.
        readonly IReadOnlyList<T> _list;
        readonly int _listCount;
        int _listIndex;

        // HashSet/Queue strategies: their struct enumerators, stored by value.
        HashSet<T>.Enumerator _setEnumerator;
        Queue<T>.Enumerator _queueEnumerator;

        // Fallback strategy: a regular (heap-allocated) enumerator.
        readonly IEnumerator<T> _fallback;

        public Enumerator(IEnumerable<T> enumerable)
        {
            _list = null;
            _listCount = 0;
            _listIndex = -1;
            _setEnumerator = default;
            _queueEnumerator = default;
            _fallback = null;
            if (enumerable is IReadOnlyList<T> list)
            {
                _list = list;
                _listCount = list.Count;
                _kind = Kind.List;
            }
            else if (enumerable is HashSet<T> set)
            {
                _setEnumerator = set.GetEnumerator();
                _kind = Kind.Set;
            }
            else if (enumerable is Queue<T> queue)
            {
                _queueEnumerator = queue.GetEnumerator();
                _kind = Kind.Queue;
            }
            else
            {
                _fallback = enumerable.GetEnumerator();
                _kind = Kind.Enumerable;
            }
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool MoveNext()
        {
            switch (_kind)
            {
                case Kind.List:
                    ThrowIfListChanged();
                    return ++_listIndex < _listCount;
                case Kind.Set:
                    return _setEnumerator.MoveNext();
                case Kind.Queue:
                    return _queueEnumerator.MoveNext();
                case Kind.Enumerable:
                    return _fallback.MoveNext();
                default:
                    throw new InvalidOperationException($"Unsupported collection type {_kind}.");
            }
        }

        public void Reset()
        {
            if (_kind == Kind.List)
            {
                ThrowIfListChanged();
                _listIndex = -1;
            }
            else if (_kind == Kind.Enumerable)
            {
                _fallback.Reset();
            }
            // Set and Queue: their struct enumerators expose Reset only through the
            // interface, so this is a deliberate no-op for them.
        }

        public T Current
        {
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            get
            {
                switch (_kind)
                {
                    case Kind.List:
                        return _list[_listIndex];
                    case Kind.Set:
                        return _setEnumerator.Current;
                    case Kind.Queue:
                        return _queueEnumerator.Current;
                    case Kind.Enumerable:
                        return _fallback.Current;
                    default:
                        throw new InvalidOperationException($"Unsupported collection type {_kind}.");
                }
            }
        }

        object IEnumerator.Current => Current;

        public void Dispose()
        {
            switch (_kind)
            {
                case Kind.Set:
                    _setEnumerator.Dispose();
                    break;
                case Kind.Queue:
                    _queueEnumerator.Dispose();
                    break;
                case Kind.Enumerable:
                    _fallback.Dispose();
                    break;
            }
        }

        // Guards against the list being resized mid-iteration (a weaker version of the
        // version check a List<T> enumerator performs).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        void ThrowIfListChanged()
        {
            if (_listCount != _list.Count)
                throw new InvalidOperationException($"The list changed length during enumeration.");
        }
    }
}
static partial class OVRExtensions
{
/// <summary>
/// Allows the caller to enumerate an IEnumerable in a non-allocating way, if possible.
/// </summary>
/// <remarks>
/// <example>
/// If you have an IEnumerable, this will allocate IEnumerator:
/// <code><![CDATA[
/// void Foo(IEnumerable<T> collection) {
///   // Allocates an IEnumerator<T>
///   foreach (var item in collection) {
///     // do something with item
///   }
/// }
/// ]]></code>
/// However, often the IEnumerable is at least an IReadOnlyList, e.g., a List or Array, its elements can be accessed
/// using the index operator. This custom enumerable will do that:
/// <code><![CDATA[
/// void Foo(IEnumerable<T> collection) {
///   // Returns a non-allocating struct-based enumerator
///   foreach (var item in collection.ToNonAlloc()) {
///     // do something with item
///   }
/// }
/// ]]></code>
/// </example>
///
/// Note that some safeties cannot be guaranteed, such as mutations to a List during enumeration.
/// </remarks>
/// <param name="enumerable">The collection you wish to enumerate.</param>
/// <typeparam name="T">The type of item in the collection.</typeparam>
/// <returns>Returns a non-allocating enumerable.</returns>
internal static OVREnumerable<T> ToNonAlloc<T>(this IEnumerable<T> enumerable) => new OVREnumerable<T>(enumerable);
/// <summary>
/// Copies a collection to a `NativeArray`.
/// </summary>
/// <remarks>
/// This will copy <paramref name="enumerable"/> to a NativeArray in the most efficient way possible. Behavior of
/// <paramref name="enumerable"/> in order of decreasing efficiency:
/// - Fixed-size array: single native allocation + memcpy - no managed allocations
/// - IReadOnlyList: single native allocation + iteration - no managed allocations
/// - HashSet: single native allocation + iteration - no managed allocations
/// - Queue: single native allocation + iteration - no managed allocations
/// - IReadOnlyCollection: single native allocation - single managed IEnumerator allocation
/// - ICollection: single native allocation - single managed IEnumerator allocation
/// - Anything else: multiple native allocations (using a growth strategy) - single managed IEnumerator allocation
/// </remarks>
/// <param name="enumerable">The collection to copy to a NativeArray</param>
/// <param name="allocator">The allocator to use for the returned NativeArray</param>
/// <typeparam name="T">The type of the elements in the collection.</typeparam>
/// <returns>Returns a new NativeArray allocated with <paramref name="allocator"/> filled with the elements of
/// <paramref name="enumerable"/>.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="enumerable"/> is `null`.</exception>
internal static NativeArray<T> ToNativeArray<T>(this IEnumerable<T> enumerable, Allocator allocator)
where T : struct
{
if (enumerable == null)
throw new ArgumentNullException(nameof(enumerable));
switch (enumerable)
{
// Easiest case, since NativeArray supports this
case T[] fixedArray: return new NativeArray<T>(fixedArray, allocator);
// Good, since we can iterate the list without allocating
case IReadOnlyList<T> list:
{
var array = new NativeArray<T>(list.Count, allocator, NativeArrayOptions.UninitializedMemory);
for (var i = 0; i < array.Length; i++)
{
array[i] = list[i];
}
return array;
}
// HashSet can be iterated without allocation but doesn't conform to any interface that supports it, so
// it's a special case.
case HashSet<T> set:
{
var array = new NativeArray<T>(set.Count, allocator, NativeArrayOptions.UninitializedMemory);
var index = 0;
foreach (var item in set)
{
array[index++] = item;
}
return array;
}
// Same as HashSet
case Queue<T> queue:
{
var array = new NativeArray<T>(queue.Count, allocator, NativeArrayOptions.UninitializedMemory);
var index = 0;
foreach (var item in queue)
{
array[index++] = item;
}
return array;
}
// Less good because we need to allocate to iterate, but we can know the size beforehand
case IReadOnlyCollection<T> collection:
{
var array = new NativeArray<T>(collection.Count, allocator, NativeArrayOptions.UninitializedMemory);
var index = 0;
foreach (var item in collection)
{
array[index++] = item;
}
return array;
}
// Same as above. Duplicated rather than merged with the previous case because a type
// may implement ICollection<T> without implementing IReadOnlyCollection<T>.
case ICollection<T> collection:
{
var array = new NativeArray<T>(collection.Count, allocator, NativeArrayOptions.UninitializedMemory);
var index = 0;
foreach (var item in collection)
{
array[index++] = item;
}
return array;
}
// Fallback to worst case, but only enumerate the collection once.
// Scratch arrays use Allocator.Temp regardless of the requested allocator; only the
// final, correctly-sized copy below is made with `allocator`.
default:
{
var count = 0;
var capacity = 4;
var array = new NativeArray<T>(capacity, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
foreach (var item in enumerable)
{
if (count == capacity)
{
// Grow the array (doubling); the `using` disposes the old scratch buffer after the copy.
capacity *= 2;
NativeArray<T> newArray;
using (array)
{
newArray = new NativeArray<T>(capacity, Allocator.Temp,
NativeArrayOptions.UninitializedMemory);
NativeArray<T>.Copy(array, newArray, array.Length);
}
array = newArray;
}
array[count++] = item;
}
// Trim the scratch buffer to the exact element count and hand back an array owned
// by the caller's requested allocator; the `using` disposes the Temp scratch buffer.
using (array)
{
var result = new NativeArray<T>(count, allocator, NativeArrayOptions.UninitializedMemory);
NativeArray<T>.Copy(array, result, count);
return result;
}
}
}
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: eb3d883f0f6745ccaa8ed10c8a8c206d
timeCreated: 1671578447

View File

@ -0,0 +1,138 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using UnityEngine.Assertions;
/// <summary>
/// OVR Component to drive blend shapes on a <c>SkinnedMeshRenderer</c> based on Face Tracking provided by <see cref="OVRFaceExpressions"/>.
/// </summary>
/// <remarks>
/// Intended to be used as a base type that is inherited from, in order to provide mapping logic from blend shape indices.
/// The mapping of <see cref="OVRFaceExpressions.FaceExpression"/> to blend shapes is accomplished by overriding <see cref="OVRFace.GetFaceExpression(int)"/>.
/// Needs to be linked to an <see cref="OVRFaceExpressions"/> component to fetch tracking data from.
/// </remarks>
[RequireComponent(typeof(SkinnedMeshRenderer))]
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_face")]
public class OVRFace : MonoBehaviour
{
    /// <summary>
    /// The <see cref="OVRFaceExpressions"/> component that this face reads tracking weights from.
    /// </summary>
    public OVRFaceExpressions FaceExpressions
    {
        get => _faceExpressions;
        set => _faceExpressions = value;
    }

    /// <summary>
    /// Multiplier applied to the raw expression weights before they are written to the blend shapes.
    /// </summary>
    public float BlendShapeStrengthMultiplier
    {
        get => _blendShapeStrengthMultiplier;
        set => _blendShapeStrengthMultiplier = value;
    }

    // Convenience accessor; the renderer is guaranteed by [RequireComponent].
    internal SkinnedMeshRenderer RetrieveSkinnedMeshRenderer()
    {
        return GetComponent<SkinnedMeshRenderer>();
    }

    [SerializeField]
    [Tooltip("The OVRFaceExpressions Component to fetch the Face Tracking weights from that are to be applied")]
    protected internal OVRFaceExpressions _faceExpressions;

    [SerializeField]
    [Tooltip("A multiplier to the weights read from the OVRFaceExpressions to exaggerate facial expressions")]
    protected internal float _blendShapeStrengthMultiplier = 100.0f;

    // Cached in Start; the renderer lives on the same GameObject.
    private SkinnedMeshRenderer _skinnedMeshRenderer;

    /// <summary>The cached <c>SkinnedMeshRenderer</c> whose blend shapes are driven.</summary>
    protected SkinnedMeshRenderer SkinnedMesh => _skinnedMeshRenderer;

    /// <summary>
    /// Attempts to auto-wire <see cref="_faceExpressions"/> from the parent hierarchy when unassigned.
    /// </summary>
    protected virtual void Awake()
    {
        if (_faceExpressions == null)
        {
            _faceExpressions = SearchFaceExpressions();
            // Bug fix: SearchFaceExpressions returns null when no
            // OVRFaceExpressions exists in the parents; the original code
            // dereferenced `.name` unconditionally and threw an NRE.
            if (_faceExpressions != null)
            {
                Debug.Log($"Found OVRFaceExpression reference in {_faceExpressions.name} due to unassigned field.");
            }
            else
            {
                Debug.LogWarning("OVRFace could not find an OVRFaceExpressions component in its parent hierarchy.", this);
            }
        }
    }

    internal OVRFaceExpressions SearchFaceExpressions() => gameObject.GetComponentInParent<OVRFaceExpressions>();

    /// <summary>
    /// Validates that all required references are set and caches the <c>SkinnedMeshRenderer</c>.
    /// </summary>
    protected virtual void Start()
    {
        Assert.IsNotNull(_faceExpressions, "OVRFace requires OVRFaceExpressions to function.");
        _skinnedMeshRenderer = GetComponent<SkinnedMeshRenderer>();
        Assert.IsNotNull(_skinnedMeshRenderer);
        Assert.IsNotNull(_skinnedMeshRenderer.sharedMesh);
    }

    /// <summary>
    /// Writes the current tracked expression weights to the renderer's blend shapes each frame.
    /// </summary>
    protected virtual void Update()
    {
        // Null guard added: asserts in Start are compiled out of release
        // builds, so a missing reference would otherwise throw every frame.
        if (_faceExpressions == null || !_faceExpressions.FaceTrackingEnabled || !_faceExpressions.enabled)
        {
            return;
        }

        if (_faceExpressions.ValidExpressions)
        {
            int numBlendshapes = _skinnedMeshRenderer.sharedMesh.blendShapeCount;
            for (int blendShapeIndex = 0; blendShapeIndex < numBlendshapes; ++blendShapeIndex)
            {
                if (GetWeightValue(blendShapeIndex, out var currentWeight))
                {
                    // Unity blend shape weights are expressed in [0, 100].
                    _skinnedMeshRenderer.SetBlendShapeWeight(blendShapeIndex, Mathf.Clamp(currentWeight, 0f, 100f));
                }
            }
        }
    }

    /// <summary>
    /// Fetches the <see cref="OVRFaceExpressions.FaceExpression"/> for a given blend shape index on the shared mesh of the <c>SkinnedMeshRenderer</c> on the same component
    /// </summary>
    /// <remarks>
    /// Override this function to provide the mapping between blend shapes and face expressions
    /// </remarks>
    /// <param name="blendShapeIndex">The index of the blend shape, will be in-between 0 and the number of blend shapes on the shared mesh.</param>
    /// <returns>Returns the <see cref="OVRFaceExpressions.FaceExpression"/> to drive the blend shape identified by <paramref name="blendShapeIndex"/>.</returns>
    internal protected virtual OVRFaceExpressions.FaceExpression GetFaceExpression(int blendShapeIndex) =>
        OVRFaceExpressions.FaceExpression.Invalid;

    /// <summary>
    /// Calculates the value for the specific target blend shape of the shared mesh <c>SkinnedMeshRenderer</c>
    /// </summary>
    /// <param name="blendShapeIndex">Index of the blend shape of the shared mesh <c>SkinnedMeshRenderer</c></param>
    /// <param name="weightValue">Calculated value</param>
    /// <returns>true if value was calculated, false if no value available for that blend shape</returns>
    internal protected virtual bool GetWeightValue(int blendShapeIndex, out float weightValue)
    {
        OVRFaceExpressions.FaceExpression blendShapeToFaceExpression = GetFaceExpression(blendShapeIndex);
        // Invalid / out-of-range mappings mean "this blend shape is not driven".
        if (blendShapeToFaceExpression >= OVRFaceExpressions.FaceExpression.Max || blendShapeToFaceExpression < 0)
        {
            weightValue = 0;
            return false;
        }

        weightValue = _faceExpressions[blendShapeToFaceExpression] * _blendShapeStrengthMultiplier;
        return true;
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: aacaa4dee14bf5c4082b4db38c20b824
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -70
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,291 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// UI pointer driven by gaze input. Positions a cursor where the gaze ray hits,
/// fading it in over interactive elements and out otherwise.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_gaze_pointer")]
public class OVRGazePointer : OVRCursor
{
    private Transform gazeIcon; // the child transform that rotates according to our movement

    [Tooltip("Should the pointer be hidden when not over interactive objects.")]
    public bool hideByDefault = true;

    [Tooltip("Time after leaving interactive object before pointer fades.")]
    public float showTimeoutPeriod = 1;

    [Tooltip("Time after mouse pointer becoming inactive before pointer unfades.")]
    public float hideTimeoutPeriod = 0.1f;

    [Tooltip("Keep a faint version of the pointer visible while using a mouse")]
    public bool dimOnHideRequest = true;

    [Tooltip("Angular scale of pointer")]
    public float depthScaleMultiplier = 0.03f;

    public bool matchNormalOnPhysicsColliders;

    /// <summary>
    /// The gaze ray.
    /// </summary>
    public Transform rayTransform;

    /// <summary>
    /// Is gaze pointer currently visible
    /// </summary>
    public bool hidden { get; private set; }

    /// <summary>
    /// Current scale applied to pointer
    /// </summary>
    public float currentScale { get; private set; }

    /// <summary>
    /// Current depth of pointer from camera
    /// </summary>
    private float depth;

    // NOTE(review): never read or written in this class; kept for serialization/API compatibility.
    private float hideUntilTime;

    /// <summary>
    /// How many times position has been set this frame. Used to detect when there are no position sets in a frame.
    /// </summary>
    private int positionSetsThisFrame = 0;

    /// <summary>
    /// Last time code requested the pointer be shown. Usually when pointer passes over interactive elements.
    /// </summary>
    private float lastShowRequestTime;

    /// <summary>
    /// Last time pointer was requested to be hidden. Usually mouse pointer activity.
    /// </summary>
    private float lastHideRequestTime;

    // Optionally present GUI element displaying progress when using gaze-to-select mechanics
    private OVRProgressIndicator progressIndicator;

    private static OVRGazePointer _instance;

    public static OVRGazePointer instance
    {
        // If there's no GazePointer already in the scene, instantiate one now.
        get
        {
            if (_instance == null)
            {
                // Fixed: original wrapped the literal in string.Format with an
                // unused argument and no placeholder (and misspelled the word).
                Debug.Log("Instantiating GazePointer");
                _instance = (OVRGazePointer)GameObject.Instantiate(
                    (OVRGazePointer)Resources.Load("Prefabs/GazePointerRing", typeof(OVRGazePointer)));
            }

            return _instance;
        }
    }

    /// <summary>
    /// Used to determine alpha level of gaze cursor. Could also be used to determine cursor size, for example, as the cursor fades out.
    /// </summary>
    public float visibilityStrength
    {
        get
        {
            // It's possible there are reasons to show the cursor - such as it hovering over some UI - and reasons to hide
            // the cursor - such as another input method (e.g. mouse) being used. We take both of these in to account.
            float strengthFromShowRequest;
            if (hideByDefault)
            {
                // fade the cursor out with time
                strengthFromShowRequest = Mathf.Clamp01(1 - (Time.time - lastShowRequestTime) / showTimeoutPeriod);
            }
            else
            {
                // keep it fully visible
                strengthFromShowRequest = 1;
            }

            // Now consider factors requesting pointer to be hidden
            float strengthFromHideRequest;
            strengthFromHideRequest = (lastHideRequestTime + hideTimeoutPeriod > Time.time)
                ? (dimOnHideRequest ? 0.1f : 0)
                : 1;

            // Hide requests take priority
            return Mathf.Min(strengthFromShowRequest, strengthFromHideRequest);
        }
    }

    /// <summary>
    /// Progress value (0..1) forwarded to the optional OVRProgressIndicator, for gaze-to-select.
    /// </summary>
    public float SelectionProgress
    {
        get { return progressIndicator ? progressIndicator.currentProgress : 0; }
        set
        {
            if (progressIndicator)
                progressIndicator.currentProgress = value;
        }
    }

    public void Awake()
    {
        currentScale = 1;

        // Only allow one instance at runtime.
        if (_instance != null && _instance != this)
        {
            enabled = false;
            DestroyImmediate(this);
            return;
        }

        _instance = this;
        gazeIcon = transform.Find("GazeIcon");
        progressIndicator = transform.GetComponent<OVRProgressIndicator>();
    }

    void Update()
    {
        if (rayTransform == null && Camera.main != null)
            rayTransform = Camera.main.transform;

        // Bug fix: if there is no main camera (yet), rayTransform stays null
        // and the original code threw a NullReferenceException every frame.
        if (rayTransform == null)
            return;

        // Move the gaze cursor to keep it in the middle of the view
        transform.position = rayTransform.position + rayTransform.forward * depth;

        // Should we show or hide the gaze cursor?
        if (visibilityStrength == 0 && !hidden)
        {
            Hide();
        }
        else if (visibilityStrength > 0 && hidden)
        {
            Show();
        }
    }

    /// <summary>
    /// Set position and orientation of pointer
    /// </summary>
    /// <param name="pos">World-space hit position to place the cursor at.</param>
    /// <param name="normal">Surface normal at the hit point (only used when matchNormalOnPhysicsColliders is set).</param>
    public override void SetCursorStartDest(Vector3 _, Vector3 pos, Vector3 normal)
    {
        transform.position = pos;

        if (!matchNormalOnPhysicsColliders) normal = rayTransform.forward;

        // Set the rotation to match the normal of the surface it's on.
        Quaternion newRot = transform.rotation;
        newRot.SetLookRotation(normal, rayTransform.up);
        transform.rotation = newRot;

        // record depth so that distance doesn't pop when pointer leaves an object
        depth = (rayTransform.position - pos).magnitude;

        // set scale based on depth so the pointer keeps a constant angular size
        currentScale = depth * depthScaleMultiplier;
        transform.localScale = new Vector3(currentScale, currentScale, currentScale);

        positionSetsThisFrame++;
        RequestShow();
    }

    public override void SetCursorRay(Transform ray)
    {
        // We don't do anything here, because we already set this properly by default in Update.
    }

    void LateUpdate()
    {
        // This happens after all Updates so we know that if positionSetsThisFrame is zero then nothing set the position this frame
        if (positionSetsThisFrame == 0)
        {
            // No geometry intersections, so gazing into space. Make the cursor face directly at the camera
            Quaternion newRot = transform.rotation;
            newRot.SetLookRotation(rayTransform.forward, rayTransform.up);
            transform.rotation = newRot;
        }

        Quaternion iconRotation = gazeIcon.rotation;
        iconRotation.SetLookRotation(transform.rotation * new Vector3(0, 0, 1));
        gazeIcon.rotation = iconRotation;

        positionSetsThisFrame = 0;
    }

    /// <summary>
    /// Request the pointer be hidden
    /// </summary>
    public void RequestHide()
    {
        if (!dimOnHideRequest)
        {
            Hide();
        }

        lastHideRequestTime = Time.time;
    }

    /// <summary>
    /// Request the pointer be shown. Hide requests take priority
    /// </summary>
    public void RequestShow()
    {
        Show();
        lastShowRequestTime = Time.time;
    }

    // Disable/Enable child elements when we show/hide the cursor. For performance reasons.
    void Hide()
    {
        var cachedTransform = transform;
        for (int i = 0; i < cachedTransform.childCount; i++)
        {
            cachedTransform.GetChild(i).gameObject.SetActive(false);
        }

        // Cache the lookup; the original called GetComponent twice.
        var cursorRenderer = GetComponent<Renderer>();
        if (cursorRenderer)
            cursorRenderer.enabled = false;

        hidden = true;
    }

    void Show()
    {
        var cachedTransform = transform;
        for (int i = 0; i < cachedTransform.childCount; i++)
        {
            cachedTransform.GetChild(i).gameObject.SetActive(true);
        }

        var cursorRenderer = GetComponent<Renderer>();
        if (cursorRenderer)
            cursorRenderer.enabled = true;

        hidden = false;
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 30530ad0e40d0a64ea26d753ee4996ea
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Empty behaviour kept for backward compatibility. Deprecated since SDK 1.51.
/// </summary>
/// <remarks>
/// NOTE(review): presumably retained so that existing scenes/prefabs holding a
/// serialized reference to this component do not break — confirm before removing.
/// </remarks>
public class OVRGearVrControllerTest : MonoBehaviour
{
    // Deprecated since SDK 1.51
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7acc4619d4cb5e64e9ed05e5a7a8099f
timeCreated: 1486173066
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,174 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_grabbable")]
public class OVRGrabbable : MonoBehaviour
{
    [SerializeField]
    protected bool m_allowOffhandGrab = true;

    [SerializeField]
    protected bool m_snapPosition = false;

    [SerializeField]
    protected bool m_snapOrientation = false;

    [SerializeField]
    protected Transform m_snapOffset;

    [SerializeField]
    protected Collider[] m_grabPoints = null;

    // Kinematic state of the Rigidbody before it was grabbed, restored on release.
    protected bool m_grabbedKinematic = false;
    protected Collider m_grabbedCollider = null;
    protected OVRGrabber m_grabbedBy = null;

    /// <summary>
    /// If true, the object can currently be grabbed.
    /// </summary>
    public bool allowOffhandGrab => m_allowOffhandGrab;

    /// <summary>
    /// If true, the object is currently grabbed.
    /// </summary>
    public bool isGrabbed => m_grabbedBy != null;

    /// <summary>
    /// If true, the object's position will snap to match snapOffset when grabbed.
    /// </summary>
    public bool snapPosition => m_snapPosition;

    /// <summary>
    /// If true, the object's orientation will snap to match snapOffset when grabbed.
    /// </summary>
    public bool snapOrientation => m_snapOrientation;

    /// <summary>
    /// An offset relative to the OVRGrabber where this object can snap when grabbed.
    /// </summary>
    public Transform snapOffset => m_snapOffset;

    /// <summary>
    /// Returns the OVRGrabber currently grabbing this object.
    /// </summary>
    public OVRGrabber grabbedBy => m_grabbedBy;

    /// <summary>
    /// The transform at which this object was grabbed. Only valid while grabbed.
    /// </summary>
    public Transform grabbedTransform => m_grabbedCollider.transform;

    /// <summary>
    /// The Rigidbody of the collider that was used to grab this object. Only valid while grabbed.
    /// </summary>
    public Rigidbody grabbedRigidbody => m_grabbedCollider.attachedRigidbody;

    /// <summary>
    /// The contact point(s) where the object can be grabbed.
    /// </summary>
    public Collider[] grabPoints => m_grabPoints;

    /// <summary>
    /// Notifies the object that it has been grabbed.
    /// </summary>
    public virtual void GrabBegin(OVRGrabber hand, Collider grabPoint)
    {
        m_grabbedBy = hand;
        m_grabbedCollider = grabPoint;
        // The hand drives the object's pose directly while it is held.
        GetComponent<Rigidbody>().isKinematic = true;
    }

    /// <summary>
    /// Notifies the object that it has been released, restoring physics state
    /// and applying the hand's velocities so the object can be thrown.
    /// </summary>
    public virtual void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
    {
        var body = GetComponent<Rigidbody>();
        body.isKinematic = m_grabbedKinematic;
        body.velocity = linearVelocity;
        body.angularVelocity = angularVelocity;
        m_grabbedBy = null;
        m_grabbedCollider = null;
    }

    void Awake()
    {
        if (m_grabPoints.Length != 0)
            return;

        // No grab points configured: fall back to this object's own collider.
        Collider defaultGrabPoint = this.GetComponent<Collider>();
        if (defaultGrabPoint == null)
        {
            throw new ArgumentException("Grabbables cannot have zero grab points and no collider " +
                                        "-- please add a grab point or collider.");
        }

        m_grabPoints = new Collider[1] { defaultGrabPoint };
    }

    protected virtual void Start()
    {
        // Remember the pre-grab kinematic state so GrabEnd can restore it.
        m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
    }

    void OnDestroy()
    {
        // Notify the hand to release destroyed grabbables
        m_grabbedBy?.ForceRelease(this);
    }
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 02d61468f8b77ae4b92c344bc9a600fb
timeCreated: 1481833527
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,435 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Allows grabbing and throwing of objects with the OVRGrabbable component on them.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_grabber")]
public class OVRGrabber : MonoBehaviour
{
// Grip trigger thresholds for picking up objects, with some hysteresis.
public float grabBegin = 0.55f;
public float grabEnd = 0.35f;
// Demonstrates parenting the held object to the hand's transform when grabbed.
// When false, the grabbed object is moved every FixedUpdate using MovePosition.
// Note that MovePosition is required for proper physics simulation. If you set this to true, you can
// easily observe broken physics simulation by, for example, moving the bottom cube of a stacked
// tower and noting a complete loss of friction.
[SerializeField]
protected bool m_parentHeldObject = false;
// If true, this script will move the hand to the transform specified by m_parentTransform, using MovePosition in
// Update. This allows correct physics behavior, at the cost of some latency. In this usage scenario, you
// should NOT parent the hand to the hand anchor.
// (If m_moveHandPosition is false, this script will NOT update the game object's position.
// The hand gameObject can simply be attached to the hand anchor, which updates position in LateUpdate,
// gaining us a few ms of reduced latency.)
[SerializeField]
protected bool m_moveHandPosition = false;
// Child/attached transforms of the grabber, indicating where to snap held objects to (if you snap them).
// Also used for ranking grab targets in case of multiple candidates.
[SerializeField]
protected Transform m_gripTransform = null;
// Child/attached Colliders to detect candidate grabbable objects.
[SerializeField]
protected Collider[] m_grabVolumes = null;
// Should be OVRInput.Controller.LTouch or OVRInput.Controller.RTouch.
[SerializeField]
protected OVRInput.Controller m_controller;
// You can set this explicitly in the inspector if you're using m_moveHandPosition.
// Otherwise, you should typically leave this null and simply parent the hand to the hand anchor
// in your scene, using Unity's inspector.
[SerializeField]
protected Transform m_parentTransform;
[SerializeField]
protected GameObject m_player;
protected bool m_grabVolumeEnabled = true;
protected Vector3 m_lastPos;
protected Quaternion m_lastRot;
protected Quaternion m_anchorOffsetRotation;
protected Vector3 m_anchorOffsetPosition;
protected float m_prevFlex;
protected OVRGrabbable m_grabbedObj = null;
protected Vector3 m_grabbedObjectPosOff;
protected Quaternion m_grabbedObjectRotOff;
protected Dictionary<OVRGrabbable, int> m_grabCandidates = new Dictionary<OVRGrabbable, int>();
protected bool m_operatingWithoutOVRCameraRig = true;
/// <summary>
/// The currently grabbed object.
/// </summary>
public OVRGrabbable grabbedObject
{
get { return m_grabbedObj; }
}
public void ForceRelease(OVRGrabbable grabbable)
{
bool canRelease = (
(m_grabbedObj != null) &&
(m_grabbedObj == grabbable)
);
if (canRelease)
{
GrabEnd();
}
}
protected virtual void Awake()
{
m_anchorOffsetPosition = transform.localPosition;
m_anchorOffsetRotation = transform.localRotation;
if (!m_moveHandPosition)
{
// If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.
OVRCameraRig rig = transform.GetComponentInParent<OVRCameraRig>();
if (rig != null)
{
rig.UpdatedAnchors += (r) => { OnUpdatedAnchors(); };
m_operatingWithoutOVRCameraRig = false;
}
}
}
protected virtual void Start()
{
m_lastPos = transform.position;
m_lastRot = transform.rotation;
if (m_parentTransform == null)
{
m_parentTransform = gameObject.transform;
}
// We're going to setup the player collision to ignore the hand collision.
SetPlayerIgnoreCollision(gameObject, true);
}
// Using Update instead of FixedUpdate. Doing this in FixedUpdate causes visible judder even with
// somewhat high tick rates, because variable numbers of ticks per frame will give hand poses of
// varying recency. We want a single hand pose sampled at the same time each frame.
// Note that this can lead to its own side effects. For example, if m_parentHeldObject is false, the
// grabbed objects will be moved with MovePosition. If this is called in Update while the physics
// tick rate is dramatically different from the application frame rate, other objects touched by
// the held object will see an incorrect velocity (because the move will occur over the time of the
// physics tick, not the render tick), and will respond to the incorrect velocity with potentially
// visible artifacts.
virtual public void Update()
{
if (m_operatingWithoutOVRCameraRig)
{
OnUpdatedAnchors();
}
}
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
void OnUpdatedAnchors()
{
Vector3 destPos = m_parentTransform.TransformPoint(m_anchorOffsetPosition);
Quaternion destRot = m_parentTransform.rotation * m_anchorOffsetRotation;
if (m_moveHandPosition)
{
GetComponent<Rigidbody>().MovePosition(destPos);
GetComponent<Rigidbody>().MoveRotation(destRot);
}
if (!m_parentHeldObject)
{
MoveGrabbedObject(destPos, destRot);
}
m_lastPos = transform.position;
m_lastRot = transform.rotation;
float prevFlex = m_prevFlex;
// Update values from inputs
m_prevFlex = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller);
CheckForGrabOrRelease(prevFlex);
}
void OnDestroy()
{
if (m_grabbedObj != null)
{
GrabEnd();
}
}
void OnTriggerEnter(Collider otherCollider)
{
// Get the grab trigger
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ??
otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Add the grabbable
int refCount = 0;
m_grabCandidates.TryGetValue(grabbable, out refCount);
m_grabCandidates[grabbable] = refCount + 1;
}
void OnTriggerExit(Collider otherCollider)
{
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ??
otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Remove the grabbable
int refCount = 0;
bool found = m_grabCandidates.TryGetValue(grabbable, out refCount);
if (!found)
{
return;
}
if (refCount > 1)
{
m_grabCandidates[grabbable] = refCount - 1;
}
else
{
m_grabCandidates.Remove(grabbable);
}
}
protected void CheckForGrabOrRelease(float prevFlex)
{
if ((m_prevFlex >= grabBegin) && (prevFlex < grabBegin))
{
GrabBegin();
}
else if ((m_prevFlex <= grabEnd) && (prevFlex > grabEnd))
{
GrabEnd();
}
}
protected virtual void GrabBegin()
{
float closestMagSq = float.MaxValue;
OVRGrabbable closestGrabbable = null;
Collider closestGrabbableCollider = null;
// Iterate grab candidates and find the closest grabbable candidate
foreach (OVRGrabbable grabbable in m_grabCandidates.Keys)
{
bool canGrab = !(grabbable.isGrabbed && !grabbable.allowOffhandGrab);
if (!canGrab)
{
continue;
}
for (int j = 0; j < grabbable.grabPoints.Length; ++j)
{
Collider grabbableCollider = grabbable.grabPoints[j];
// Store the closest grabbable
Vector3 closestPointOnBounds = grabbableCollider.ClosestPointOnBounds(m_gripTransform.position);
float grabbableMagSq = (m_gripTransform.position - closestPointOnBounds).sqrMagnitude;
if (grabbableMagSq < closestMagSq)
{
closestMagSq = grabbableMagSq;
closestGrabbable = grabbable;
closestGrabbableCollider = grabbableCollider;
}
}
}
// Disable grab volumes to prevent overlaps
GrabVolumeEnable(false);
if (closestGrabbable != null)
{
if (closestGrabbable.isGrabbed)
{
closestGrabbable.grabbedBy.OffhandGrabbed(closestGrabbable);
}
m_grabbedObj = closestGrabbable;
m_grabbedObj.GrabBegin(this, closestGrabbableCollider);
m_lastPos = transform.position;
m_lastRot = transform.rotation;
// Set up offsets for grabbed object desired position relative to hand.
if (m_grabbedObj.snapPosition)
{
m_grabbedObjectPosOff = m_gripTransform.localPosition;
if (m_grabbedObj.snapOffset)
{
Vector3 snapOffset = m_grabbedObj.snapOffset.position;
if (m_controller == OVRInput.Controller.LTouch) snapOffset.x = -snapOffset.x;
m_grabbedObjectPosOff += snapOffset;
}
}
else
{
Vector3 relPos = m_grabbedObj.transform.position - transform.position;
relPos = Quaternion.Inverse(transform.rotation) * relPos;
m_grabbedObjectPosOff = relPos;
}
if (m_grabbedObj.snapOrientation)
{
m_grabbedObjectRotOff = m_gripTransform.localRotation;
if (m_grabbedObj.snapOffset)
{
m_grabbedObjectRotOff = m_grabbedObj.snapOffset.rotation * m_grabbedObjectRotOff;
}
}
else
{
Quaternion relOri = Quaternion.Inverse(transform.rotation) * m_grabbedObj.transform.rotation;
m_grabbedObjectRotOff = relOri;
}
// NOTE: force teleport on grab, to avoid high-speed travel to dest which hits a lot of other objects at high
// speed and sends them flying. The grabbed object may still teleport inside of other objects, but fixing that
// is beyond the scope of this demo.
MoveGrabbedObject(m_lastPos, m_lastRot, true);
// NOTE: This is to get around having to setup collision layers, but in your own project you might
// choose to remove this line in favor of your own collision layer setup.
SetPlayerIgnoreCollision(m_grabbedObj.gameObject, true);
if (m_parentHeldObject)
{
m_grabbedObj.transform.parent = transform;
}
}
}
protected virtual void MoveGrabbedObject(Vector3 pos, Quaternion rot, bool forceTeleport = false)
{
if (m_grabbedObj == null)
{
return;
}
Rigidbody grabbedRigidbody = m_grabbedObj.grabbedRigidbody;
Vector3 grabbablePosition = pos + rot * m_grabbedObjectPosOff;
Quaternion grabbableRotation = rot * m_grabbedObjectRotOff;
if (forceTeleport)
{
grabbedRigidbody.transform.position = grabbablePosition;
grabbedRigidbody.transform.rotation = grabbableRotation;
}
else
{
grabbedRigidbody.MovePosition(grabbablePosition);
grabbedRigidbody.MoveRotation(grabbableRotation);
}
}
protected void GrabEnd()
{
if (m_grabbedObj != null)
{
OVRPose localPose = new OVRPose
{
position = OVRInput.GetLocalControllerPosition(m_controller),
orientation = OVRInput.GetLocalControllerRotation(m_controller)
};
OVRPose offsetPose = new OVRPose
{
position = m_anchorOffsetPosition,
orientation = m_anchorOffsetRotation
};
localPose = localPose * offsetPose;
OVRPose trackingSpace = transform.ToOVRPose() * localPose.Inverse();
Vector3 linearVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
Vector3 angularVelocity =
trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);
GrabbableRelease(linearVelocity, angularVelocity);
}
// Re-enable grab volumes to allow overlap events
GrabVolumeEnable(true);
}
protected void GrabbableRelease(Vector3 linearVelocity, Vector3 angularVelocity)
{
m_grabbedObj.GrabEnd(linearVelocity, angularVelocity);
if (m_parentHeldObject) m_grabbedObj.transform.parent = null;
m_grabbedObj = null;
}
/// <summary>
/// Enables or disables the trigger colliders used for grab detection.
/// Disabling also clears the candidate set, since no overlap events will
/// arrive to keep it current.
/// </summary>
/// <param name="enabled">Desired enabled state for the grab volumes.</param>
protected virtual void GrabVolumeEnable(bool enabled)
{
    // Already in the requested state; nothing to do.
    if (m_grabVolumeEnabled == enabled)
    {
        return;
    }

    m_grabVolumeEnabled = enabled;
    foreach (Collider volume in m_grabVolumes)
    {
        volume.enabled = m_grabVolumeEnabled;
    }

    if (!m_grabVolumeEnabled)
    {
        // Stale candidates must be dropped explicitly while volumes are off.
        m_grabCandidates.Clear();
    }
}
/// <summary>
/// Called when the other hand grabs an object. If it took the object this
/// hand was holding, release it here with zero velocity (no throw).
/// </summary>
/// <param name="grabbable">The object the off hand just grabbed.</param>
protected virtual void OffhandGrabbed(OVRGrabbable grabbable)
{
    if (grabbable == m_grabbedObj)
    {
        GrabbableRelease(Vector3.zero, Vector3.zero);
    }
}
/// <summary>
/// Toggles collision between every non-trigger collider on the player rig and
/// every non-trigger collider on the grabbable, so held objects cannot shove
/// the player. No-op when no player rig is assigned.
/// </summary>
/// <param name="grabbable">Root object whose child colliders are affected.</param>
/// <param name="ignore">True to ignore collisions, false to restore them.</param>
protected void SetPlayerIgnoreCollision(GameObject grabbable, bool ignore)
{
    if (m_player == null)
    {
        return;
    }

    // Fetch both collider sets once. The original re-fetched the grabbable's
    // colliders inside the loop, repeating the hierarchy walk per player collider.
    Collider[] playerColliders = m_player.GetComponentsInChildren<Collider>();
    Collider[] grabbableColliders = grabbable.GetComponentsInChildren<Collider>();
    foreach (Collider pc in playerColliders)
    {
        if (pc.isTrigger)
        {
            continue;
        }
        foreach (Collider c in grabbableColliders)
        {
            if (!c.isTrigger)
            {
                Physics.IgnoreCollision(c, pc, ignore);
            }
        }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fd425c2d06f39bf4899d07c05d0f10eb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 200
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,200 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
/// <summary>
/// Diagnostic display with a regular grid of cubes for visual testing of
/// tracking and distortion.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_grid_cube")]
public class OVRGridCube : MonoBehaviour
{
    /// <summary>
    /// The key that toggles the grid of cubes.
    /// </summary>
    public KeyCode GridKey = KeyCode.G;

    // Root object that parents all generated cubes; created lazily on first toggle.
    private GameObject CubeGrid = null;

    // Whether the grid is currently shown.
    private bool CubeGridOn = false;

    // Tracking-lost color state for the current and previous frame, used to
    // recolor only on transitions.
    private bool CubeSwitchColorOld = false;
    private bool CubeSwitchColor = false;

    // Grid extents: cubes are placed from -size to +size on each axis.
    private int gridSizeX = 6;
    private int gridSizeY = 4;
    private int gridSizeZ = 6;

    // Spacing between cubes and the base cube edge length.
    private float gridScale = 0.3f;
    private float cubeScale = 0.03f;

    // Handle to OVRCameraRig
    private OVRCameraRig CameraController = null;

    /// <summary>
    /// Update this instance.
    /// </summary>
    void Update()
    {
        UpdateCubeGrid();
    }

    /// <summary>
    /// Sets the OVR camera controller.
    /// </summary>
    /// <param name="cameraController">Camera controller.</param>
    public void SetOVRCameraController(ref OVRCameraRig cameraController)
    {
        CameraController = cameraController;
    }

    // Handles the toggle key and recolors the grid when position tracking is
    // lost or regained.
    void UpdateCubeGrid()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        // Toggle the grid cube display on 'G'
        if (Input.GetKeyDown(GridKey))
        {
            if (CubeGridOn == false)
            {
                CubeGridOn = true;
                Debug.LogWarning("CubeGrid ON");
                if (CubeGrid != null)
                    CubeGrid.SetActive(true);
                else
                    CreateCubeGrid();
            }
            else
            {
                CubeGridOn = false;
                Debug.LogWarning("CubeGrid OFF");
                if (CubeGrid != null)
                    CubeGrid.SetActive(false);
            }
        }
#endif
        if (CubeGrid != null)
        {
            // Set cube colors to let user know if camera is tracking
            CubeSwitchColor = !OVRManager.tracker.isPositionTracked;
            if (CubeSwitchColor != CubeSwitchColorOld)
                CubeGridSwitchColor(CubeSwitchColor);
            CubeSwitchColorOld = CubeSwitchColor;
        }
    }

    // Builds the grid of primitive cubes around the origin. Non-axis cubes are
    // red, axis cubes are white and larger, the center cube is yellow and largest.
    void CreateCubeGrid()
    {
        Debug.LogWarning("Create CubeGrid");
        // Create the visual cube grid
        CubeGrid = new GameObject("CubeGrid");
        // Set a layer to target a specific camera
        CubeGrid.layer = CameraController.gameObject.layer;
        for (int x = -gridSizeX; x <= gridSizeX; x++)
        for (int y = -gridSizeY; y <= gridSizeY; y++)
        for (int z = -gridSizeZ; z <= gridSizeZ; z++)
        {
            // Set the cube type:
            // 0 = non-axis cube
            // 1 = axis cube
            // 2 = center cube
            int CubeType = 0;
            if ((x == 0 && y == 0) || (x == 0 && z == 0) || (y == 0 && z == 0))
            {
                if ((x == 0) && (y == 0) && (z == 0))
                    CubeType = 2;
                else
                    CubeType = 1;
            }
            GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
            // Colliders are unnecessary for a purely visual diagnostic grid.
            BoxCollider bc = cube.GetComponent<BoxCollider>();
            bc.enabled = false;
            cube.layer = CameraController.gameObject.layer;
            // No shadows
            Renderer r = cube.GetComponent<Renderer>();
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_5 || UNITY_4_6
            // Renderer.castShadows was deprecated starting in Unity 5.0
            r.castShadows = false;
#else
            r.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
#endif
            r.receiveShadows = false;
            // Cube line is white down the middle
            if (CubeType == 0)
                r.material.color = Color.red;
            else if (CubeType == 1)
                r.material.color = Color.white;
            else
                r.material.color = Color.yellow;
            cube.transform.position =
                new Vector3(((float)x * gridScale),
                    ((float)y * gridScale),
                    ((float)z * gridScale));
            float s = 0.7f;
            // Axis cubes are bigger
            if (CubeType == 1)
                s = 1.0f;
            // Center cube is the largest
            if (CubeType == 2)
                s = 2.0f;
            cube.transform.localScale =
                new Vector3(cubeScale * s, cubeScale * s, cubeScale * s);
            cube.transform.parent = CubeGrid.transform;
        }
    }

    /// <summary>
    /// Switch the Cube grid color.
    /// </summary>
    /// <param name="useAltColor">If true, non-axis cubes turn blue (tracking lost); otherwise red.</param>
    void CubeGridSwitchColor(bool useAltColor)
    {
        // Parameter renamed: the original shadowed the CubeSwitchColor field,
        // which made the field silently unreadable inside this method.
        Color c = useAltColor ? Color.blue : Color.red;
        var cachedTransform = CubeGrid.transform;
        for (int i = 0; i < cachedTransform.childCount; i++)
        {
            var child = cachedTransform.GetChild(i);
            Material m = child.GetComponent<Renderer>().material;
            // Only the red/blue (non-axis) cubes change; white and yellow axis
            // cubes keep their color.
            if (m.color == Color.red || m.color == Color.blue)
                m.color = c;
        }
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4988596c8a187f94f8e6a345ebb4254b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,281 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Per-hand source of hand-tracking data: tracked/pinch state, pointer pose,
/// hand scale and tracking confidence. Also feeds skeleton, skeleton-renderer,
/// mesh and mesh-renderer components through the provider interfaces below.
/// </summary>
public class OVRHand : MonoBehaviour,
    OVRSkeleton.IOVRSkeletonDataProvider,
    OVRSkeletonRenderer.IOVRSkeletonRendererDataProvider,
    OVRMesh.IOVRMeshDataProvider,
    OVRMeshRenderer.IOVRMeshRendererDataProvider
{
    // Which hand this component tracks; values mirror OVRPlugin.Hand.
    public enum Hand
    {
        None = OVRPlugin.Hand.None,
        HandLeft = OVRPlugin.Hand.HandLeft,
        HandRight = OVRPlugin.Hand.HandRight,
    }

    // Finger identifiers; values mirror OVRPlugin.HandFinger.
    public enum HandFinger
    {
        Thumb = OVRPlugin.HandFinger.Thumb,
        Index = OVRPlugin.HandFinger.Index,
        Middle = OVRPlugin.HandFinger.Middle,
        Ring = OVRPlugin.HandFinger.Ring,
        Pinky = OVRPlugin.HandFinger.Pinky,
        Max = OVRPlugin.HandFinger.Max,
    }

    // Coarse tracking quality; values mirror OVRPlugin.TrackingConfidence.
    public enum TrackingConfidence
    {
        Low = OVRPlugin.TrackingConfidence.Low,
        High = OVRPlugin.TrackingConfidence.High
    }

    [SerializeField]
    internal Hand HandType = Hand.None;

    // Optional parent for the generated pointer-pose transform.
    [SerializeField]
    private Transform _pointerPoseRoot = null;

    /// <summary>
    /// Determines if the controller should be hidden based on held state.
    /// </summary>
    public OVRInput.InputDeviceShowState m_showState = OVRInput.InputDeviceShowState.ControllerNotInHand;

    // Hidden GameObject created in Awake that hosts the PointerPose transform.
    private GameObject _pointerPoseGO;

    // Reusable buffer refreshed by GetHandState each Update/FixedUpdate.
    private OVRPlugin.HandState _handState = new OVRPlugin.HandState();

    public bool IsDataValid { get; private set; }
    public bool IsDataHighConfidence { get; private set; }
    public bool IsTracked { get; private set; }
    public bool IsSystemGestureInProgress { get; private set; }
    public bool IsPointerPoseValid { get; private set; }
    public Transform PointerPose { get; private set; }
    public float HandScale { get; private set; }
    public TrackingConfidence HandConfidence { get; private set; }
    public bool IsDominantHand { get; private set; }

    // Creates the pointer-pose transform and primes the hand state once so
    // consumers see valid data on the first frame.
    private void Awake()
    {
        _pointerPoseGO = new GameObject();
        PointerPose = _pointerPoseGO.transform;
        if (_pointerPoseRoot != null)
        {
            PointerPose.SetParent(_pointerPoseRoot, false);
        }
        GetHandState(OVRPlugin.Step.Render);
    }

    private void Update()
    {
        GetHandState(OVRPlugin.Step.Render);
    }

    private void FixedUpdate()
    {
        // NOTE(review): physics-step polling is skipped under OpenXR —
        // presumably the OpenXR path has no separate physics-rate data.
        if (OVRPlugin.nativeXrApi != OVRPlugin.XrApi.OpenXR)
        {
            GetHandState(OVRPlugin.Step.Physics);
        }
    }

    // Polls the plugin for the latest hand state and refreshes every public
    // property. On failure, resets all properties to safe defaults.
    private void GetHandState(OVRPlugin.Step step)
    {
        if (OVRPlugin.GetHandState(step, (OVRPlugin.Hand)HandType, ref _handState))
        {
            // Unpack status flags.
            IsTracked = (_handState.Status & OVRPlugin.HandStatus.HandTracked) != 0;
            IsSystemGestureInProgress = (_handState.Status & OVRPlugin.HandStatus.SystemGestureInProgress) != 0;
            IsPointerPoseValid = (_handState.Status & OVRPlugin.HandStatus.InputStateValid) != 0;
            IsDominantHand = (_handState.Status & OVRPlugin.HandStatus.DominantHand) != 0;
            PointerPose.localPosition = _handState.PointerPose.Position.FromFlippedZVector3f();
            PointerPose.localRotation = _handState.PointerPose.Orientation.FromFlippedZQuatf();
            HandScale = _handState.HandScale;
            HandConfidence = (TrackingConfidence)_handState.HandConfidence;
            IsDataValid = true;
            IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;

            // Hands cannot be doing pointer poses or system gestures when they are holding controllers
            OVRInput.ControllerInHandState controllerInHandState =
                OVRInput.GetControllerIsInHandState((OVRInput.Hand)HandType);
            if (controllerInHandState == OVRInput.ControllerInHandState.ControllerInHand)
            {
                // This hand is holding a controller
                IsSystemGestureInProgress = false;
                IsPointerPoseValid = false;
            }

            // Apply the show-state policy: invalidate the data whenever the
            // controller-in-hand state does not match the configured condition.
            switch (m_showState)
            {
                case OVRInput.InputDeviceShowState.Always:
                    // intentionally blank
                    break;
                case OVRInput.InputDeviceShowState.ControllerInHandOrNoHand:
                    if (controllerInHandState == OVRInput.ControllerInHandState.ControllerNotInHand)
                    {
                        IsDataValid = false;
                    }

                    break;
                case OVRInput.InputDeviceShowState.ControllerInHand:
                    if (controllerInHandState != OVRInput.ControllerInHandState.ControllerInHand)
                    {
                        IsDataValid = false;
                    }

                    break;
                case OVRInput.InputDeviceShowState.ControllerNotInHand:
                    if (controllerInHandState != OVRInput.ControllerInHandState.ControllerNotInHand)
                    {
                        IsDataValid = false;
                    }

                    break;
                case OVRInput.InputDeviceShowState.NoHand:
                    if (controllerInHandState != OVRInput.ControllerInHandState.NoHand)
                    {
                        IsDataValid = false;
                    }

                    break;
            }
        }
        else
        {
            // Query failed: reset everything to safe, untracked defaults.
            IsTracked = false;
            IsSystemGestureInProgress = false;
            IsPointerPoseValid = false;
            PointerPose.localPosition = Vector3.zero;
            PointerPose.localRotation = Quaternion.identity;
            HandScale = 1.0f;
            HandConfidence = TrackingConfidence.Low;
            IsDataValid = false;
            IsDataHighConfidence = false;
        }
    }

    // True while the given finger's pinch bit is set and the data is valid.
    public bool GetFingerIsPinching(HandFinger finger)
    {
        return IsDataValid && (((int)_handState.Pinches & (1 << (int)finger)) != 0);
    }

    // Pinch strength for the given finger; 0 when data is invalid or the
    // strength array has an unexpected length.
    public float GetFingerPinchStrength(HandFinger finger)
    {
        if (IsDataValid
            && _handState.PinchStrength != null
            && _handState.PinchStrength.Length == (int)OVRPlugin.HandFinger.Max)
        {
            return _handState.PinchStrength[(int)finger];
        }

        return 0.0f;
    }

    // Per-finger tracking confidence; Low when data is invalid or the
    // confidence array has an unexpected length.
    public TrackingConfidence GetFingerConfidence(HandFinger finger)
    {
        if (IsDataValid
            && _handState.FingerConfidences != null
            && _handState.FingerConfidences.Length == (int)OVRPlugin.HandFinger.Max)
        {
            return (TrackingConfidence)_handState.FingerConfidences[(int)finger];
        }

        return TrackingConfidence.Low;
    }

    // Maps this hand to the matching skeleton type for OVRSkeleton consumers.
    OVRSkeleton.SkeletonType OVRSkeleton.IOVRSkeletonDataProvider.GetSkeletonType()
    {
        switch (HandType)
        {
            case Hand.HandLeft:
                return OVRSkeleton.SkeletonType.HandLeft;
            case Hand.HandRight:
                return OVRSkeleton.SkeletonType.HandRight;
            case Hand.None:
            default:
                return OVRSkeleton.SkeletonType.None;
        }
    }

    // Supplies root pose, scale and bone rotations to OVRSkeleton; pose fields
    // are only populated when the data is valid.
    OVRSkeleton.SkeletonPoseData OVRSkeleton.IOVRSkeletonDataProvider.GetSkeletonPoseData()
    {
        var data = new OVRSkeleton.SkeletonPoseData();
        data.IsDataValid = IsDataValid;
        if (IsDataValid)
        {
            data.RootPose = _handState.RootPose;
            data.RootScale = _handState.HandScale;
            data.BoneRotations = _handState.BoneRotations;
            data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
        }

        return data;
    }

    // Supplies scale/confidence and the system-gesture flag to the skeleton renderer.
    OVRSkeletonRenderer.SkeletonRendererData OVRSkeletonRenderer.IOVRSkeletonRendererDataProvider.
        GetSkeletonRendererData()
    {
        var data = new OVRSkeletonRenderer.SkeletonRendererData();
        data.IsDataValid = IsDataValid;
        if (IsDataValid)
        {
            data.RootScale = _handState.HandScale;
            data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
            data.ShouldUseSystemGestureMaterial = IsSystemGestureInProgress;
        }

        return data;
    }

    // Maps this hand to the matching mesh type for OVRMesh consumers.
    OVRMesh.MeshType OVRMesh.IOVRMeshDataProvider.GetMeshType()
    {
        switch (HandType)
        {
            case Hand.None:
                return OVRMesh.MeshType.None;
            case Hand.HandLeft:
                return OVRMesh.MeshType.HandLeft;
            case Hand.HandRight:
                return OVRMesh.MeshType.HandRight;
            default:
                return OVRMesh.MeshType.None;
        }
    }

    // Supplies confidence and the system-gesture flag to the mesh renderer.
    OVRMeshRenderer.MeshRendererData OVRMeshRenderer.IOVRMeshRendererDataProvider.GetMeshRendererData()
    {
        var data = new OVRMeshRenderer.MeshRendererData();
        data.IsDataValid = IsDataValid;
        if (IsDataValid)
        {
            data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
            data.ShouldUseSystemGestureMaterial = IsSystemGestureInProgress;
        }

        return data;
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cb7623a8f0b49cf4dbaa40aea4d4be64
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -90
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,196 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
/// <summary>
/// Debug overlay that dumps controller, hand-state, skeleton and mesh query
/// results into a UI Text component every frame for visual inspection.
/// </summary>
public class OVRHandTest : MonoBehaviour
{
    /// <summary>
    /// Tracks a boolean value over time and highlights it (with asterisks)
    /// for a short window after it changes.
    /// </summary>
    public class BoolMonitor
    {
        public delegate bool BoolGenerator();

        private string m_name = "";
        private BoolGenerator m_generator;
        private bool m_prevValue = false;
        private bool m_currentValue = false;
        // True while within the highlight window after a value change.
        private bool m_currentValueRecentlyChanged = false;
        private float m_displayTimeout = 0.0f;
        private float m_displayTimer = 0.0f;

        public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
        {
            m_name = name;
            m_generator = generator;
            m_displayTimeout = displayTimeout;
        }

        // Samples the generator and (re)starts the highlight timer on change.
        public void Update()
        {
            m_prevValue = m_currentValue;
            m_currentValue = m_generator();
            if (m_currentValue != m_prevValue)
            {
                m_currentValueRecentlyChanged = true;
                m_displayTimer = m_displayTimeout;
            }

            if (m_displayTimer > 0.0f)
            {
                m_displayTimer -= Time.deltaTime;
                if (m_displayTimer <= 0.0f)
                {
                    m_currentValueRecentlyChanged = false;
                    m_displayTimer = 0.0f;
                }
            }
        }

        // Appends "name: value" to sb; recently-changed values are starred.
        public void AppendToStringBuilder(ref StringBuilder sb)
        {
            sb.Append(m_name);
            if (m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *True*\n");
            else if (m_currentValue)
                sb.Append(": True \n");
            else if (!m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *False*\n");
            else if (!m_currentValue)
                sb.Append(": False \n");
        }
    }

    // Target UI element; may be left unassigned, in which case output is discarded.
    public Text uiText;
    private List<BoolMonitor> monitors;
    private StringBuilder data;

    // Cached query buffers and results for both hands.
    private OVRPlugin.HandState hs_LH = new OVRPlugin.HandState();
    private OVRPlugin.HandState hs_RH = new OVRPlugin.HandState();
    private OVRPlugin.Skeleton skel_LH = new OVRPlugin.Skeleton();
    private OVRPlugin.Skeleton skel_RH = new OVRPlugin.Skeleton();
    private OVRPlugin.Mesh mesh_LH = new OVRPlugin.Mesh();
    private OVRPlugin.Mesh mesh_RH = new OVRPlugin.Mesh();
    private bool result_skel_LH = false;
    private bool result_skel_RH = false;
    private bool result_mesh_LH = false;
    private bool result_mesh_RH = false;

    // One-time setup: fetch the static skeleton/mesh data and build monitors.
    void Start()
    {
        if (uiText != null)
        {
            uiText.supportRichText = false;
        }

        data = new StringBuilder(2048);
        monitors = new List<BoolMonitor>()
        {
            new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
        };
        result_skel_LH = OVRPlugin.GetSkeleton(OVRPlugin.SkeletonType.HandLeft, out skel_LH);
        result_skel_RH = OVRPlugin.GetSkeleton(OVRPlugin.SkeletonType.HandRight, out skel_RH);
        result_mesh_LH = OVRPlugin.GetMesh(OVRPlugin.MeshType.HandLeft, out mesh_LH);
        result_mesh_RH = OVRPlugin.GetMesh(OVRPlugin.MeshType.HandRight, out mesh_RH);
    }

    // Previous frame's connected-controller string; used to flag changes.
    static string prevConnected = "";

    static BoolMonitor controllers = new BoolMonitor("Controllers Changed",
        () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });

    // Rebuilds the diagnostic text every frame.
    void Update()
    {
        data.Length = 0;
        OVRInput.Controller activeController = OVRInput.GetActiveController();
        string activeControllerName = activeController.ToString();
        data.AppendFormat("Active: {0}\n", activeControllerName);
        string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
        data.AppendFormat("Connected: {0}\n", connectedControllerNames);
        data.AppendFormat("PrevConnected: {0}\n", prevConnected);
        controllers.Update();
        controllers.AppendToStringBuilder(ref data);
        prevConnected = connectedControllerNames;
        Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
        data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);
        Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
        data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);
        data.AppendFormat("HandTrackingEnabled: {0}\n", OVRPlugin.GetHandTrackingEnabled());
        // Per-frame hand-state queries (render step) for both hands.
        bool result_hs_LH = OVRPlugin.GetHandState(OVRPlugin.Step.Render, OVRPlugin.Hand.HandLeft, ref hs_LH);
        data.AppendFormat("LH HS Query Res: {0}\n", result_hs_LH);
        data.AppendFormat("LH HS Status: {0}\n", hs_LH.Status);
        data.AppendFormat("LH HS Pose: {0}\n", hs_LH.RootPose);
        data.AppendFormat("LH HS HandConf: {0}\n", hs_LH.HandConfidence);
        bool result_hs_RH = OVRPlugin.GetHandState(OVRPlugin.Step.Render, OVRPlugin.Hand.HandRight, ref hs_RH);
        data.AppendFormat("RH HS Query Res: {0}\n", result_hs_RH);
        data.AppendFormat("RH HS Status: {0}\n", hs_RH.Status);
        data.AppendFormat("RH HS Pose: {0}\n", hs_RH.RootPose);
        data.AppendFormat("RH HS HandConf: {0}\n", hs_RH.HandConfidence);
        // Static skeleton/mesh results captured once in Start().
        data.AppendFormat("LH Skel Query Res: {0}\n", result_skel_LH);
        data.AppendFormat("LH Skel Type: {0}\n", skel_LH.Type);
        data.AppendFormat("LH Skel NumBones: {0}\n", skel_LH.NumBones);
        data.AppendFormat("RH Skel Query Res: {0}\n", result_skel_RH);
        data.AppendFormat("RH Skel Type: {0}\n", skel_RH.Type);
        data.AppendFormat("RH Skel NumBones: {0}\n", skel_RH.NumBones);
        data.AppendFormat("LH Mesh Query Res: {0}\n", result_mesh_LH);
        data.AppendFormat("LH Mesh Type: {0}\n", mesh_LH.Type);
        data.AppendFormat("LH Mesh NumVers: {0}\n", mesh_LH.NumVertices);
        data.AppendFormat("RH Mesh Query Res: {0}\n", result_mesh_RH);
        data.AppendFormat("RH Mesh Type: {0}\n", mesh_RH.Type);
        data.AppendFormat("RH Mesh NumVers: {0}\n", mesh_RH.NumVertices);
        for (int i = 0; i < monitors.Count; i++)
        {
            monitors[i].Update();
            monitors[i].AppendToStringBuilder(ref data);
        }

        if (uiText != null)
        {
            uiText.text = data.ToString();
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 12a1e4a79af8fe849b7a2769ff0d3886
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,947 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.EventSystems
{
/// <summary>
/// VR extension of PointerInputModule which supports gaze and controller pointing.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_input_module")]
public class OVRInputModule : PointerInputModule
{
// Ray source for VR pointing; typically the center-eye or controller anchor.
[Tooltip("Object which points with Z axis. E.g. CentreEyeAnchor from OVRCameraRig")]
public Transform rayTransform;

// Cursor visual positioned at the ray hit point by this module.
public OVRCursor m_Cursor;

[Tooltip("Gamepad button to act as gaze click")]
public OVRInput.Button joyPadClickButton = OVRInput.Button.One;

[Tooltip("Keyboard button to act as gaze click")]
public KeyCode gazeClickKey = KeyCode.Space;

[Header("Physics")]
[Tooltip("Perform an sphere cast to determine correct depth for gaze pointer")]
public bool performSphereCastForGazepointer;

[Header("Gamepad Stick Scroll")]
[Tooltip("Enable scrolling with the right stick on a gamepad")]
public bool useRightStickScroll = true;

[Tooltip("Deadzone for right stick to prevent accidental scrolling")]
public float rightStickDeadZone = 0.15f;

[Header("Touchpad Swipe Scroll")]
[Tooltip("Enable scrolling by swiping the touchpad")]
public bool useSwipeScroll = true;

[Tooltip("Minimum trackpad movement in pixels to start swiping")]
public float swipeDragThreshold = 2;

[Tooltip("Distance scrolled when swipe scroll occurs")]
public float swipeDragScale = 1f;

[Tooltip("Invert X axis on touchpad")]
public bool InvertSwipeXAxis = false;

// The raycaster that gets to do pointer interaction (e.g. with a mouse), gaze interaction always works
[NonSerialized]
public OVRRaycaster activeGraphicRaycaster;

[Header("Dragging")]
[Tooltip("Minimum pointer movement in degrees to start dragging")]
public float angleDragThreshold = 1;

// Radius used when performSphereCastForGazepointer is enabled.
[SerializeField]
private float m_SpherecastRadius = 1.0f;
// The following region contains code exactly the same as the implementation
// of StandaloneInputModule. It is copied here rather than inheriting from StandaloneInputModule
// because most of StandaloneInputModule is private so it isn't possible to easily derive from.
// Future changes from Unity to StandaloneInputModule will make it possible for this class to
// derive from StandaloneInputModule instead of PointerInput module.
//
// The following functions are not present in the following region since they have modified
// versions in the next region:
// Process
// ProcessMouseEvent
// UseMouse
#region StandaloneInputModule code
// Earliest time (Time.unscaledTime) at which the next throttled move event may fire.
private float m_NextAction;

// Mouse positions for the previous and current frame; used to detect movement.
private Vector2 m_LastMousePosition;
private Vector2 m_MousePosition;

protected OVRInputModule()
{
}

#if UNITY_EDITOR
protected override void Reset()
{
    allowActivationOnMobileDevice = true;
}
#endif

[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.",
    false)]
public enum InputMode
{
    Mouse,
    Buttons
}

// Retained for source compatibility only; always reports Mouse.
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.",
    false)]
public InputMode inputMode
{
    get { return InputMode.Mouse; }
}
[Header("Standalone Input Module")]
[SerializeField]
private string m_HorizontalAxis = "Horizontal";

/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
[SerializeField]
private string m_VerticalAxis = "Vertical";

/// <summary>
/// Name of the submit button.
/// </summary>
[SerializeField]
private string m_SubmitButton = "Submit";

/// <summary>
/// Name of the cancel button.
/// </summary>
[SerializeField]
private string m_CancelButton = "Cancel";

// Throttle rate for repeated move events while an axis is held.
[SerializeField]
private float m_InputActionsPerSecond = 10;

[SerializeField]
private bool m_AllowActivationOnMobileDevice;

/// <summary>
/// Whether the module may activate on devices without a mouse (see IsModuleSupported).
/// </summary>
public bool allowActivationOnMobileDevice
{
    get { return m_AllowActivationOnMobileDevice; }
    set { m_AllowActivationOnMobileDevice = value; }
}

/// <summary>
/// Maximum number of throttled move events emitted per second.
/// </summary>
public float inputActionsPerSecond
{
    get { return m_InputActionsPerSecond; }
    set { m_InputActionsPerSecond = value; }
}

/// <summary>
/// Name of the horizontal axis for movement (if axis events are used).
/// </summary>
public string horizontalAxis
{
    get { return m_HorizontalAxis; }
    set { m_HorizontalAxis = value; }
}

/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
public string verticalAxis
{
    get { return m_VerticalAxis; }
    set { m_VerticalAxis = value; }
}

/// <summary>Name of the submit button.</summary>
public string submitButton
{
    get { return m_SubmitButton; }
    set { m_SubmitButton = value; }
}

/// <summary>Name of the cancel button.</summary>
public string cancelButton
{
    get { return m_CancelButton; }
    set { m_CancelButton = value; }
}
/// <summary>
/// Caches the previous and current mouse position each frame so
/// ShouldActivateModule can detect pointer movement.
/// </summary>
public override void UpdateModule()
{
#if ENABLE_LEGACY_INPUT_MANAGER
    m_LastMousePosition = m_MousePosition;
    m_MousePosition = Input.mousePosition;
#endif
}
/// <summary>
/// The module is usable when mobile activation is allowed or a mouse is present.
/// Mouse presence is checked instead of touch support because a mouse attached
/// to a tablet should still route through this (non-touch) module.
/// </summary>
public override bool IsModuleSupported() =>
    m_AllowActivationOnMobileDevice || Input.mousePresent;
/// <summary>
/// Activates on any submit/cancel press, axis movement, mouse movement or
/// left-click this frame. Always false when the legacy input manager is disabled.
/// </summary>
public override bool ShouldActivateModule()
{
    if (!base.ShouldActivateModule())
        return false;
#if ENABLE_LEGACY_INPUT_MANAGER
    var shouldActivate = Input.GetButtonDown(m_SubmitButton);
    shouldActivate |= Input.GetButtonDown(m_CancelButton);
    shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_HorizontalAxis), 0.0f);
    shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_VerticalAxis), 0.0f);
    // Any mouse movement since last frame (positions cached in UpdateModule).
    shouldActivate |= (m_MousePosition - m_LastMousePosition).sqrMagnitude > 0.0f;
    shouldActivate |= Input.GetMouseButtonDown(0);
    return shouldActivate;
#else
    return false;
#endif
}
/// <summary>
/// Activates the module: snapshots the mouse position (legacy input only)
/// and ensures the event system has a selected object.
/// </summary>
public override void ActivateModule()
{
    base.ActivateModule();
#if ENABLE_LEGACY_INPUT_MANAGER
    m_MousePosition = Input.mousePosition;
    m_LastMousePosition = Input.mousePosition;
#endif
    // Prefer the existing selection; otherwise fall back to the event
    // system's designated first selection.
    GameObject toSelect = eventSystem.currentSelectedGameObject;
    if (toSelect == null)
    {
        toSelect = eventSystem.firstSelectedGameObject;
    }

    eventSystem.SetSelectedGameObject(toSelect, GetBaseEventData());
}
/// <summary>
/// Deactivates the module and clears the event system's current selection.
/// </summary>
public override void DeactivateModule()
{
    base.DeactivateModule();
    ClearSelection();
}
/// <summary>
/// Process submit keys: forwards submit/cancel presses to the currently
/// selected object. Returns whether the event data was consumed. No-ops
/// without a selection or without the legacy input manager.
/// </summary>
private bool SendSubmitEventToSelectedObject()
{
    if (eventSystem.currentSelectedGameObject == null)
        return false;
    var data = GetBaseEventData();
#if ENABLE_LEGACY_INPUT_MANAGER
    if (Input.GetButtonDown(m_SubmitButton))
        ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.submitHandler);
    if (Input.GetButtonDown(m_CancelButton))
        ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.cancelHandler);
#endif
    return data.used;
}
/// <summary>
/// Move events are rate-limited to m_InputActionsPerSecond; a fresh
/// button-style press on either axis bypasses the throttle. Always false
/// without the legacy input manager.
/// </summary>
/// <param name="time">Current unscaled time, compared against m_NextAction.</param>
private bool AllowMoveEventProcessing(float time)
{
#if ENABLE_LEGACY_INPUT_MANAGER
    return Input.GetButtonDown(m_HorizontalAxis)
           || Input.GetButtonDown(m_VerticalAxis)
           || time > m_NextAction;
#else
    return false;
#endif
}
/// <summary>
/// Reads the raw movement vector from the configured axes. On the frame a
/// button-style press begins, the axis value is snapped to -1/+1 so keyboard
/// input always produces a full-magnitude move.
/// </summary>
private Vector2 GetRawMoveVector()
{
#if ENABLE_LEGACY_INPUT_MANAGER
    Vector2 move = Vector2.zero;
    move.x = Input.GetAxisRaw(m_HorizontalAxis);
    move.y = Input.GetAxisRaw(m_VerticalAxis);
    if (Input.GetButtonDown(m_HorizontalAxis))
    {
        if (move.x < 0)
            move.x = -1f;
        if (move.x > 0)
            move.x = 1f;
    }

    if (Input.GetButtonDown(m_VerticalAxis))
    {
        if (move.y < 0)
            move.y = -1f;
        if (move.y > 0)
            move.y = 1f;
    }

    return move;
#else
    // Consistency fix: every other legacy Input query in this module is
    // guarded by ENABLE_LEGACY_INPUT_MANAGER; without the guard this method
    // would not compile/behave with the legacy input manager disabled.
    return Vector2.zero;
#endif
}
/// <summary>
/// Process keyboard events: emits a throttled move event toward the current
/// selection when an axis is active. Returns whether the event was consumed.
/// </summary>
private bool SendMoveEventToSelectedObject()
{
    float time = Time.unscaledTime;
    // Honor the rate limit unless a fresh axis press bypasses it.
    if (!AllowMoveEventProcessing(time))
        return false;
    Vector2 movement = GetRawMoveVector();
    // Debug.Log(m_ProcessingEvent.rawType + " axis:" + m_AllowAxisEvents + " value:" + "(" + x + "," + y + ")");
    var axisEventData = GetAxisEventData(movement.x, movement.y, 0.6f);
    if (!Mathf.Approximately(axisEventData.moveVector.x, 0f)
        || !Mathf.Approximately(axisEventData.moveVector.y, 0f))
    {
        ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, axisEventData, ExecuteEvents.moveHandler);
    }

    // Schedule the next allowed move event.
    m_NextAction = time + 1f / m_InputActionsPerSecond;
    return axisEventData.used;
}
/// <summary>
/// Sends an updateSelected event to the currently selected object, if any.
/// Returns whether the event data was consumed.
/// </summary>
private bool SendUpdateEventToSelectedObject()
{
    var selected = eventSystem.currentSelectedGameObject;
    if (selected == null)
    {
        return false;
    }

    var eventData = GetBaseEventData();
    ExecuteEvents.Execute(selected, eventData, ExecuteEvents.updateSelectedHandler);
    return eventData.used;
}
/// <summary>
/// Process the current mouse press.
/// </summary>
private void ProcessMousePress(MouseButtonEventData data)
{
var pointerEvent = data.buttonData;
var currentOverGo = pointerEvent.pointerCurrentRaycast.gameObject;
// PointerDown notification
if (data.PressedThisFrame())
{
pointerEvent.eligibleForClick = true;
pointerEvent.delta = Vector2.zero;
pointerEvent.dragging = false;
pointerEvent.useDragThreshold = true;
pointerEvent.pressPosition = pointerEvent.position;
if (pointerEvent.IsVRPointer())
{
#if ENABLE_LEGACY_INPUT_MANAGER
pointerEvent.SetSwipeStart(Input.mousePosition);
#endif
}
pointerEvent.pointerPressRaycast = pointerEvent.pointerCurrentRaycast;
DeselectIfSelectionChanged(currentOverGo, pointerEvent);
// search for the control that will receive the press
// if we can't find a press handler set the press
// handler to be what would receive a click.
var newPressed =
ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.pointerDownHandler);
// didnt find a press handler... search for a click handler
if (newPressed == null)
newPressed = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// Debug.Log("Pressed: " + newPressed);
float time = Time.unscaledTime;
if (newPressed == pointerEvent.lastPress)
{
var diffTime = time - pointerEvent.clickTime;
if (diffTime < 0.3f)
++pointerEvent.clickCount;
else
pointerEvent.clickCount = 1;
pointerEvent.clickTime = time;
}
else
{
pointerEvent.clickCount = 1;
}
pointerEvent.pointerPress = newPressed;
pointerEvent.rawPointerPress = currentOverGo;
pointerEvent.clickTime = time;
// Save the drag handler as well
pointerEvent.pointerDrag = ExecuteEvents.GetEventHandler<IDragHandler>(currentOverGo);
if (pointerEvent.pointerDrag != null)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent,
ExecuteEvents.initializePotentialDrag);
}
// PointerUp notification
if (data.ReleasedThisFrame())
{
// Debug.Log("Executing pressup on: " + pointer.pointerPress);
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
// Debug.Log("KeyCode: " + pointer.eventData.keyCode);
// see if we mouse up on the same element that we clicked on...
var pointerUpHandler = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// PointerClick and Drop events
if (pointerEvent.pointerPress == pointerUpHandler && pointerEvent.eligibleForClick)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerClickHandler);
}
else if (pointerEvent.pointerDrag != null)
{
ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.dropHandler);
}
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.endDragHandler);
pointerEvent.dragging = false;
pointerEvent.pointerDrag = null;
// redo pointer enter / exit to refresh state
// so that if we moused over something that ignored it before
// due to having pressed on something else
// it now gets it.
if (currentOverGo != pointerEvent.pointerEnter)
{
HandlePointerExitAndEnter(pointerEvent, null);
HandlePointerExitAndEnter(pointerEvent, currentOverGo);
}
}
}
#endregion
#region Modified StandaloneInputModule methods
/// <summary>
/// Process all mouse events. This mirrors the StandaloneInputModule version except that
/// it takes the MouseState as a parameter, allowing it to be used for both gaze and
/// mouse pointers.
/// </summary>
/// <param name="mouseData">Per-frame pointer/button state to process.</param>
private void ProcessMouseEvent(MouseState mouseData)
{
    bool anyPressed = mouseData.AnyPressesThisFrame();
    bool anyReleased = mouseData.AnyReleasesThisFrame();
    var leftButton = mouseData.GetButtonState(PointerEventData.InputButton.Left).eventData;
    if (!UseMouse(anyPressed, anyReleased, leftButton.buttonData))
        return;
    // The primary (left) button gets full treatment: press, move and drag.
    ProcessMousePress(leftButton);
    ProcessMove(leftButton.buttonData);
    ProcessDrag(leftButton.buttonData);
    // Right and middle buttons only receive press and drag handling.
    var rightButton = mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData;
    ProcessMousePress(rightButton);
    ProcessDrag(rightButton.buttonData);
    var middleButton = mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData;
    ProcessMousePress(middleButton);
    ProcessDrag(middleButton.buttonData);
    // Forward any scroll input to the hierarchy under the left pointer.
    if (!Mathf.Approximately(leftButton.buttonData.scrollDelta.sqrMagnitude, 0.0f))
    {
        var scrollHandler = ExecuteEvents.GetEventHandler<IScrollHandler>(
            leftButton.buttonData.pointerCurrentRaycast.gameObject);
        ExecuteEvents.ExecuteHierarchy(scrollHandler, leftButton.buttonData, ExecuteEvents.scrollHandler);
    }
}
/// <summary>
/// Process this InputModule. Same as the StandaloneInputModule version, except that it calls
/// ProcessMouseEvent twice, once for gaze pointers, and once for mouse pointers.
/// </summary>
public override void Process()
{
    // Give the currently selected object a chance to consume the update event first.
    bool usedEvent = SendUpdateEventToSelectedObject();
    if (eventSystem.sendNavigationEvents)
    {
        // Navigation (move/submit) only runs if the update event was not consumed.
        if (!usedEvent)
            usedEvent |= SendMoveEventToSelectedObject();
        if (!usedEvent)
            SendSubmitEventToSelectedObject();
    }
    // The gaze/ray pointer is processed on every platform.
    ProcessMouseEvent(GetGazePointerData());
    // Canvas-surface mouse pointers are skipped on Android builds.
#if !UNITY_ANDROID
    ProcessMouseEvent(GetCanvasPointerData());
#endif
}
/// <summary>
/// Decide if mouse events need to be processed this frame. Same as StandaloneInputModule except
/// that the IsPointerMoving method from this class is used, instead of the method on PointerEventData.
/// </summary>
/// <param name="pressed">True if any button was pressed this frame.</param>
/// <param name="released">True if any button was released this frame.</param>
/// <param name="pointerData">Event data of the pointer being considered.</param>
/// <returns>True when press, release, movement or scrolling requires processing.</returns>
private static bool UseMouse(bool pressed, bool released, PointerEventData pointerData)
{
    // Idiom fix: return the condition directly instead of if/return true/return false.
    return pressed || released || IsPointerMoving(pointerData) || pointerData.IsScrolling();
}
#endregion
/// <summary>
/// Copies the pointer state shared between ray-based pointer event instances.
/// </summary>
/// <param name="from">Source event data.</param>
/// <param name="to">Destination event data receiving the copied fields.</param>
protected void CopyFromTo(OVRPointerEventData @from, OVRPointerEventData @to)
{
    // The assignments are independent; order is irrelevant.
    @to.worldSpaceRay = @from.worldSpaceRay;
    @to.pointerEnter = @from.pointerEnter;
    @to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
    @to.scrollDelta = @from.scrollDelta;
    @to.delta = @from.delta;
    @to.position = @from.position;
}
/// <summary>
/// Copies the pointer state shared between canvas pointer event instances.
/// </summary>
/// <param name="from">Source event data.</param>
/// <param name="to">Destination event data receiving the copied fields.</param>
protected new void CopyFromTo(PointerEventData @from, PointerEventData @to)
{
    // The assignments are independent; order is irrelevant.
    @to.pointerEnter = @from.pointerEnter;
    @to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
    @to.scrollDelta = @from.scrollDelta;
    @to.delta = @from.delta;
    @to.position = @from.position;
}
// In the following region we extend the PointerEventData system implemented in PointerInputModule
// We define an additional dictionary for ray(e.g. gaze) based pointers. Mouse pointers still use the dictionary
// in PointerInputModule
#region PointerEventData pool
// Pool of OVRPointerEventData for ray-based (e.g. gaze) pointers. Mouse pointers
// still use the m_PointerData dictionary inherited from PointerInputModule.
protected Dictionary<int, OVRPointerEventData> m_VRRayPointerData = new Dictionary<int, OVRPointerEventData>();
/// <summary>
/// Looks up the ray-pointer event data for the given pointer id, optionally creating
/// a new entry when none exists.
/// </summary>
/// <param name="id">Pointer id used as the dictionary key.</param>
/// <param name="data">The found or newly created event data (null when absent and not created).</param>
/// <param name="create">When true, a missing entry is created and stored.</param>
/// <returns>True only when a new entry was created.</returns>
protected bool GetPointerData(int id, out OVRPointerEventData data, bool create)
{
    // Existing entry: nothing was created.
    if (m_VRRayPointerData.TryGetValue(id, out data))
        return false;
    if (!create)
        return false;
    data = new OVRPointerEventData(eventSystem)
    {
        pointerId = id,
    };
    m_VRRayPointerData.Add(id, data);
    return true;
}
/// <summary>
/// Clear pointer state for both types of pointer
/// </summary>
protected new void ClearSelection()
{
    var baseEventData = GetBaseEventData();
    // Raise pointer-exit on everything the mouse-style pointers are currently over.
    foreach (var pointer in m_PointerData.Values)
    {
        // clear all selection
        HandlePointerExitAndEnter(pointer, null);
    }
    // Do the same for the ray-based (gaze) pointers tracked by this module.
    foreach (var pointer in m_VRRayPointerData.Values)
    {
        // clear all selection
        HandlePointerExitAndEnter(pointer, null);
    }
    // NOTE(review): only m_PointerData is cleared here; m_VRRayPointerData entries
    // persist across calls — confirm whether that asymmetry is intentional.
    m_PointerData.Clear();
    eventSystem.SetSelectedGameObject(null, baseEventData);
}
#endregion
/// <summary>
/// For a RectTransform, calculate its normal in world space.
/// </summary>
/// <param name="rectTransform">The rect transform whose facing normal is wanted.</param>
/// <returns>The unit-length world-space normal of the rect.</returns>
static Vector3 GetRectTransformNormal(RectTransform rectTransform)
{
    // GetWorldCorners fills corners in order: 0 = bottom-left, 1 = top-left,
    // 2 = top-right, 3 = bottom-right.
    Vector3[] corners = new Vector3[4];
    rectTransform.GetWorldCorners(corners);
    Vector3 bottomEdge = corners[3] - corners[0];
    Vector3 leftEdge = corners[1] - corners[0];
    // Fix: removed a redundant second GetWorldCorners call that recomputed the
    // same corner array with no effect on the result.
    return Vector3.Cross(bottomEdge, leftEdge).normalized;
}
// Reused scratch MouseState, repopulated each frame by GetGazePointerData / GetCanvasPointerData.
private readonly MouseState m_MouseState = new MouseState();
// The following 2 functions are equivalent to PointerInputModule.GetMousePointerEventData but are customized to
// get data for ray pointers and canvas mouse pointers.
/// <summary>
/// State for a pointer controlled by a world space ray. E.g. gaze pointer
/// </summary>
/// <returns>The per-frame MouseState with left/right/middle slots populated; only
/// the left slot carries a real press state.</returns>
virtual protected MouseState GetGazePointerData()
{
    // Get the OVRRayPointerEventData reference
    OVRPointerEventData leftData;
    GetPointerData(kMouseLeftId, out leftData, true);
    leftData.Reset();
    // Now set the world space ray. This ray is what the user uses to point at UI elements
    leftData.worldSpaceRay = new Ray(rayTransform.position, rayTransform.forward);
    leftData.scrollDelta = GetExtraScrollDelta();
    // Populate some default values
    leftData.button = PointerEventData.InputButton.Left;
    leftData.useDragThreshold = true;
    // Perform raycast to find intersections with world
    eventSystem.RaycastAll(leftData, m_RaycastResultCache);
    var raycast = FindFirstRaycast(m_RaycastResultCache);
    leftData.pointerCurrentRaycast = raycast;
    m_RaycastResultCache.Clear();
    m_Cursor.SetCursorRay(rayTransform);
    OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
    // We're only interested in intersections from OVRRaycasters
    if (ovrRaycaster)
    {
        // The Unity UI system expects event data to have a screen position
        // so even though this raycast came from a world space ray we must get a screen
        // space position for the camera attached to this raycaster for compatibility
        leftData.position = ovrRaycaster.GetScreenPosition(raycast);
        // Find the world position and normal of the Graphic the ray intersected
        RectTransform graphicRect = raycast.gameObject.GetComponent<RectTransform>();
        if (graphicRect != null)
        {
            // Set our gaze indicator with this world position and normal
            Vector3 worldPos = raycast.worldPosition;
            Vector3 normal = GetRectTransformNormal(graphicRect);
            m_Cursor.SetCursorStartDest(rayTransform.position, worldPos, normal);
        }
    }
    // Now process physical raycast intersections
    OVRPhysicsRaycaster physicsRaycaster = raycast.module as OVRPhysicsRaycaster;
    if (physicsRaycaster)
    {
        Vector3 position = raycast.worldPosition;
        if (performSphereCastForGazepointer)
        {
            // Here we cast a sphere into the scene rather than a ray. This gives a more accurate depth
            // for positioning a circular gaze pointer
            List<RaycastResult> results = new List<RaycastResult>();
            physicsRaycaster.Spherecast(leftData, results, m_SpherecastRadius);
            if (results.Count > 0 && results[0].distance < raycast.distance)
            {
                position = results[0].worldPosition;
            }
        }
        leftData.position = physicsRaycaster.GetScreenPos(raycast.worldPosition);
        m_Cursor.SetCursorStartDest(rayTransform.position, position, raycast.worldNormal);
    }
    // Stick default data values in right and middle slots for compatibility
    // copy the appropriate data into right and middle slots
    OVRPointerEventData rightData;
    GetPointerData(kMouseRightId, out rightData, true);
    CopyFromTo(leftData, rightData);
    rightData.button = PointerEventData.InputButton.Right;
    OVRPointerEventData middleData;
    GetPointerData(kMouseMiddleId, out middleData, true);
    CopyFromTo(leftData, middleData);
    middleData.button = PointerEventData.InputButton.Middle;
    // Only the left slot gets a real press state; right/middle are always NotChanged.
    m_MouseState.SetButtonState(PointerEventData.InputButton.Left,
        GetGazeButtonState(), leftData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Right,
        PointerEventData.FramePressState.NotChanged, rightData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Middle,
        PointerEventData.FramePressState.NotChanged, middleData);
    return m_MouseState;
}
/// <summary>
/// Get state for pointer which is a pointer moving in world space across the surface of a world space canvas.
/// </summary>
/// <returns>The per-frame MouseState for the canvas pointer.</returns>
protected MouseState GetCanvasPointerData()
{
    // Get the OVRRayPointerEventData reference
    PointerEventData leftData;
    GetPointerData(kMouseLeftId, out leftData, true);
    leftData.Reset();
    // Setup default values here. Set position to zero because we don't actually know the pointer
    // positions. Each canvas knows the position of its canvas pointer.
    leftData.position = Vector2.zero;
#if ENABLE_LEGACY_INPUT_MANAGER
    leftData.scrollDelta = Input.mouseScrollDelta;
#endif
    leftData.button = PointerEventData.InputButton.Left;
    if (activeGraphicRaycaster)
    {
        // Let the active raycaster find intersections on its canvas
        activeGraphicRaycaster.RaycastPointer(leftData, m_RaycastResultCache);
        var raycast = FindFirstRaycast(m_RaycastResultCache);
        leftData.pointerCurrentRaycast = raycast;
        m_RaycastResultCache.Clear();
        OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
        if (ovrRaycaster) // raycast may not actually contain a result
        {
            // The Unity UI system expects event data to have a screen position
            // so even though this raycast came from a world space ray we must get a screen
            // space position for the camera attached to this raycaster for compatibility
            Vector2 position = ovrRaycaster.GetScreenPosition(raycast);
            // Delta is measured against the zeroed position set above.
            leftData.delta = position - leftData.position;
            leftData.position = position;
        }
    }
    // copy the appropriate data into right and middle slots
    PointerEventData rightData;
    GetPointerData(kMouseRightId, out rightData, true);
    CopyFromTo(leftData, rightData);
    rightData.button = PointerEventData.InputButton.Right;
    PointerEventData middleData;
    GetPointerData(kMouseMiddleId, out middleData, true);
    CopyFromTo(leftData, middleData);
    middleData.button = PointerEventData.InputButton.Middle;
#if ENABLE_LEGACY_INPUT_MANAGER
    // Button states come from the physical mouse buttons under the legacy input manager;
    // without it the button states are not updated here.
    m_MouseState.SetButtonState(PointerEventData.InputButton.Left, StateForMouseButton(0), leftData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Right, StateForMouseButton(1), rightData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, StateForMouseButton(2), middleData);
#endif
    return m_MouseState;
}
/// <summary>
/// New version of ShouldStartDrag implemented first in PointerInputModule. This version differs in that
/// for ray based pointers it makes a decision about whether a drag should start based on the angular change
/// the pointer has made so far, as seen from the camera. This also works when the world space ray is
/// translated rather than rotated, since the beginning and end of the movement are considered as angle from
/// the same point.
/// </summary>
/// <param name="pointerEvent">The pointer being evaluated for the start of a drag.</param>
/// <returns>True when the pointer has moved far enough for a drag to begin.</returns>
private bool ShouldStartDrag(PointerEventData pointerEvent)
{
    // Pointers that opt out of the drag threshold start dragging immediately.
    if (!pointerEvent.useDragThreshold)
        return true;
    if (!pointerEvent.IsVRPointer())
    {
        // Same as original behaviour for canvas based pointers
        return (pointerEvent.pressPosition - pointerEvent.position).sqrMagnitude >=
               eventSystem.pixelDragThreshold * eventSystem.pixelDragThreshold;
    }
    else
    {
#if UNITY_ANDROID && !UNITY_EDITOR // On android allow swiping to start drag
        if (useSwipeScroll && ((Vector3)pointerEvent.GetSwipeStart() - Input.mousePosition).magnitude > swipeDragThreshold)
        {
            return true;
        }
#endif
        // When it's not a screen space pointer we have to look at the angle it moved rather than the pixels distance
        // For gaze based pointing screen-space distance moved will always be near 0
        Vector3 cameraPos = pointerEvent.pressEventCamera.transform.position;
        Vector3 pressDir = (pointerEvent.pointerPressRaycast.worldPosition - cameraPos).normalized;
        Vector3 currentDir = (pointerEvent.pointerCurrentRaycast.worldPosition - cameraPos).normalized;
        // dot < cos(threshold) means the angle between press and current directions
        // exceeds angleDragThreshold degrees.
        return Vector3.Dot(pressDir, currentDir) < Mathf.Cos(Mathf.Deg2Rad * (angleDragThreshold));
    }
}
/// <summary>
/// Replacement for PointerEventData.IsPointerMoving that treats every ray-based pointer
/// as always moving (real-world ray input devices are always moving), while deferring
/// to the standard check for screen-space pointers.
///
/// If PointerEventData.IsPointerMoving were virtual this could simply be overridden
/// in OVRRayPointerEventData.
/// </summary>
/// <param name="pointerEvent">The pointer to examine.</param>
/// <returns>True when the pointer should be considered to be moving.</returns>
static bool IsPointerMoving(PointerEventData pointerEvent)
{
    return pointerEvent.IsVRPointer() || pointerEvent.IsPointerMoving();
}
/// <summary>
/// Adjusts a pointer position for drag events using touchpad swipe input on Android.
/// On other platforms, or when swipe scrolling is disabled, the position is returned unchanged.
/// </summary>
/// <param name="originalPosition">The unadjusted pointer position.</param>
/// <param name="pointerEvent">The pointer whose swipe-start anchor is consulted.</param>
/// <returns>The (possibly swipe-adjusted) position.</returns>
protected Vector2 SwipeAdjustedPosition(Vector2 originalPosition, PointerEventData pointerEvent)
{
#if UNITY_ANDROID && !UNITY_EDITOR
    // On android we use the touchpad position (accessed through Input.mousePosition) to modify
    // the effective cursor position for events related to dragging. This allows the user to
    // use the touchpad to drag draggable UI elements
    if (useSwipeScroll)
    {
        Vector2 delta = (Vector2)Input.mousePosition - pointerEvent.GetSwipeStart();
        // Optionally mirror the horizontal swipe direction.
        if (InvertSwipeXAxis)
            delta.x *= -1;
        return originalPosition + delta * swipeDragScale;
    }
#endif
    return originalPosition;
}
/// <summary>
/// Exactly the same as the code from PointerInputModule, except that we call our own
/// IsPointerMoving.
///
/// This would also not be necessary if PointerEventData.IsPointerMoving was virtual
/// </summary>
/// <param name="pointerEvent">The pointer whose drag state is processed this frame.</param>
protected override void ProcessDrag(PointerEventData pointerEvent)
{
    Vector2 originalPosition = pointerEvent.position;
    bool moving = IsPointerMoving(pointerEvent);
    // Begin a drag once the pointer moves, has a drag target, and passes the threshold.
    if (moving && pointerEvent.pointerDrag != null
        && !pointerEvent.dragging
        && ShouldStartDrag(pointerEvent))
    {
        if (pointerEvent.IsVRPointer())
        {
            //adjust the position used based on swiping action. Allowing the user to
            //drag items by swiping on the touchpad
            pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
        }
        ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.beginDragHandler);
        pointerEvent.dragging = true;
    }
    // Drag notification
    if (pointerEvent.dragging && moving && pointerEvent.pointerDrag != null)
    {
        if (pointerEvent.IsVRPointer())
        {
            pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
        }
        // Before doing drag we should cancel any pointer down state
        // And clear selection!
        if (pointerEvent.pointerPress != pointerEvent.pointerDrag)
        {
            ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
            pointerEvent.eligibleForClick = false;
            pointerEvent.pointerPress = null;
            pointerEvent.rawPointerPress = null;
        }
        ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.dragHandler);
    }
}
/// <summary>
/// Determine this frame's press/release state for the button driving the gaze pointer.
/// </summary>
/// <returns>The FramePressState describing this frame's click activity.</returns>
virtual protected PointerEventData.FramePressState GetGazeButtonState()
{
    //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
    var pressed = Input.GetKeyDown(gazeClickKey) || OVRInput.GetDown(joyPadClickButton);
    var released = Input.GetKeyUp(gazeClickKey) || OVRInput.GetUp(joyPadClickButton);
#if UNITY_ANDROID && !UNITY_EDITOR
    pressed |= Input.GetMouseButtonDown(0);
    released |= Input.GetMouseButtonUp(0);
#endif
#else
    var pressed = OVRInput.GetDown(joyPadClickButton);
    var released = OVRInput.GetUp(joyPadClickButton);
#endif
    // A press and a release observed in the same frame map to PressedAndReleased.
    if (pressed)
    {
        return released
            ? PointerEventData.FramePressState.PressedAndReleased
            : PointerEventData.FramePressState.Pressed;
    }
    return released
        ? PointerEventData.FramePressState.Released
        : PointerEventData.FramePressState.NotChanged;
}
/// <summary>
/// Get extra scroll delta from the gamepad's right thumbstick, applying a per-axis dead zone.
/// </summary>
/// <returns>The scroll delta, or (0, 0) when right-stick scrolling is disabled.</returns>
protected Vector2 GetExtraScrollDelta()
{
    if (!useRightStickScroll)
        return new Vector2();
    Vector2 stick = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
    // Zero out each axis independently while it sits inside the dead zone.
    if (Mathf.Abs(stick.x) < rightStickDeadZone) stick.x = 0;
    if (Mathf.Abs(stick.y) < rightStickDeadZone) stick.y = 0;
    return stick;
}
};
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8f1a9a1d119a5944aacfb87d1ec283a2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,179 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using Unity.Collections;
using Unity.Jobs;
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// Retrieves mesh data (e.g. a tracked-hand mesh) from OVRPlugin and converts it into a
/// Unity Mesh that other components can consume via the <see cref="Mesh"/> property.
/// </summary>
public class OVRMesh : MonoBehaviour
{
    /// <summary>
    /// Implemented by components that can tell this OVRMesh which mesh type to load.
    /// </summary>
    public interface IOVRMeshDataProvider
    {
        MeshType GetMeshType();
    }

    /// <summary>Mesh types that can be requested from OVRPlugin.</summary>
    public enum MeshType
    {
        None = OVRPlugin.MeshType.None,
        HandLeft = OVRPlugin.MeshType.HandLeft,
        HandRight = OVRPlugin.MeshType.HandRight,
    }

    [SerializeField]
    private IOVRMeshDataProvider _dataProvider;

    [SerializeField]
    private MeshType _meshType = MeshType.None;

    // Built once by Initialize(); exposed through the Mesh property.
    private Mesh _mesh;

    /// <summary>True once the mesh has been successfully retrieved and converted.</summary>
    public bool IsInitialized { get; private set; }

    /// <summary>The generated Unity mesh; null until initialization succeeds.</summary>
    public Mesh Mesh
    {
        get => _mesh;
    }

    /// <summary>Overrides the serialized mesh type.</summary>
    internal void SetMeshType(MeshType type)
    {
        _meshType = type;
    }

    private void Awake()
    {
        // Fall back to a provider on this GameObject if none was assigned in the inspector.
        if (_dataProvider == null)
        {
            _dataProvider = GetComponent<IOVRMeshDataProvider>();
        }

        // A provider, when present, decides the mesh type.
        if (_dataProvider != null)
        {
            _meshType = _dataProvider.GetMeshType();
        }

        if (ShouldInitialize())
        {
            Initialize(_meshType);
        }
    }

    // Returns true when initialization is both needed and currently possible.
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }

        if (_meshType == MeshType.None)
        {
            return false;
        }

        if (_meshType == MeshType.HandLeft || _meshType == MeshType.HandRight)
        {
#if UNITY_EDITOR
            // In the editor only attempt hand meshes while hand tracking is connected;
            // on device the plugin call is attempted unconditionally.
            return OVRInput.IsControllerConnected(OVRInput.Controller.Hands);
#else
            return true;
#endif
        }

        return true;
    }

    // Fetches the raw mesh from OVRPlugin and converts it; sets IsInitialized on success.
    private void Initialize(MeshType meshType)
    {
        _mesh = new Mesh();

        // Fix: use the meshType parameter instead of silently reading the _meshType
        // field (the parameter was previously ignored). All existing call sites pass
        // _meshType, so their behavior is unchanged.
        if (OVRPlugin.GetMesh((OVRPlugin.MeshType)meshType, out var ovrpMesh))
        {
            TransformOvrpMesh(ovrpMesh, _mesh);
            IsInitialized = true;
        }
    }

    // Converts an OVRPlugin.Mesh into the supplied Unity Mesh using two parallel jobs:
    // one transforms per-vertex attributes, the other widens and reverses the 16-bit
    // index buffer (see OVRMeshJobs).
    private void TransformOvrpMesh(OVRPlugin.Mesh ovrpMesh, Mesh mesh)
    {
        int numVertices = (int)ovrpMesh.NumVertices;
        int numIndices = (int)ovrpMesh.NumIndices;

        // Wrap the managed plugin arrays as pinned, no-copy NativeArrays and allocate
        // temporary output buffers for the jobs; all are released when the usings close.
        using (var verticesNativeArray =
               new OVRMeshJobs.NativeArrayHelper<OVRPlugin.Vector3f>(ovrpMesh.VertexPositions, numVertices))
        using (var normalsNativeArray =
               new OVRMeshJobs.NativeArrayHelper<OVRPlugin.Vector3f>(ovrpMesh.VertexNormals, numVertices))
        using (var uvNativeArray =
               new OVRMeshJobs.NativeArrayHelper<OVRPlugin.Vector2f>(ovrpMesh.VertexUV0, numVertices))
        using (var weightsNativeArray =
               new OVRMeshJobs.NativeArrayHelper<OVRPlugin.Vector4f>(ovrpMesh.BlendWeights, numVertices))
        using (var indicesNativeArray =
               new OVRMeshJobs.NativeArrayHelper<OVRPlugin.Vector4s>(ovrpMesh.BlendIndices, numVertices))
        using (var trianglesNativeArray = new OVRMeshJobs.NativeArrayHelper<short>(ovrpMesh.Indices, numIndices))
        using (var vertices = new NativeArray<Vector3>(numVertices, Allocator.TempJob))
        using (var normals = new NativeArray<Vector3>(numVertices, Allocator.TempJob))
        using (var uv = new NativeArray<Vector2>(numVertices, Allocator.TempJob))
        using (var boneWeights = new NativeArray<BoneWeight>(numVertices, Allocator.TempJob))
        using (var triangles = new NativeArray<uint>(numIndices, Allocator.TempJob))
        {
            var job = new OVRMeshJobs.TransformToUnitySpaceJob
            {
                Vertices = vertices,
                Normals = normals,
                UV = uv,
                BoneWeights = boneWeights,
                MeshVerticesPosition = verticesNativeArray.UnityNativeArray,
                MeshNormals = normalsNativeArray.UnityNativeArray,
                MeshUV = uvNativeArray.UnityNativeArray,
                MeshBoneWeights = weightsNativeArray.UnityNativeArray,
                MeshBoneIndices = indicesNativeArray.UnityNativeArray
            };
            var jobTransformTriangle = new OVRMeshJobs.TransformTrianglesJob
            {
                Triangles = triangles,
                MeshIndices = trianglesNativeArray.UnityNativeArray,
                NumIndices = numIndices
            };

            // Schedule both jobs and block until both have finished before reading results.
            var handle = job.Schedule(numVertices, 20);
            var handleTriangleJob = jobTransformTriangle.Schedule(numIndices, 60);
            JobHandle.CombineDependencies(handle, handleTriangleJob).Complete();

            mesh.SetVertices(job.Vertices);
            mesh.SetNormals(job.Normals);
            mesh.SetUVs(0, job.UV);
            mesh.boneWeights = job.BoneWeights.ToArray();

            // The converted index buffer is 32-bit (widened from the plugin's shorts).
            mesh.SetIndexBufferParams(numIndices, IndexFormat.UInt32);
            mesh.SetIndexBufferData(jobTransformTriangle.Triangles, 0, 0, numIndices);
            mesh.SetSubMesh(0, new SubMeshDescriptor(0, numIndices));
        }
    }

#if UNITY_EDITOR
    // ShouldInitialize can flip from false to true in the editor (hand tracking connects
    // after Awake), so keep retrying until initialization succeeds.
    private void Update()
    {
        if (ShouldInitialize())
        {
            Initialize(_meshType);
        }
    }
#endif
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5cc5c234723e3a54d8dd09b131117743
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -80
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,120 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Runtime.InteropServices;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Jobs;
using UnityEngine;
public class OVRMeshJobs
{
    /// <summary>
    /// Parallel job that converts OVRPlugin mesh attributes into Unity's coordinate
    /// conventions, one vertex per Execute call.
    /// </summary>
    public struct TransformToUnitySpaceJob : IJobParallelFor
    {
        // Output buffers, one entry per vertex.
        public NativeArray<Vector3> Vertices;
        public NativeArray<Vector3> Normals;
        public NativeArray<Vector2> UV;
        public NativeArray<BoneWeight> BoneWeights;
        // Input buffers wrapping the raw OVRPlugin mesh data.
        public NativeArray<OVRPlugin.Vector3f> MeshVerticesPosition;
        public NativeArray<OVRPlugin.Vector3f> MeshNormals;
        public NativeArray<OVRPlugin.Vector2f> MeshUV;
        public NativeArray<OVRPlugin.Vector4f> MeshBoneWeights;
        public NativeArray<OVRPlugin.Vector4s> MeshBoneIndices;
        public void Execute(int index)
        {
            // Positions and normals are converted via the X-flip helper.
            Vertices[index] = MeshVerticesPosition[index].FromFlippedXVector3f();
            Normals[index] = MeshNormals[index].FromFlippedXVector3f();
            // V is negated to flip the texture's vertical axis.
            UV[index] = new Vector2
            {
                x = MeshUV[index].x,
                y = -MeshUV[index].y
            };
            var currentBlendWeight = MeshBoneWeights[index];
            var currentBlendIndices = MeshBoneIndices[index];
            // Repack the four bone influences into Unity's BoneWeight layout.
            BoneWeights[index] = new BoneWeight
            {
                boneIndex0 = currentBlendIndices.x,
                weight0 = currentBlendWeight.x,
                boneIndex1 = currentBlendIndices.y,
                weight1 = currentBlendWeight.y,
                boneIndex2 = currentBlendIndices.z,
                weight2 = currentBlendWeight.z,
                boneIndex3 = currentBlendIndices.w,
                weight3 = currentBlendWeight.w,
            };
        }
    }
    /// <summary>
    /// Parallel job that widens 16-bit indices to 32-bit while reversing the whole index
    /// buffer (which also reverses each triangle's vertex order).
    /// </summary>
    public struct TransformTrianglesJob : IJobParallelFor
    {
        public NativeArray<uint> Triangles;
        [ReadOnly]
        public NativeArray<short> MeshIndices;
        // Total index count; used to compute the mirrored read position.
        public int NumIndices;
        public void Execute(int index)
        {
            Triangles[index] = (uint)MeshIndices[NumIndices - index - 1];
        }
    }
    /// <summary>
    /// Pins a managed array and exposes it as a NativeArray view without copying.
    /// Dispose() must be called to release the pin (and, when collection checks are
    /// enabled, the safety handle).
    /// </summary>
    public unsafe struct NativeArrayHelper<T> : IDisposable where T : struct
    {
        public NativeArray<T> UnityNativeArray;
        private GCHandle _handle;
#if ENABLE_UNITY_COLLECTIONS_CHECKS
        private readonly AtomicSafetyHandle _atomicSafetyHandle;
#endif
        public NativeArrayHelper(T[] ovrArray, int length)
        {
            // Pin the managed array so the GC cannot move it while the view exists.
            _handle = GCHandle.Alloc(ovrArray, GCHandleType.Pinned);
            var ptr = _handle.AddrOfPinnedObject();
            UnityNativeArray = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<T>(
                (void*)ptr, length, Allocator.None);
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            _atomicSafetyHandle = AtomicSafetyHandle.Create();
            NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref UnityNativeArray, _atomicSafetyHandle);
#endif
        }
        public void Dispose()
        {
#if ENABLE_UNITY_COLLECTIONS_CHECKS
            AtomicSafetyHandle.Release(_atomicSafetyHandle);
#endif
            _handle.Free();
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b3f4a655128585a47aba854348fd95a0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,213 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class OVRMeshRenderer : MonoBehaviour
{
    /// <summary>
    /// Implemented by components that supply per-frame state for this renderer.
    /// </summary>
    public interface IOVRMeshRendererDataProvider
    {
        MeshRendererData GetMeshRendererData();
    }
    /// <summary>Per-frame state used to drive visibility and material selection.</summary>
    public struct MeshRendererData
    {
        public bool IsDataValid { get; set; }
        public bool IsDataHighConfidence { get; set; }
        public bool ShouldUseSystemGestureMaterial { get; set; }
    }
    // How to react when the provider reports invalid or low-confidence data.
    public enum ConfidenceBehavior
    {
        None,
        ToggleRenderer,
    }
    // How to react when the provider requests the system gesture material.
    public enum SystemGestureBehavior
    {
        None,
        SwapMaterial,
    }
    [SerializeField]
    private IOVRMeshRendererDataProvider _dataProvider;
    [SerializeField]
    private OVRMesh _ovrMesh;
    [SerializeField]
    private OVRSkeleton _ovrSkeleton;
    [SerializeField]
    private ConfidenceBehavior _confidenceBehavior = ConfidenceBehavior.ToggleRenderer;
    [SerializeField]
    private SystemGestureBehavior _systemGestureBehavior = SystemGestureBehavior.SwapMaterial;
    [SerializeField]
    private Material _systemGestureMaterial = null;
    // Material captured at initialization; restored when the system gesture ends.
    private Material _originalMaterial = null;
    private SkinnedMeshRenderer _skinnedMeshRenderer;
    /// <summary>True once the SkinnedMeshRenderer is created and bound to the mesh/skeleton.</summary>
    public bool IsInitialized { get; private set; }
    /// <summary>Latest IsDataValid reported by the provider (false when no provider).</summary>
    public bool IsDataValid { get; private set; }
    /// <summary>Latest IsDataHighConfidence reported by the provider (false when no provider).</summary>
    public bool IsDataHighConfidence { get; private set; }
    /// <summary>Latest ShouldUseSystemGestureMaterial reported by the provider.</summary>
    public bool ShouldUseSystemGestureMaterial { get; private set; }
    private void Awake()
    {
        // Fall back to sibling components for anything not wired up in the inspector.
        if (_dataProvider == null)
        {
            _dataProvider = GetComponent<IOVRMeshRendererDataProvider>();
        }
        if (_ovrMesh == null)
        {
            _ovrMesh = GetComponent<OVRMesh>();
        }
        if (_ovrSkeleton == null)
        {
            _ovrSkeleton = GetComponent<OVRSkeleton>();
        }
    }
    private void Start()
    {
        if (_ovrMesh == null)
        {
            // disable if no mesh configured
            this.enabled = false;
            return;
        }
        if (ShouldInitialize())
        {
            Initialize();
        }
    }
    // Returns true when initialization is needed and all dependencies are ready.
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }
        if ((_ovrMesh == null) || ((_ovrMesh != null) && !_ovrMesh.IsInitialized) ||
            ((_ovrSkeleton != null) && !_ovrSkeleton.IsInitialized))
        {
            // do not initialize if mesh or optional skeleton are not initialized
            return false;
        }
        return true;
    }
    private void Initialize()
    {
        // Reuse an existing SkinnedMeshRenderer when present; otherwise add one.
        _skinnedMeshRenderer = GetComponent<SkinnedMeshRenderer>();
        if (!_skinnedMeshRenderer)
        {
            _skinnedMeshRenderer = gameObject.AddComponent<SkinnedMeshRenderer>();
        }
        _skinnedMeshRenderer.sharedMesh = _ovrMesh.Mesh;
        _originalMaterial = _skinnedMeshRenderer.sharedMaterial;
        if ((_ovrSkeleton != null))
        {
            // Bind the mesh to the skeleton: one bind pose per skinnable bone,
            // expressed relative to this renderer's transform.
            int numSkinnableBones = _ovrSkeleton.GetCurrentNumSkinnableBones();
            var bindPoses = new Matrix4x4[numSkinnableBones];
            var bones = new Transform[numSkinnableBones];
            var localToWorldMatrix = transform.localToWorldMatrix;
            for (int i = 0; i < numSkinnableBones && i < _ovrSkeleton.Bones.Count; ++i)
            {
                bones[i] = _ovrSkeleton.Bones[i].Transform;
                bindPoses[i] = _ovrSkeleton.BindPoses[i].Transform.worldToLocalMatrix * localToWorldMatrix;
            }
            _ovrMesh.Mesh.bindposes = bindPoses;
            _skinnedMeshRenderer.bones = bones;
            // Keep updating the skinned mesh even when its bounds are off screen.
            _skinnedMeshRenderer.updateWhenOffscreen = true;
        }
        IsInitialized = true;
    }
    private void Update()
    {
#if UNITY_EDITOR
        // In the editor, dependencies may finish initializing after Start, so retry here.
        if (ShouldInitialize())
        {
            Initialize();
        }
#endif
        IsDataValid = false;
        IsDataHighConfidence = false;
        ShouldUseSystemGestureMaterial = false;
        if (IsInitialized)
        {
            bool shouldRender = false;
            if (_dataProvider != null)
            {
                var data = _dataProvider.GetMeshRendererData();
                IsDataValid = data.IsDataValid;
                IsDataHighConfidence = data.IsDataHighConfidence;
                ShouldUseSystemGestureMaterial = data.ShouldUseSystemGestureMaterial;
                // Render only when the data is both valid and high-confidence.
                shouldRender = data.IsDataValid && data.IsDataHighConfidence;
            }
            if (_confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
            {
                // Only touch the renderer when its enabled state actually changes.
                if (_skinnedMeshRenderer != null && _skinnedMeshRenderer.enabled != shouldRender)
                {
                    _skinnedMeshRenderer.enabled = shouldRender;
                }
            }
            if (_systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
            {
                if (_skinnedMeshRenderer != null)
                {
                    // Swap to the system gesture material (and back) only when the current
                    // sharedMaterial differs, avoiding redundant writes.
                    if (ShouldUseSystemGestureMaterial && _systemGestureMaterial != null &&
                        _skinnedMeshRenderer.sharedMaterial != _systemGestureMaterial)
                    {
                        _skinnedMeshRenderer.sharedMaterial = _systemGestureMaterial;
                    }
                    else if (!ShouldUseSystemGestureMaterial && _originalMaterial != null &&
                             _skinnedMeshRenderer.sharedMaterial != _originalMaterial)
                    {
                        _skinnedMeshRenderer.sharedMaterial = _originalMaterial;
                    }
                }
            }
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73eac66b128fc8749a21623225c34541
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,114 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System;
using UnityEngine;
/// <summary>
/// Settings contract for Mixed Reality Capture (MRC). Implemented by classes that own
/// MRC configuration (e.g. a serialized settings asset) so one configuration can be
/// copied onto another via the ReadFrom/ApplyTo extension methods.
/// All members are compiled out on platforms that do not support MRC.
/// </summary>
public interface OVRMixedRealityCaptureConfiguration
{
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
    // Master toggle for mixed reality capture.
    bool enableMixedReality { get; set; }

    // Layer-visibility overrides applied to the MRC camera's culling mask.
    LayerMask extraHiddenLayers { get; set; }
    LayerMask extraVisibleLayers { get; set; }
    bool dynamicCullingMask { get; set; }

    // Composition pipeline selection, plus backdrop colors used by external composition
    // (separate values for Rift and Quest).
    OVRManager.CompositionMethod compositionMethod { get; set; }
    Color externalCompositionBackdropColorRift { get; set; }
    Color externalCompositionBackdropColorQuest { get; set; }

    [Obsolete("Deprecated", false)]
    OVRManager.CameraDevice capturingCameraDevice { get; set; }

    // Physical-camera frame orientation corrections.
    bool flipCameraFrameHorizontally { get; set; }
    bool flipCameraFrameVertically { get; set; }

    // Latency compensation for hand poses, and sandwich-composition timing/buffering.
    float handPoseStateLatency { get; set; }
    float sandwichCompositionRenderLatency { get; set; }
    int sandwichCompositionBufferedFrames { get; set; }

    // Chroma-key parameters for direct composition.
    Color chromaKeyColor { get; set; }
    float chromaKeySimilarity { get; set; }
    float chromaKeySmoothRange { get; set; }
    float chromaKeySpillRange { get; set; }

    // Dynamic lighting of the camera image using depth data.
    bool useDynamicLighting { get; set; }

    [Obsolete("Deprecated", false)]
    OVRManager.DepthQuality depthQuality { get; set; }

    float dynamicLightingSmoothFactor { get; set; }
    float dynamicLightingDepthVariationClampingValue { get; set; }

    // Virtual green screen (deprecated type selector plus bounds/culling parameters).
    [Obsolete("Deprecated", false)]
    OVRManager.VirtualGreenScreenType virtualGreenScreenType { get; set; }

    float virtualGreenScreenTopY { get; set; }
    float virtualGreenScreenBottomY { get; set; }
    bool virtualGreenScreenApplyDepthCulling { get; set; }
    float virtualGreenScreenDepthTolerance { get; set; }

    // How/when MRC activates, and an optional factory for the MRC camera GameObject.
    OVRManager.MrcActivationMode mrcActivationMode { get; set; }
    OVRManager.InstantiateMrcCameraDelegate instantiateMixedRealityCameraGameObject { get; set; }
#endif
}
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// Helpers for copying one Mixed Reality Capture configuration onto another.
/// </summary>
public static class OVRMixedRealityCaptureConfigurationExtensions
{
    /// <summary>
    /// Copies every MRC setting from <paramref name="source"/> onto <paramref name="dest"/>.
    /// Thin alias for <see cref="ReadFrom"/>.
    /// </summary>
    public static void ApplyTo(this OVRMixedRealityCaptureConfiguration dest,
        OVRMixedRealityCaptureConfiguration source)
    {
        dest.ReadFrom(source);
    }

    /// <summary>
    /// Overwrites all settings on <paramref name="dest"/> with the values held by
    /// <paramref name="source"/>, including the deprecated members (copied under a
    /// warning-suppression pragma so the obsolete values still round-trip).
    /// </summary>
    public static void ReadFrom(this OVRMixedRealityCaptureConfiguration dest,
        OVRMixedRealityCaptureConfiguration source)
    {
        dest.enableMixedReality = source.enableMixedReality;
        dest.compositionMethod = source.compositionMethod;
        dest.extraHiddenLayers = source.extraHiddenLayers;
        // BUGFIX: these two interface members were previously skipped, so a copied
        // configuration silently lost its visible-layer override and culling-mask mode.
        dest.extraVisibleLayers = source.extraVisibleLayers;
        dest.dynamicCullingMask = source.dynamicCullingMask;
        dest.externalCompositionBackdropColorRift = source.externalCompositionBackdropColorRift;
        dest.externalCompositionBackdropColorQuest = source.externalCompositionBackdropColorQuest;
        dest.flipCameraFrameHorizontally = source.flipCameraFrameHorizontally;
        dest.flipCameraFrameVertically = source.flipCameraFrameVertically;
        dest.handPoseStateLatency = source.handPoseStateLatency;
        dest.sandwichCompositionRenderLatency = source.sandwichCompositionRenderLatency;
        dest.sandwichCompositionBufferedFrames = source.sandwichCompositionBufferedFrames;
        dest.chromaKeyColor = source.chromaKeyColor;
        dest.chromaKeySimilarity = source.chromaKeySimilarity;
        dest.chromaKeySmoothRange = source.chromaKeySmoothRange;
        dest.chromaKeySpillRange = source.chromaKeySpillRange;
        dest.useDynamicLighting = source.useDynamicLighting;
        dest.dynamicLightingSmoothFactor = source.dynamicLightingSmoothFactor;
        dest.dynamicLightingDepthVariationClampingValue = source.dynamicLightingDepthVariationClampingValue;
        dest.virtualGreenScreenTopY = source.virtualGreenScreenTopY;
        dest.virtualGreenScreenBottomY = source.virtualGreenScreenBottomY;
        dest.virtualGreenScreenApplyDepthCulling = source.virtualGreenScreenApplyDepthCulling;
        dest.virtualGreenScreenDepthTolerance = source.virtualGreenScreenDepthTolerance;
        dest.mrcActivationMode = source.mrcActivationMode;
        dest.instantiateMixedRealityCameraGameObject = source.instantiateMixedRealityCameraGameObject;
#pragma warning disable CS0618
        dest.capturingCameraDevice = source.capturingCameraDevice;
        dest.depthQuality = source.depthQuality;
        dest.virtualGreenScreenType = source.virtualGreenScreenType;
#pragma warning restore CS0618
    }
}
#endif

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 3daa35105ec74ccb94bc0756d34b2d55
timeCreated: 1605293235

View File

@ -0,0 +1,284 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using UnityEngine;
using System;
using System.IO;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// Serialized Mixed Reality Capture settings asset. Exposes its serialized fields through
/// the OVRMixedRealityCaptureConfiguration interface and (on Rift) can round-trip itself
/// to a JSON config file in the application data directory.
/// </summary>
public class OVRMixedRealityCaptureSettings : ScriptableObject, OVRMixedRealityCaptureConfiguration
{
    // Serialized fields. Names and declaration order are part of the Unity/JsonUtility
    // serialization contract and must not change.
    public bool enableMixedReality = false;
    public LayerMask extraHiddenLayers;
    public LayerMask extraVisibleLayers;
    public bool dynamicCullingMask = true;
    public OVRManager.CompositionMethod compositionMethod = OVRManager.CompositionMethod.External;
    public Color externalCompositionBackdropColorRift = Color.green;
    public Color externalCompositionBackdropColorQuest = Color.clear;

    [Obsolete("Deprecated", false)]
    public OVRManager.CameraDevice capturingCameraDevice = OVRManager.CameraDevice.WebCamera0;

    public bool flipCameraFrameHorizontally = false;
    public bool flipCameraFrameVertically = false;
    public float handPoseStateLatency = 0.0f;
    public float sandwichCompositionRenderLatency = 0.0f;
    public int sandwichCompositionBufferedFrames = 8;
    public Color chromaKeyColor = Color.green;
    public float chromaKeySimilarity = 0.6f;
    public float chromaKeySmoothRange = 0.03f;
    public float chromaKeySpillRange = 0.04f;
    public bool useDynamicLighting = false;

    [Obsolete("Deprecated", false)]
    public OVRManager.DepthQuality depthQuality = OVRManager.DepthQuality.Medium;

    public float dynamicLightingSmoothFactor = 8.0f;
    public float dynamicLightingDepthVariationClampingValue = 0.001f;

    [Obsolete("Deprecated", false)]
    public OVRManager.VirtualGreenScreenType virtualGreenScreenType = OVRManager.VirtualGreenScreenType.Off;

    public float virtualGreenScreenTopY;
    public float virtualGreenScreenBottomY;
    public bool virtualGreenScreenApplyDepthCulling = false;
    public float virtualGreenScreenDepthTolerance = 0.2f;
    public OVRManager.MrcActivationMode mrcActivationMode;

    // OVRMixedRealityCaptureConfiguration implementation: each explicit property
    // simply forwards to the serialized field of the same name.
    bool OVRMixedRealityCaptureConfiguration.enableMixedReality { get => enableMixedReality; set => enableMixedReality = value; }
    LayerMask OVRMixedRealityCaptureConfiguration.extraHiddenLayers { get => extraHiddenLayers; set => extraHiddenLayers = value; }
    LayerMask OVRMixedRealityCaptureConfiguration.extraVisibleLayers { get => extraVisibleLayers; set => extraVisibleLayers = value; }
    bool OVRMixedRealityCaptureConfiguration.dynamicCullingMask { get => dynamicCullingMask; set => dynamicCullingMask = value; }
    OVRManager.CompositionMethod OVRMixedRealityCaptureConfiguration.compositionMethod { get => compositionMethod; set => compositionMethod = value; }
    Color OVRMixedRealityCaptureConfiguration.externalCompositionBackdropColorRift { get => externalCompositionBackdropColorRift; set => externalCompositionBackdropColorRift = value; }
    Color OVRMixedRealityCaptureConfiguration.externalCompositionBackdropColorQuest { get => externalCompositionBackdropColorQuest; set => externalCompositionBackdropColorQuest = value; }

    [Obsolete("Deprecated", false)]
    OVRManager.CameraDevice OVRMixedRealityCaptureConfiguration.capturingCameraDevice { get => capturingCameraDevice; set => capturingCameraDevice = value; }

    bool OVRMixedRealityCaptureConfiguration.flipCameraFrameHorizontally { get => flipCameraFrameHorizontally; set => flipCameraFrameHorizontally = value; }
    bool OVRMixedRealityCaptureConfiguration.flipCameraFrameVertically { get => flipCameraFrameVertically; set => flipCameraFrameVertically = value; }
    float OVRMixedRealityCaptureConfiguration.handPoseStateLatency { get => handPoseStateLatency; set => handPoseStateLatency = value; }
    float OVRMixedRealityCaptureConfiguration.sandwichCompositionRenderLatency { get => sandwichCompositionRenderLatency; set => sandwichCompositionRenderLatency = value; }
    int OVRMixedRealityCaptureConfiguration.sandwichCompositionBufferedFrames { get => sandwichCompositionBufferedFrames; set => sandwichCompositionBufferedFrames = value; }
    Color OVRMixedRealityCaptureConfiguration.chromaKeyColor { get => chromaKeyColor; set => chromaKeyColor = value; }
    float OVRMixedRealityCaptureConfiguration.chromaKeySimilarity { get => chromaKeySimilarity; set => chromaKeySimilarity = value; }
    float OVRMixedRealityCaptureConfiguration.chromaKeySmoothRange { get => chromaKeySmoothRange; set => chromaKeySmoothRange = value; }
    float OVRMixedRealityCaptureConfiguration.chromaKeySpillRange { get => chromaKeySpillRange; set => chromaKeySpillRange = value; }
    bool OVRMixedRealityCaptureConfiguration.useDynamicLighting { get => useDynamicLighting; set => useDynamicLighting = value; }

    [Obsolete("Deprecated", false)]
    OVRManager.DepthQuality OVRMixedRealityCaptureConfiguration.depthQuality { get => depthQuality; set => depthQuality = value; }

    float OVRMixedRealityCaptureConfiguration.dynamicLightingSmoothFactor { get => dynamicLightingSmoothFactor; set => dynamicLightingSmoothFactor = value; }
    float OVRMixedRealityCaptureConfiguration.dynamicLightingDepthVariationClampingValue { get => dynamicLightingDepthVariationClampingValue; set => dynamicLightingDepthVariationClampingValue = value; }

    [Obsolete("Deprecated", false)]
    OVRManager.VirtualGreenScreenType OVRMixedRealityCaptureConfiguration.virtualGreenScreenType { get => virtualGreenScreenType; set => virtualGreenScreenType = value; }

    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenTopY { get => virtualGreenScreenTopY; set => virtualGreenScreenTopY = value; }
    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenBottomY { get => virtualGreenScreenBottomY; set => virtualGreenScreenBottomY = value; }
    bool OVRMixedRealityCaptureConfiguration.virtualGreenScreenApplyDepthCulling { get => virtualGreenScreenApplyDepthCulling; set => virtualGreenScreenApplyDepthCulling = value; }
    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenDepthTolerance { get => virtualGreenScreenDepthTolerance; set => virtualGreenScreenDepthTolerance = value; }
    OVRManager.MrcActivationMode OVRMixedRealityCaptureConfiguration.mrcActivationMode { get => mrcActivationMode; set => mrcActivationMode = value; }

    // Delegate is not serialized; backed by the interface's auto-property only.
    OVRManager.InstantiateMrcCameraDelegate OVRMixedRealityCaptureConfiguration.instantiateMixedRealityCameraGameObject { get; set; }

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN // Rift MRC only
    const string configFileName = "mrc.config";

    /// <summary>
    /// Serializes the current settings as JSON to Application.dataPath/mrc.config.
    /// Failures are logged as warnings, never thrown.
    /// </summary>
    public void WriteToConfigurationFile()
    {
        var text = JsonUtility.ToJson(this, true);
        try
        {
            var configPath = Path.Combine(Application.dataPath, configFileName);
            Debug.Log("Write OVRMixedRealityCaptureSettings to " + configPath);
            File.WriteAllText(configPath, text);
        }
        catch (Exception e)
        {
            Debug.LogWarning("Exception caught " + e.Message);
        }
    }

    /// <summary>
    /// Overwrites these settings with any values found in mrc.config, when the file exists.
    /// Failures are logged as warnings, never thrown.
    /// </summary>
    public void CombineWithConfigurationFile()
    {
        try
        {
            var configPath = Path.Combine(Application.dataPath, configFileName);
            if (!File.Exists(configPath))
            {
                Debug.Log("MixedRealityCapture configuration file doesn't exist at " + configPath);
                return;
            }

            Debug.Log("MixedRealityCapture configuration file found at " + configPath);
            var text = File.ReadAllText(configPath);
            Debug.Log("Apply MixedRealityCapture configuration");
            JsonUtility.FromJsonOverwrite(text, this);
        }
        catch (Exception e)
        {
            Debug.LogWarning("Exception caught " + e.Message);
        }
    }
#endif
}
#endif

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 99bbd170d56da4248941de890e6d7af5
timeCreated: 1501004238
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,273 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Debug/exercise component for Mixed Reality Capture: registers a manually configured
/// "default" external camera with OVRPlugin and cycles between camera modes
/// (Normal, OverrideFov, ThirdPerson) when Button.One is pressed.
/// </summary>
public class OVRMixedRealityCaptureTest : MonoBehaviour
{
    // Set once MRC is available and the default external camera has been registered.
    bool inited = false;

    enum CameraMode
    {
        Normal = 0,
        OverrideFov,
        ThirdPerson,
    }

    CameraMode currentMode = CameraMode.Normal;

    // Scene camera whose transform and FOV seed the registered external camera.
    public Camera defaultExternalCamera;

    // FOV read back from camera 0 at init time; OverrideFov mode scales this.
    OVRPlugin.Fovf defaultFov;

    // Use this for initialization
    void Start()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (!defaultExternalCamera)
        {
            Debug.LogWarning("defaultExternalCamera undefined");
        }
#if !OVR_ANDROID_MRC
        // On Quest, we enable MRC automatically through the configuration
        if (!OVRManager.instance.enableMixedReality)
        {
            OVRManager.instance.enableMixedReality = true;
        }
#endif
#endif
    }

    // Deferred initialization: retried from Update() every frame until the MRC
    // runtime reports ready, then registers the external camera exactly once.
    void Initialize()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (inited)
            return;
#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
            return;
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
            return;
#endif
        OVRPlugin.ResetDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount before adding manual external camera {0}",
            OVRPlugin.GetExternalCameraCount());
        UpdateDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount after adding manual external camera {0}",
            OVRPlugin.GetExternalCameraCount());
        // obtain default FOV
        {
            // NOTE(review): assumes index 0 is the camera registered just above — confirm.
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            OVRPlugin.CameraExtrinsics cameraExtrinsics;
            OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics);
            defaultFov = cameraIntrinsics.FOVPort;
        }
        inited = true;
#endif
    }

    // Builds intrinsics/extrinsics from defaultExternalCamera and registers them with
    // the plugin. Called at init and every frame while in Normal mode.
    void UpdateDefaultExternalCamera()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // always build a 1080p external camera
        const int cameraPixelWidth = 1920;
        const int cameraPixelHeight = 1080;
        const float cameraAspect = (float)cameraPixelWidth / cameraPixelHeight;
        string cameraName = "UnityExternalCamera";
        OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
        OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();
        // intrinsics
        cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
        cameraIntrinsics.LastChangedTimeSeconds = Time.time;
        // Derive symmetric half-angle tangents from the Unity camera's vertical FOV
        // and the fixed 16:9 aspect ratio.
        float vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
        float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
        OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
        fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
        fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
        cameraIntrinsics.FOVPort = fov;
        cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
        cameraIntrinsics.VirtualFarPlaneDistanceMeters = defaultExternalCamera.farClipPlane;
        cameraIntrinsics.ImageSensorPixelResolution.w = cameraPixelWidth;
        cameraIntrinsics.ImageSensorPixelResolution.h = cameraPixelHeight;
        // extrinsics
        cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
        cameraExtrinsics.LastChangedTimeSeconds = Time.time;
        cameraExtrinsics.CameraStatusData = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
        cameraExtrinsics.AttachedToNode = OVRPlugin.Node.None;
        Camera mainCamera = Camera.main;
        OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
        if (cameraRig)
        {
            // Express the external camera's pose relative to the rig's tracking space.
            Transform trackingSpace = cameraRig.trackingSpace;
            OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
            OVRPose cameraPose = defaultExternalCamera.transform.ToOVRPose(false);
            OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
#if OVR_ANDROID_MRC
            // On Quest the plugin expects the pose in stage space; re-base it.
            OVRPose stageToLocalPose =
                OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
            OVRPose localToStagePose = stageToLocalPose.Inverse();
            relativePose = localToStagePose * relativePose;
#endif
            cameraExtrinsics.RelativePose = relativePose.ToPosef();
        }
        else
        {
            // No rig found: fall back to an identity pose.
            cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
        }
        if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
        {
            Debug.LogError("SetDefaultExternalCamera() failed");
        }
#endif
    }

    // Update is called once per frame
    void Update()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // Keep retrying initialization until the MRC runtime is ready.
        if (!inited)
        {
            Initialize();
            return;
        }
        if (!defaultExternalCamera)
        {
            return;
        }
#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }
#endif
        // Button.One cycles Normal -> OverrideFov -> ThirdPerson -> Normal.
        if (OVRInput.GetDown(OVRInput.Button.One))
        {
            if (currentMode == CameraMode.ThirdPerson)
            {
                currentMode = CameraMode.Normal;
            }
            else
            {
                currentMode = currentMode + 1;
            }
            Debug.LogFormat("Camera mode change to {0}", currentMode);
        }
        if (currentMode == CameraMode.Normal)
        {
            // Track the scene camera and clear any FOV/pose overrides.
            UpdateDefaultExternalCamera();
            OVRPlugin.OverrideExternalCameraFov(0, false, new OVRPlugin.Fovf());
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
        }
        else if (currentMode == CameraMode.OverrideFov)
        {
            // Double the default FOV tangents (wider view) while keeping the pose live.
            OVRPlugin.Fovf fov = defaultFov;
            OVRPlugin.Fovf newFov = new OVRPlugin.Fovf();
            newFov.LeftTan = fov.LeftTan * 2.0f;
            newFov.RightTan = fov.RightTan * 2.0f;
            newFov.UpTan = fov.UpTan * 2.0f;
            newFov.DownTan = fov.DownTan * 2.0f;
            OVRPlugin.OverrideExternalCameraFov(0, true, newFov);
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }
        }
        else if (currentMode == CameraMode.ThirdPerson)
        {
            // Pin the external camera to this component's own camera and transform.
            Camera camera = GetComponent<Camera>();
            if (camera == null)
            {
                return;
            }
            float vFov = camera.fieldOfView * Mathf.Deg2Rad;
            float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * camera.aspect) * 2.0f;
            OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
            fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
            fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
            OVRPlugin.OverrideExternalCameraFov(0, true, fov);
            Camera mainCamera = Camera.main;
            OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
            if (cameraRig)
            {
                // Convert this transform into a static pose in stage space.
                Transform trackingSpace = cameraRig.trackingSpace;
                OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
                OVRPose cameraPose = transform.ToOVRPose(false);
                OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
                OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage)
                    .ToOVRPose();
                OVRPose localToStagePose = stageToLocalPose.Inverse();
                OVRPose relativePoseInStage = localToStagePose * relativePose;
                OVRPlugin.Posef relativePosef = relativePoseInStage.ToPosef();
                OVRPlugin.OverrideExternalCameraStaticPose(0, true, relativePosef);
            }
            else
            {
                OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
            }
            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }
            if (!OVRPlugin.GetUseOverriddenExternalCameraStaticPose(0))
            {
                Debug.LogWarning("StaticPose not overridden");
            }
        }
#endif
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 95a5f282b22a9d846bd1a9d2de25079c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,86 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Logs when the application enters power save mode and lets you switch to a
/// low-power CPU/GPU performance level with a gamepad button press.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_mode_parms")]
public class OVRModeParms : MonoBehaviour
{
    #region Member Variables

    /// <summary>
    /// The gamepad button that will switch the application to CPU level 0 and GPU level 1.
    /// </summary>
    public OVRInput.RawButton resetButton = OVRInput.RawButton.X;

    #endregion

    /// <summary>
    /// Starts the periodic power-state check, or disables this component when no HMD is present.
    /// </summary>
    void Start()
    {
        if (!OVRManager.isHmdPresent)
        {
            enabled = false;
            return;
        }

        // Call TestPowerStateMode after 10 seconds, repeating every 10 seconds.
        // nameof keeps the reflective method lookup refactor-safe (was a raw string literal
        // that would silently break if the method were renamed).
        InvokeRepeating(nameof(TestPowerStateMode), 10, 10.0f);
    }

    /// <summary>
    /// Change default vr mode parms dynamically.
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
        if (OVRInput.GetDown(resetButton))
        {
            //*************************
            // Dynamically change VrModeParms cpu and gpu level.
            // NOTE: Reset will cause 1 frame of flicker as it leaves
            // and re-enters Vr mode.
            //*************************
            OVRPlugin.suggestedCpuPerfLevel = OVRPlugin.ProcessorPerformanceLevel.PowerSavings;
            OVRPlugin.suggestedGpuPerfLevel = OVRPlugin.ProcessorPerformanceLevel.SustainedLow;
        }
    }

    /// <summary>
    /// Check current power state mode and log if the device is being throttled.
    /// </summary>
    void TestPowerStateMode()
    {
        //*************************
        // Check power-level state mode
        //*************************
        if (OVRPlugin.powerSaving)
        {
            // The device has been throttled
            Debug.Log("POWER SAVE MODE ACTIVATED");
        }
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6a6ae8e8def81df429a8fdfc00f63e5c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,52 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Allows you to toggle monoscopic rendering with a gamepad button press.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_monoscopic")]
public class OVRMonoscopic : MonoBehaviour
{
    /// <summary>
    /// The gamepad button that will toggle monoscopic rendering.
    /// </summary>
    public OVRInput.RawButton toggleButton = OVRInput.RawButton.B;

    // Current rendering mode; flipped on each toggle press and pushed to OVRManager.
    private bool monoscopic = false;

    /// <summary>
    /// Polls the toggle button once per frame and flips monoscopic rendering when pressed.
    /// See the input mapping setup in the Unity Integration guide.
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
        if (!OVRInput.GetDown(toggleButton))
        {
            return;
        }

        monoscopic = !monoscopic;
        OVRManager.instance.monoscopic = monoscopic;
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 06ef2a389c534554c848533f88dbb32c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,454 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
using UnityEngine;
using Debug = UnityEngine.Debug;
public class OVRNetwork
{
public const int MaxBufferLength = 65536;
public const int MaxPayloadLength = MaxBufferLength - FrameHeader.StructSize;
public const uint FrameHeaderMagicIdentifier = 0x5283A76B;
[StructLayout(LayoutKind.Sequential, Pack = 1)]
struct FrameHeader
{
    public uint protocolIdentifier;   // expected to carry FrameHeaderMagicIdentifier on the wire
    public int payloadType;
    public int payloadLength;

    public const int StructSize = sizeof(uint) + sizeof(int) + sizeof(int);

    // endianness conversion is NOT handled since all our current mobile/PC devices are little-endian

    /// <summary>Serializes this header into a StructSize-byte array.</summary>
    public byte[] ToBytes()
    {
        int byteCount = Marshal.SizeOf(this);
        Trace.Assert(byteCount == StructSize);
        var buffer = new byte[byteCount];
        IntPtr scratch = Marshal.AllocHGlobal(byteCount);
        Marshal.StructureToPtr(this, scratch, true);
        Marshal.Copy(scratch, buffer, 0, byteCount);
        Marshal.FreeHGlobal(scratch);
        return buffer;
    }

    /// <summary>Deserializes a header from the first StructSize bytes of <paramref name="arr"/>.</summary>
    public static FrameHeader FromBytes(byte[] arr)
    {
        int byteCount = Marshal.SizeOf(typeof(FrameHeader));
        Trace.Assert(byteCount == StructSize);
        IntPtr scratch = Marshal.AllocHGlobal(byteCount);
        Marshal.Copy(arr, 0, scratch, byteCount);
        var header = (FrameHeader)Marshal.PtrToStructure(scratch, typeof(FrameHeader));
        Marshal.FreeHGlobal(scratch);
        return header;
    }
}
/// <summary>
/// Simple TCP broadcast server: accepts any number of clients asynchronously and pushes
/// length-prefixed frames (FrameHeader + payload) to every connected client.
/// </summary>
public class OVRNetworkTcpServer
{
    public TcpListener tcpListener = null;

    // Guards all access to `clients`; accept/write callbacks run on thread-pool threads.
    private readonly object clientsLock = new object();
    public readonly List<TcpClient> clients = new List<TcpClient>();

    /// <summary>
    /// Binds the listener on all interfaces at <paramref name="listeningPort"/> and begins
    /// accepting clients. On bind failure the server is left stopped and the reason logged.
    /// </summary>
    public void StartListening(int listeningPort)
    {
        if (tcpListener != null)
        {
            Debug.LogWarning("[OVRNetworkTcpServer] tcpListener is not null");
            return;
        }
        IPAddress localAddr = IPAddress.Any;
        tcpListener = new TcpListener(localAddr, listeningPort);
        try
        {
            tcpListener.Start();
            Debug.LogFormat("TcpListener started. Local endpoint: {0}", tcpListener.LocalEndpoint.ToString());
        }
        catch (SocketException e)
        {
            // BUGFIX: corrected "Unsable" typo in the warning below.
            Debug.LogWarningFormat("[OVRNetworkTcpServer] Unable to start TcpListener. Socket exception: {0}",
                e.Message);
            Debug.LogWarning(
                "It could be caused by multiple instances listening at the same port, or the port is forwarded to the Android device through ADB");
            Debug.LogWarning(
                "If the port is forwarded through ADB, use the Android Tools in Tools/Oculus/System Metrics Profiler to kill the server");
            tcpListener = null;
        }
        if (tcpListener != null)
        {
            Debug.LogFormat("[OVRNetworkTcpServer] Start Listening on port {0}", listeningPort);
            try
            {
                tcpListener.BeginAcceptTcpClient(new AsyncCallback(DoAcceptTcpClientCallback), tcpListener);
            }
            catch (Exception e)
            {
                Debug.LogWarningFormat("[OVRNetworkTcpServer] can't accept new client: {0}", e.Message);
            }
        }
    }

    /// <summary>Drops all tracked clients and stops the listener.</summary>
    public void StopListening()
    {
        if (tcpListener == null)
        {
            Debug.LogWarning("[OVRNetworkTcpServer] tcpListener is null");
            return;
        }
        lock (clientsLock)
        {
            clients.Clear();
        }
        tcpListener.Stop();
        tcpListener = null;
        Debug.Log("[OVRNetworkTcpServer] Stopped listening");
    }

    // Accept callback: registers the new client and re-arms the accept loop.
    private void DoAcceptTcpClientCallback(IAsyncResult ar)
    {
        TcpListener listener = ar.AsyncState as TcpListener;
        try
        {
            TcpClient client = listener.EndAcceptTcpClient(ar);
            lock (clientsLock)
            {
                clients.Add(client);
                Debug.Log("[OVRNetworkTcpServer] client added");
            }
            try
            {
                tcpListener.BeginAcceptTcpClient(new AsyncCallback(DoAcceptTcpClientCallback), tcpListener);
            }
            catch (Exception e)
            {
                Debug.LogWarningFormat("[OVRNetworkTcpServer] can't accept new client: {0}", e.Message);
            }
        }
        catch (ObjectDisposedException)
        {
            // Do nothing. It happens when stop preview in editor, which is normal behavior.
        }
        catch (Exception e)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpServer] EndAcceptTcpClient failed: {0}", e.Message);
        }
    }

    /// <summary>Returns true when at least one tracked client is still connected.</summary>
    public bool HasConnectedClient()
    {
        lock (clientsLock)
        {
            foreach (TcpClient client in clients)
            {
                if (client.Connected)
                {
                    return true;
                }
            }
        }
        return false;
    }

    /// <summary>
    /// Sends a FrameHeader + payload frame to every connected client. Payloads larger
    /// than MaxPayloadLength are dropped (receivers size their buffers to that limit).
    /// </summary>
    public void Broadcast(int payloadType, byte[] payload)
    {
        if (payload.Length > OVRNetwork.MaxPayloadLength)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpServer] drop payload because it's too long: {0} bytes",
                payload.Length);
            // BUGFIX: previously fell through and sent the oversized frame anyway,
            // contradicting the "drop payload" warning and overflowing client buffers.
            return;
        }
        FrameHeader header = new FrameHeader();
        header.protocolIdentifier = FrameHeaderMagicIdentifier;
        header.payloadType = payloadType;
        header.payloadLength = payload.Length;
        byte[] headerBuffer = header.ToBytes();
        byte[] dataBuffer = new byte[headerBuffer.Length + payload.Length];
        headerBuffer.CopyTo(dataBuffer, 0);
        payload.CopyTo(dataBuffer, headerBuffer.Length);
        lock (clientsLock)
        {
            foreach (TcpClient client in clients)
            {
                if (client.Connected)
                {
                    try
                    {
                        client.GetStream().BeginWrite(dataBuffer, 0, dataBuffer.Length,
                            new AsyncCallback(DoWriteDataCallback), client.GetStream());
                    }
                    catch (SocketException e)
                    {
                        Debug.LogWarningFormat("[OVRNetworkTcpServer] close client because of socket error: {0}",
                            e.Message);
                        client.GetStream().Close();
                        client.Close();
                    }
                }
            }
        }
    }

    // Write-completion callback. EndWrite can throw if the client disconnected
    // mid-write; catch it so it does not take down the thread-pool thread.
    private void DoWriteDataCallback(IAsyncResult ar)
    {
        NetworkStream stream = ar.AsyncState as NetworkStream;
        try
        {
            stream.EndWrite(ar);
        }
        catch (Exception e)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpServer] EndWrite failed: {0}", e.Message);
        }
    }
}
/// <summary>
/// TCP client that connects to a local OVRNetworkTcpServer and reassembles the
/// framed messages (FrameHeader + payload) it broadcasts.
/// </summary>
public class OVRNetworkTcpClient
{
    /// <summary>Raised whenever <see cref="connectionState"/> may have changed.</summary>
    public Action connectionStateChangedCallback;

    /// <summary>Raised for each complete frame: (payloadType, buffer, offset, length).</summary>
    public Action<int, byte[], int, int> payloadReceivedCallback;

    public enum ConnectionState
    {
        Disconnected,
        Connected,
        Connecting
    }

    public ConnectionState connectionState
    {
        get
        {
            if (tcpClient == null)
            {
                return ConnectionState.Disconnected;
            }
            else
            {
                if (tcpClient.Connected)
                {
                    return ConnectionState.Connected;
                }
                else
                {
                    return ConnectionState.Connecting;
                }
            }
        }
    }

    public bool Connected
    {
        get { return connectionState == ConnectionState.Connected; }
    }

    TcpClient tcpClient = null;

    // Double buffering: frames are parsed out of the active buffer while any
    // leftover partial frame is copied to the other buffer, which then becomes active.
    byte[][] receivedBuffers = { new byte[OVRNetwork.MaxBufferLength], new byte[OVRNetwork.MaxBufferLength] };
    int receivedBufferIndex = 0;
    int receivedBufferDataSize = 0;

    // Signaled when no BeginRead is outstanding; reset while a read is pending.
    ManualResetEvent readyReceiveDataEvent = new ManualResetEvent(true);

    /// <summary>
    /// Begins an asynchronous connection to 127.0.0.1:listeningPort.
    /// No-op (with a warning) if a connection attempt already exists.
    /// </summary>
    public void Connect(int listeningPort)
    {
        if (tcpClient == null)
        {
            receivedBufferIndex = 0;
            receivedBufferDataSize = 0;
            readyReceiveDataEvent.Set();

            string remoteAddress = "127.0.0.1";
            tcpClient = new TcpClient(AddressFamily.InterNetwork);
            tcpClient.BeginConnect(remoteAddress, listeningPort, new AsyncCallback(ConnectCallback), tcpClient);

            if (connectionStateChangedCallback != null)
            {
                connectionStateChangedCallback();
            }
        }
        else
        {
            Debug.LogWarning("[OVRNetworkTcpClient] already connected");
        }
    }

    // Completes BeginConnect and notifies listeners of the (possibly failed) attempt.
    void ConnectCallback(IAsyncResult ar)
    {
        try
        {
            TcpClient client = ar.AsyncState as TcpClient;
            client.EndConnect(ar);
            Debug.LogFormat("[OVRNetworkTcpClient] connected to {0}", client.ToString());
        }
        catch (Exception e)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpClient] connect error {0}", e.Message);
        }

        if (connectionStateChangedCallback != null)
        {
            connectionStateChangedCallback();
        }
    }

    /// <summary>
    /// Closes the connection. Waits briefly for any in-flight read to finish,
    /// then closes the stream and socket and notifies listeners.
    /// </summary>
    public void Disconnect()
    {
        if (tcpClient != null)
        {
            if (!readyReceiveDataEvent.WaitOne(5))
            {
                Debug.LogWarning(
                    "[OVRNetworkTcpClient] readyReceiveDataEvent not signaled. data receiving timeout?");
            }

            Debug.Log("[OVRNetworkTcpClient] close tcpClient");
            try
            {
                tcpClient.GetStream().Close();
                tcpClient.Close();
            }
            catch (Exception e)
            {
                Debug.LogWarning("[OVRNetworkTcpClient] " + e.Message);
            }

            tcpClient = null;

            if (connectionStateChangedCallback != null)
            {
                connectionStateChangedCallback();
            }
        }
        else
        {
            Debug.LogWarning("[OVRNetworkTcpClient] not connected");
        }
    }

    /// <summary>
    /// Pump method to be called regularly (e.g. per frame). Starts a new
    /// asynchronous read when the previous one has completed and data is available.
    /// </summary>
    public void Tick()
    {
        if (tcpClient == null || !tcpClient.Connected)
        {
            return;
        }

        if (readyReceiveDataEvent.WaitOne(TimeSpan.Zero))
        {
            if (tcpClient.GetStream().DataAvailable)
            {
                if (receivedBufferDataSize >= OVRNetwork.MaxBufferLength)
                {
                    Debug.LogWarning(
                        "[OVRNetworkTcpClient] receive buffer overflow. It should not happen since we have the constraint on message size");
                    Disconnect();
                    return;
                }

                readyReceiveDataEvent.Reset();
                // Clamp to the buffer we actually read into. (Previously used
                // OVRSystemPerfMetrics.MaxBufferLength, inconsistent with the
                // OVRNetwork.MaxBufferLength-sized buffers allocated above, which
                // risks an out-of-range BeginRead if the constants differ.)
                int maximumDataSize = OVRNetwork.MaxBufferLength - receivedBufferDataSize;

                tcpClient.GetStream().BeginRead(receivedBuffers[receivedBufferIndex], receivedBufferDataSize,
                    maximumDataSize, new AsyncCallback(OnReadDataCallback), tcpClient.GetStream());
            }
        }
    }

    // Completes a BeginRead, then extracts every complete frame from the buffer.
    // Partial frames are kept until more data arrives.
    void OnReadDataCallback(IAsyncResult ar)
    {
        NetworkStream stream = ar.AsyncState as NetworkStream;
        try
        {
            int numBytes = stream.EndRead(ar);
            receivedBufferDataSize += numBytes;

            while (receivedBufferDataSize >= FrameHeader.StructSize)
            {
                FrameHeader header = FrameHeader.FromBytes(receivedBuffers[receivedBufferIndex]);
                if (header.protocolIdentifier != OVRNetwork.FrameHeaderMagicIdentifier)
                {
                    Debug.LogWarning("[OVRNetworkTcpClient] header mismatch");
                    Disconnect();
                    return;
                }

                if (header.payloadLength < 0 || header.payloadLength > OVRNetwork.MaxPayloadLength)
                {
                    // Fixed format placeholder: was printf-style "%d", which
                    // composite formatting ignores, so the length never printed.
                    Debug.LogWarningFormat("[OVRNetworkTcpClient] Sanity check failed. PayloadLength {0}",
                        header.payloadLength);
                    Disconnect();
                    return;
                }

                if (receivedBufferDataSize >= FrameHeader.StructSize + header.payloadLength)
                {
                    if (payloadReceivedCallback != null)
                    {
                        payloadReceivedCallback(header.payloadType, receivedBuffers[receivedBufferIndex],
                            FrameHeader.StructSize, header.payloadLength);
                    }

                    // swap receive buffer: move any remaining bytes to the front
                    // of the other buffer and continue parsing from there.
                    int newBufferIndex = 1 - receivedBufferIndex;
                    int newBufferDataSize =
                        receivedBufferDataSize - (FrameHeader.StructSize + header.payloadLength);
                    if (newBufferDataSize > 0)
                    {
                        Array.Copy(receivedBuffers[receivedBufferIndex],
                            (FrameHeader.StructSize + header.payloadLength), receivedBuffers[newBufferIndex], 0,
                            newBufferDataSize);
                    }

                    receivedBufferIndex = newBufferIndex;
                    receivedBufferDataSize = newBufferDataSize;
                }
                else
                {
                    // Only part of the payload has arrived; wait for more data.
                    // Without this break the loop would spin forever, since the
                    // condition re-evaluates unchanged state.
                    break;
                }
            }

            readyReceiveDataEvent.Set();
        }
        catch (SocketException e)
        {
            Debug.LogErrorFormat("[OVRNetworkTcpClient] OnReadDataCallback: socket error: {0}", e.Message);
            Disconnect();
        }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: df7e9afcfd21ebd44951ca0eeb5cd692
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,162 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Helper class to handle generic class object pools and avoid allocations in the SDK that would lead to garbage collection.
/// </summary>
internal static class OVRObjectPool
{
    // Per-type pool storage. A HashSet gives O(1) Add/Remove and guarantees the
    // same instance cannot be returned to the pool twice.
    private static class Storage<T> where T : class, new()
    {
        public static readonly HashSet<T> HashSet = new HashSet<T>();
    }

    /// <summary>
    /// Gets an object of type T from its respective pool. If none is available a new one is created.
    /// </summary>
    /// <returns>Object of type T</returns>
    public static T Get<T>() where T : class, new()
    {
        using var enumerator = Storage<T>.HashSet.GetEnumerator();
        if (!enumerator.MoveNext()) return new T();

        var item = enumerator.Current;
        Storage<T>.HashSet.Remove(item);

        // Defensively clear common collection types so callers always receive an
        // empty collection even if it was returned dirty.
        if (item is IList list) list.Clear();
        else if (item is IDictionary dict) dict.Clear();

        return item;
    }

    /// <summary>Gets a pooled <see cref="List{T}"/> (always empty).</summary>
    public static List<T> List<T>() => Get<List<T>>();

    /// <summary>Gets a pooled <see cref="Dictionary{TKey,TValue}"/> (always empty).</summary>
    public static Dictionary<TKey, TValue> Dictionary<TKey, TValue>() => Get<Dictionary<TKey, TValue>>();

    /// <summary>Gets a pooled <see cref="HashSet{T}"/> (always empty).</summary>
    public static HashSet<T> HashSet<T>()
    {
        // Get<T>() only clears IList/IDictionary, so clear explicitly here.
        var item = Get<HashSet<T>>();
        item.Clear();
        return item;
    }

    /// <summary>Gets a pooled <see cref="Stack{T}"/> (always empty).</summary>
    public static Stack<T> Stack<T>()
    {
        var item = Get<Stack<T>>();
        item.Clear();
        return item;
    }

    /// <summary>Gets a pooled <see cref="Queue{T}"/> (always empty).</summary>
    public static Queue<T> Queue<T>()
    {
        var item = Get<Queue<T>>();
        item.Clear();
        return item;
    }

    /// <summary>
    /// Returns an object of type T to its respective pool. If the object is null or already present in the pool no changes are made.
    /// </summary>
    /// <remarks>
    /// After returning an object to the object pool using it is not allowed and leads to undefined behaviour, please <see cref="Get{T}"/> another object from the pool instead.
    /// </remarks>
    public static void Return<T>(T obj) where T : class, new()
    {
        switch (obj)
        {
            case null: return;
            case IList list:
                list.Clear();
                break;
            case IDictionary dict:
                dict.Clear();
                break;
        }

        Storage<T>.HashSet.Add(obj);
    }

    /// <summary>Clears and returns a <see cref="HashSet{T}"/> to the pool.</summary>
    public static void Return<T>(HashSet<T> set)
    {
        set?.Clear();
        Return<HashSet<T>>(set);
    }

    /// <summary>Clears and returns a <see cref="Stack{T}"/> to the pool.</summary>
    public static void Return<T>(Stack<T> stack)
    {
        stack?.Clear();
        Return<Stack<T>>(stack);
    }

    /// <summary>Clears and returns a <see cref="Queue{T}"/> to the pool.</summary>
    public static void Return<T>(Queue<T> queue)
    {
        queue?.Clear();
        Return<Queue<T>>(queue);
    }

    /// <summary>
    /// Scope helper: rents a pooled list and returns it to the pool on Dispose.
    /// </summary>
    // Made a readonly struct with a readonly field for consistency with the
    // other *Scope helpers below (a mutable struct here invites copy bugs).
    public readonly struct ListScope<T> : IDisposable
    {
        readonly List<T> _list;
        public ListScope(out List<T> list) => _list = list = List<T>();
        public void Dispose() => Return(_list);
    }

    /// <summary>
    /// Scope helper: rents a pooled dictionary and returns it on Dispose.
    /// </summary>
    public readonly struct DictionaryScope<TKey, TValue> : IDisposable
    {
        readonly Dictionary<TKey, TValue> _dictionary;

        public DictionaryScope(out Dictionary<TKey, TValue> dictionary)
            => _dictionary = dictionary = Dictionary<TKey, TValue>();

        public void Dispose() => Return(_dictionary);
    }

    /// <summary>
    /// Scope helper: rents a pooled hash set and returns it on Dispose.
    /// </summary>
    public readonly struct HashSetScope<T> : IDisposable
    {
        readonly HashSet<T> _set;
        public HashSetScope(out HashSet<T> set) => _set = set = HashSet<T>();
        public void Dispose() => Return(_set);
    }

    /// <summary>
    /// Scope helper: rents a pooled stack and returns it on Dispose.
    /// </summary>
    public readonly struct StackScope<T> : IDisposable
    {
        readonly Stack<T> _stack;
        public StackScope(out Stack<T> stack) => _stack = stack = Stack<T>();
        public void Dispose() => Return(_stack);
    }

    /// <summary>
    /// Scope helper: rents a pooled queue and returns it on Dispose.
    /// </summary>
    public readonly struct QueueScope<T> : IDisposable
    {
        readonly Queue<T> _queue;
        public QueueScope(out Queue<T> queue) => _queue = queue = Queue<T>();
        public void Dispose() => Return(_queue);
    }

    /// <summary>
    /// Scope helper: rents any pooled object and returns it on Dispose.
    /// </summary>
    public readonly struct ItemScope<T> : IDisposable where T : class, new()
    {
        readonly T _item;
        public ItemScope(out T item) => _item = item = Get<T>();
        public void Dispose() => Return(_item);
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: acabf7b7905ecaa4e8bc2cb28efaabcc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,182 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Simple event system using physics raycasts. Very closely based on UnityEngine.EventSystems.PhysicsRaycaster
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_physics_raycaster")]
public class OVRPhysicsRaycaster : BaseRaycaster
{
    /// <summary>
    /// Const to use for clarity when no event mask is set
    /// </summary>
    protected const int kNoEventMaskSet = -1;

    /// <summary>
    /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
    /// </summary>
    [SerializeField]
    protected LayerMask m_EventMask = kNoEventMaskSet;

    protected OVRPhysicsRaycaster()
    {
    }

    // NOTE(review): fetches the OVRCameraRig on every access rather than caching;
    // assumes this component lives on the same GameObject as the rig — confirm.
    public override Camera eventCamera
    {
        get { return GetComponent<OVRCameraRig>().leftEyeCamera; }
    }

    /// <summary>
    /// Depth used to determine the order of event processing.
    /// </summary>
    public virtual int depth
    {
        get { return (eventCamera != null) ? (int)eventCamera.depth : 0xFFFFFF; }
    }

    public int sortOrder = 0;

    public override int sortOrderPriority
    {
        get { return sortOrder; }
    }

    /// <summary>
    /// Event mask used to determine which objects will receive events.
    /// </summary>
    public int finalEventMask
    {
        get { return (eventCamera != null) ? eventCamera.cullingMask & m_EventMask : kNoEventMaskSet; }
    }

    /// <summary>
    /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
    /// </summary>
    public LayerMask eventMask
    {
        get { return m_EventMask; }
        set { m_EventMask = value; }
    }

    /// <summary>
    /// Perform a raycast using the worldSpaceRay in eventData.
    /// </summary>
    /// <param name="eventData">Must be an OVRPointerEventData carrying a world-space ray.</param>
    /// <param name="resultAppendList">List the raycast results are appended to.</param>
    public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
    {
        // This function is closely based on PhysicsRaycaster.Raycast
        if (eventCamera == null)
            return;

        if (!eventData.IsVRPointer())
            return;

        var ray = eventData.GetRay();

        float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

        var hits = Physics.RaycastAll(ray, dist, finalEventMask);

        // RaycastAll returns hits in no guaranteed order; sort nearest-first.
        if (hits.Length > 1)
            System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

        if (hits.Length != 0)
        {
            for (int b = 0, bmax = hits.Length; b < bmax; ++b)
            {
                var result = new RaycastResult
                {
                    gameObject = hits[b].collider.gameObject,
                    module = this,
                    distance = hits[b].distance,
                    index = resultAppendList.Count,
                    // Bug fix: previously used hits[0], so every result reported
                    // the first hit's world position/normal.
                    worldPosition = hits[b].point,
                    worldNormal = hits[b].normal,
                };
                resultAppendList.Add(result);
            }
        }
    }

    /// <summary>
    /// Perform a Spherecast using the worldSpaceRay in eventData.
    /// </summary>
    /// <param name="eventData">Must be an OVRPointerEventData carrying a world-space ray.</param>
    /// <param name="resultAppendList">List the spherecast results are appended to.</param>
    /// <param name="radius">Radius of the sphere</param>
    public void Spherecast(PointerEventData eventData, List<RaycastResult> resultAppendList, float radius)
    {
        if (eventCamera == null)
            return;

        if (!eventData.IsVRPointer())
            return;

        var ray = eventData.GetRay();

        float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

        var hits = Physics.SphereCastAll(ray, radius, dist, finalEventMask);

        if (hits.Length > 1)
            System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

        if (hits.Length != 0)
        {
            for (int b = 0, bmax = hits.Length; b < bmax; ++b)
            {
                var result = new RaycastResult
                {
                    gameObject = hits[b].collider.gameObject,
                    module = this,
                    distance = hits[b].distance,
                    index = resultAppendList.Count,
                    // Bug fix: previously used hits[0] (see Raycast above).
                    worldPosition = hits[b].point,
                    worldNormal = hits[b].normal,
                };
                resultAppendList.Add(result);
            }
        }
    }

    /// <summary>
    /// Get screen position of this world position as seen by the event camera of this OVRPhysicsRaycaster
    /// </summary>
    /// <param name="worldPosition">World-space point to project.</param>
    /// <returns>Screen-space position of the point.</returns>
    public Vector2 GetScreenPos(Vector3 worldPosition)
    {
        // In future versions of Unity RaycastResult will contain screenPosition so this will not be necessary
        return eventCamera.WorldToScreenPoint(worldPosition);
    }
}
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f8e7ff1cdf4c4e74db00c3684108bc9a
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,650 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using UnityEngine;
/// <summary>
/// Controls the player's movement in virtual reality.
/// </summary>
[RequireComponent(typeof(CharacterController))]
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_player_controller")]
public class OVRPlayerController : MonoBehaviour
{
    /// <summary>
    /// The rate acceleration during movement.
    /// </summary>
    public float Acceleration = 0.1f;

    /// <summary>
    /// The rate of damping on movement.
    /// </summary>
    public float Damping = 0.3f;

    /// <summary>
    /// The rate of additional damping when moving sideways or backwards.
    /// </summary>
    public float BackAndSideDampen = 0.5f;

    /// <summary>
    /// The force applied to the character when jumping.
    /// </summary>
    public float JumpForce = 0.3f;

    /// <summary>
    /// The rate of rotation when using a gamepad.
    /// </summary>
    public float RotationAmount = 1.5f;

    /// <summary>
    /// The rate of rotation when using the keyboard.
    /// </summary>
    public float RotationRatchet = 45.0f;

    /// <summary>
    /// The player will rotate in fixed steps if Snap Rotation is enabled.
    /// </summary>
    [Tooltip("The player will rotate in fixed steps if Snap Rotation is enabled.")]
    public bool SnapRotation = true;

    /// <summary>
    /// [Deprecated] When enabled, snap rotation will happen about the guardian rather
    /// than the player/camera viewpoint.
    /// </summary>
    [Tooltip("[Deprecated] When enabled, snap rotation will happen about the center of the " +
             "guardian rather than the center of the player/camera viewpoint. This (legacy) " +
             "option should be left off except for edge cases that require extreme behavioral " +
             "backwards compatibility.")]
    public bool RotateAroundGuardianCenter = false;

    /// <summary>
    /// How many fixed speeds to use with linear movement? 0=linear control
    /// </summary>
    [Tooltip("How many fixed speeds to use with linear movement? 0=linear control")]
    public int FixedSpeedSteps;

    /// <summary>
    /// If true, reset the initial yaw of the player controller when the Hmd pose is recentered.
    /// </summary>
    public bool HmdResetsY = true;

    /// <summary>
    /// If true, tracking data from a child OVRCameraRig will update the direction of movement.
    /// </summary>
    public bool HmdRotatesY = true;

    /// <summary>
    /// Modifies the strength of gravity.
    /// </summary>
    public float GravityModifier = 0.379f;

    /// <summary>
    /// If true, each OVRPlayerController will use the player's physical height.
    /// </summary>
    public bool useProfileData = true;

    /// <summary>
    /// The CameraHeight is the actual height of the HMD and can be used to adjust the height of the character controller, which will affect the
    /// ability of the character to move into areas with a low ceiling.
    /// </summary>
    [NonSerialized]
    public float CameraHeight;

    /// <summary>
    /// This event is raised after the character controller is moved. This is used by the OVRAvatarLocomotion script to keep the avatar transform synchronized
    /// with the OVRPlayerController.
    /// </summary>
    public event Action<Transform> TransformUpdated;

    /// <summary>
    /// This bool is set to true whenever the player controller has been teleported. It is reset after every frame. Some systems, such as
    /// CharacterCameraConstraint, test this boolean in order to disable logic that moves the character controller immediately
    /// following the teleport.
    /// </summary>
    [NonSerialized] // This doesn't need to be visible in the inspector.
    public bool Teleported;

    /// <summary>
    /// This event is raised immediately after the camera transform has been updated, but before movement is updated.
    /// </summary>
    public event Action CameraUpdated;

    /// <summary>
    /// This event is raised right before the character controller is actually moved in order to provide other systems the opportunity to
    /// move the character controller in response to things other than user input, such as movement of the HMD. See CharacterCameraConstraint.cs
    /// for an example of this.
    /// </summary>
    public event Action PreCharacterMove;

    /// <summary>
    /// When true, user input will be applied to linear movement. Set this to false whenever the player controller needs to ignore input for
    /// linear movement.
    /// </summary>
    public bool EnableLinearMovement = true;

    /// <summary>
    /// When true, user input will be applied to rotation. Set this to false whenever the player controller needs to ignore input for rotation.
    /// </summary>
    public bool EnableRotation = true;

    /// <summary>
    /// Rotation defaults to secondary thumbstick. You can allow either here. Note that this won't behave well if EnableLinearMovement is true.
    /// </summary>
    public bool RotationEitherThumbstick = false;

    // Character controller this script drives; fetched in Awake().
    protected CharacterController Controller = null;

    // Child camera rig used for HMD-driven rotation and height; found in Awake().
    protected OVRCameraRig CameraRig = null;

    // Scale applied to keyboard/dpad movement this frame (forced to 0 while airborne).
    private float MoveScale = 1.0f;

    // Accumulated, damped movement impulse applied to the controller each frame.
    private Vector3 MoveThrottle = Vector3.zero;

    // Current vertical speed from the gravity integration in UpdateController().
    private float FallSpeed = 0.0f;

    // Camera rig local pose saved when useProfileData first takes effect;
    // restored (and cleared) if useProfileData is turned off at runtime.
    private OVRPose? InitialPose;

    // Yaw of this transform captured in Awake(), used by ResetOrientation().
    public float InitialYRotation { get; private set; }

    // External multiplier applied to linear movement (see Set/GetMoveScaleMultiplier).
    private float MoveScaleMultiplier = 1.0f;

    // External multiplier applied to rotation (see Set/GetRotationScaleMultiplier).
    private float RotationScaleMultiplier = 1.0f;

    // It is rare to want to use mouse movement in VR, so ignore the mouse by default.
    private bool SkipMouseRotation = true;

    // When true, UpdateMovement() returns immediately without reading input.
    private bool HaltUpdateMovement = false;

    // Previous-frame shoulder-button states, used for edge-triggered ratchet turns.
    private bool prevHatLeft = false;

    private bool prevHatRight = false;

    // Reference frame rate used to make the movement math frame-rate independent.
    private float SimulationRate = 60f;

    // Pending yaw (degrees) accumulated from the Q/E keys, consumed in UpdateMovement().
    private float buttonRotation = 0f;

    // Set to true when a snap turn has occurred, code requires one frame of centered thumbstick to enable another snap turn.
    private bool ReadyToSnapTurn;

    // True once the OVRManager/camera-rig callbacks have been hooked up (done lazily in Update()).
    private bool playerControllerEnabled = false;

    // Positions the camera rig at the profile's eye depth behind the controller pivot.
    void Start()
    {
        // Add eye-depth as a camera offset from the player controller
        var p = CameraRig.transform.localPosition;
        p.z = OVRManager.profile.eyeDepth;
        CameraRig.transform.localPosition = p;
    }

    // Caches the CharacterController and the (single expected) child OVRCameraRig,
    // and records the starting yaw for ResetOrientation().
    void Awake()
    {
        Controller = gameObject.GetComponent<CharacterController>();

        if (Controller == null)
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (CameraRigs.Length == 0)
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        else if (CameraRigs.Length > 1)
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        else
            CameraRig = CameraRigs[0];

        InitialYRotation = transform.rotation.eulerAngles.y;
    }

    void OnEnable()
    {
        // Intentionally empty: callback registration happens lazily in Update()
        // once OVRManager reports it is initialized.
    }

    // Unhooks the callbacks that Update() registered, if any.
    void OnDisable()
    {
        if (playerControllerEnabled)
        {
            OVRManager.display.RecenteredPose -= ResetOrientation;

            if (CameraRig != null)
            {
                CameraRig.UpdatedAnchors -= UpdateTransform;
            }

            playerControllerEnabled = false;
        }
    }

    // Lazily hooks the OVRManager/camera-rig callbacks (once OVRManager is
    // initialized) and reads the keyboard ratchet-turn keys. The actual movement
    // runs from UpdateTransform(), driven by the rig's UpdatedAnchors event.
    void Update()
    {
        if (!playerControllerEnabled)
        {
            if (OVRManager.OVRManagerinitialized)
            {
                OVRManager.display.RecenteredPose += ResetOrientation;

                if (CameraRig != null)
                {
                    CameraRig.UpdatedAnchors += UpdateTransform;
                }

                playerControllerEnabled = true;
            }
            else
                return;
        }

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        //Use keys to ratchet rotation
        if (Input.GetKeyDown(KeyCode.Q))
            buttonRotation -= RotationRatchet;

        if (Input.GetKeyDown(KeyCode.E))
            buttonRotation += RotationRatchet;
#endif
    }

    // Applies profile-based camera height, raises CameraUpdated/PreCharacterMove,
    // reads input via UpdateMovement(), then integrates damping and gravity and
    // finally moves the CharacterController.
    protected virtual void UpdateController()
    {
        if (useProfileData)
        {
            if (InitialPose == null)
            {
                // Save the initial pose so it can be recovered if useProfileData
                // is turned off later.
                InitialPose = new OVRPose()
                {
                    position = CameraRig.transform.localPosition,
                    orientation = CameraRig.transform.localRotation
                };
            }

            var p = CameraRig.transform.localPosition;

            if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel)
            {
                p.y = OVRManager.profile.eyeHeight - (0.5f * Controller.height) + Controller.center.y;
            }
            else if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.FloorLevel)
            {
                p.y = -(0.5f * Controller.height) + Controller.center.y;
            }

            CameraRig.transform.localPosition = p;
        }
        else if (InitialPose != null)
        {
            // Return to the initial pose if useProfileData was turned off at runtime
            CameraRig.transform.localPosition = InitialPose.Value.position;
            CameraRig.transform.localRotation = InitialPose.Value.orientation;
            InitialPose = null;
        }

        CameraHeight = CameraRig.centerEyeAnchor.localPosition.y;

        if (CameraUpdated != null)
        {
            CameraUpdated();
        }

        UpdateMovement();

        Vector3 moveDirection = Vector3.zero;

        // Frame-rate independent exponential damping of the accumulated throttle.
        float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime));

        MoveThrottle.x /= motorDamp;
        // Only damp upward (jump) velocity; downward motion is handled by FallSpeed below.
        MoveThrottle.y = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
        MoveThrottle.z /= motorDamp;

        moveDirection += MoveThrottle * SimulationRate * Time.deltaTime;

        // Gravity
        if (Controller.isGrounded && FallSpeed <= 0)
            FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
        else
            FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime);

        moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime;


        if (Controller.isGrounded && MoveThrottle.y <= transform.lossyScale.y * 0.001f)
        {
            // Offset correction for uneven ground
            float bumpUpOffset = Mathf.Max(Controller.stepOffset,
                new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
            moveDirection -= bumpUpOffset * Vector3.up;
        }

        if (PreCharacterMove != null)
        {
            PreCharacterMove();
            Teleported = false;
        }

        Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1));

        // Move controller
        Controller.Move(moveDirection);
        Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));

        // If the controller was blocked (e.g. by a wall), bleed the blocked amount
        // back out of the throttle so we don't keep pushing into the obstacle.
        if (predictedXZ != actualXZ)
            MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime);
    }

    /// <summary>
    /// Reads keyboard, gamepad and thumbstick input and converts it into the
    /// MoveThrottle impulse and the yaw rotation for this frame.
    /// </summary>
    public virtual void UpdateMovement()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (HaltUpdateMovement)
            return;

        if (EnableLinearMovement)
        {
            bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow);
            bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow);
            bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow);
            bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow);

            bool dpad_move = false;

            if (OVRInput.Get(OVRInput.Button.DpadUp))
            {
                moveForward = true;
                dpad_move = true;
            }

            if (OVRInput.Get(OVRInput.Button.DpadDown))
            {
                moveBack = true;
                dpad_move = true;
            }

            MoveScale = 1.0f;

            // Diagonal movement: scale by 1/sqrt(2) so speed matches straight movement.
            if ((moveForward && moveLeft) || (moveForward && moveRight) ||
                (moveBack && moveLeft) || (moveBack && moveRight))
                MoveScale = 0.70710678f;

            // No positional movement if we are in the air
            if (!Controller.isGrounded)
                MoveScale = 0.0f;

            MoveScale *= SimulationRate * Time.deltaTime;

            // Compute this for key movement
            float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;

            // Run!
            if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift))
                moveInfluence *= 2.0f;

            // Movement is applied in the yaw-only frame of the player (pitch/roll zeroed).
            Quaternion ort = transform.rotation;
            Vector3 ortEuler = ort.eulerAngles;
            ortEuler.z = ortEuler.x = 0f;
            ort = Quaternion.Euler(ortEuler);

            if (moveForward)
                MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward);
            if (moveBack)
                MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back);
            if (moveLeft)
                MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left);
            if (moveRight)
                MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right);


            moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;

#if !UNITY_ANDROID // LeftTrigger not avail on Android game pad
            moveInfluence *= 1.0f + OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
#endif

            Vector2 primaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);

            // If speed quantization is enabled, adjust the input to the number of fixed speed steps.
            if (FixedSpeedSteps > 0)
            {
                primaryAxis.y = Mathf.Round(primaryAxis.y * FixedSpeedSteps) / FixedSpeedSteps;
                primaryAxis.x = Mathf.Round(primaryAxis.x * FixedSpeedSteps) / FixedSpeedSteps;
            }

            if (primaryAxis.y > 0.0f)
                MoveThrottle += ort * (primaryAxis.y * transform.lossyScale.z * moveInfluence * Vector3.forward);

            if (primaryAxis.y < 0.0f)
                MoveThrottle += ort * (Mathf.Abs(primaryAxis.y) * transform.lossyScale.z * moveInfluence *
                                       BackAndSideDampen * Vector3.back);

            if (primaryAxis.x < 0.0f)
                MoveThrottle += ort * (Mathf.Abs(primaryAxis.x) * transform.lossyScale.x * moveInfluence *
                                       BackAndSideDampen * Vector3.left);

            if (primaryAxis.x > 0.0f)
                MoveThrottle += ort * (primaryAxis.x * transform.lossyScale.x * moveInfluence * BackAndSideDampen *
                                       Vector3.right);
        }

        if (EnableRotation)
        {
            Vector3 euler = RotateAroundGuardianCenter ? transform.rotation.eulerAngles : Vector3.zero;
            float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;

            bool curHatLeft = OVRInput.Get(OVRInput.Button.PrimaryShoulder);

            // Edge-triggered ratchet turns on the shoulder buttons.
            if (curHatLeft && !prevHatLeft)
                euler.y -= RotationRatchet;

            prevHatLeft = curHatLeft;

            bool curHatRight = OVRInput.Get(OVRInput.Button.SecondaryShoulder);

            if (curHatRight && !prevHatRight)
                euler.y += RotationRatchet;

            prevHatRight = curHatRight;

            // Consume any yaw accumulated from the Q/E keys in Update().
            euler.y += buttonRotation;
            buttonRotation = 0f;


#if !UNITY_ANDROID || UNITY_EDITOR
            if (!SkipMouseRotation)
                euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f;
#endif

            if (SnapRotation)
            {
                if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickLeft) ||
                    (RotationEitherThumbstick && OVRInput.Get(OVRInput.Button.PrimaryThumbstickLeft)))
                {
                    if (ReadyToSnapTurn)
                    {
                        euler.y -= RotationRatchet;
                        ReadyToSnapTurn = false;
                    }
                }
                else if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickRight) ||
                         (RotationEitherThumbstick && OVRInput.Get(OVRInput.Button.PrimaryThumbstickRight)))
                {
                    if (ReadyToSnapTurn)
                    {
                        euler.y += RotationRatchet;
                        ReadyToSnapTurn = false;
                    }
                }
                else
                {
                    // Thumbstick centered again: re-arm the snap turn.
                    ReadyToSnapTurn = true;
                }
            }
            else
            {
                Vector2 secondaryAxis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
                if (RotationEitherThumbstick)
                {
                    // Use whichever thumbstick is deflected further.
                    Vector2 altSecondaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
                    if (secondaryAxis.sqrMagnitude < altSecondaryAxis.sqrMagnitude)
                    {
                        secondaryAxis = altSecondaryAxis;
                    }
                }

                euler.y += secondaryAxis.x * rotateInfluence;
            }

            if (RotateAroundGuardianCenter)
            {
                transform.rotation = Quaternion.Euler(euler);
            }
            else
            {
                transform.RotateAround(CameraRig.centerEyeAnchor.position, Vector3.up, euler.y);
            }
        }
#endif
    }

    /// <summary>
    /// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
    /// </summary>
    public void UpdateTransform(OVRCameraRig rig)
    {
        Transform root = CameraRig.trackingSpace;
        Transform centerEye = CameraRig.centerEyeAnchor;

        if (HmdRotatesY && !Teleported)
        {
            // Apply the HMD yaw to the player while keeping the tracking space
            // world pose unchanged (restored after the rotation).
            Vector3 prevPos = root.position;
            Quaternion prevRot = root.rotation;

            transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

            root.position = prevPos;
            root.rotation = prevRot;
        }

        UpdateController();
        if (TransformUpdated != null)
        {
            TransformUpdated(root);
        }
    }

    /// <summary>
    /// Jump! Must be enabled manually.
    /// </summary>
    public bool Jump()
    {
        if (!Controller.isGrounded)
            return false;

        MoveThrottle += new Vector3(0, transform.lossyScale.y * JumpForce, 0);

        return true;
    }

    /// <summary>
    /// Stop this instance.
    /// </summary>
    public void Stop()
    {
        Controller.Move(Vector3.zero);
        MoveThrottle = Vector3.zero;
        FallSpeed = 0.0f;
    }

    /// <summary>
    /// Gets the move scale multiplier.
    /// </summary>
    /// <param name="moveScaleMultiplier">Move scale multiplier.</param>
    public void GetMoveScaleMultiplier(ref float moveScaleMultiplier)
    {
        moveScaleMultiplier = MoveScaleMultiplier;
    }

    /// <summary>
    /// Sets the move scale multiplier.
    /// </summary>
    /// <param name="moveScaleMultiplier">Move scale multiplier.</param>
    public void SetMoveScaleMultiplier(float moveScaleMultiplier)
    {
        MoveScaleMultiplier = moveScaleMultiplier;
    }

    /// <summary>
    /// Gets the rotation scale multiplier.
    /// </summary>
    /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
    public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier)
    {
        rotationScaleMultiplier = RotationScaleMultiplier;
    }

    /// <summary>
    /// Sets the rotation scale multiplier.
    /// </summary>
    /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
    public void SetRotationScaleMultiplier(float rotationScaleMultiplier)
    {
        RotationScaleMultiplier = rotationScaleMultiplier;
    }

    /// <summary>
    /// Gets the allow mouse rotation.
    /// </summary>
    /// <param name="skipMouseRotation">Allow mouse rotation.</param>
    public void GetSkipMouseRotation(ref bool skipMouseRotation)
    {
        skipMouseRotation = SkipMouseRotation;
    }

    /// <summary>
    /// Sets the allow mouse rotation.
    /// </summary>
    /// <param name="skipMouseRotation">If set to <c>true</c> allow mouse rotation.</param>
    public void SetSkipMouseRotation(bool skipMouseRotation)
    {
        SkipMouseRotation = skipMouseRotation;
    }

    /// <summary>
    /// Gets the halt update movement.
    /// </summary>
    /// <param name="haltUpdateMovement">Halt update movement.</param>
    public void GetHaltUpdateMovement(ref bool haltUpdateMovement)
    {
        haltUpdateMovement = HaltUpdateMovement;
    }

    /// <summary>
    /// Sets the halt update movement.
    /// </summary>
    /// <param name="haltUpdateMovement">If set to <c>true</c> halt update movement.</param>
    public void SetHaltUpdateMovement(bool haltUpdateMovement)
    {
        HaltUpdateMovement = haltUpdateMovement;
    }

    /// <summary>
    /// Resets the player look rotation when the device orientation is reset.
    /// </summary>
    public void ResetOrientation()
    {
        if (HmdResetsY && !HmdRotatesY)
        {
            Vector3 euler = transform.rotation.eulerAngles;
            euler.y = InitialYRotation;
            transform.rotation = Quaternion.Euler(euler);
        }
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0950df82e7936c84983497630bde5b54
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,95 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Text;
using UnityEngine;
using UnityEngine.Assertions;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Extension of Unity's PointerEventData to support ray based pointing and also touchpad swiping
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_pointer_event_data")]
public class OVRPointerEventData : PointerEventData
{
    // World-space ray used for pointing instead of a 2D screen position.
    public Ray worldSpaceRay;

    // Touchpad position at which the current swipe gesture started.
    public Vector2 swipeStart;

    public OVRPointerEventData(EventSystem eventSystem)
        : base(eventSystem)
    {
    }

    /// <summary>
    /// Dumps the pointer state, including the VR-specific fields, for debugging.
    /// </summary>
    public override string ToString()
    {
        var builder = new StringBuilder();
        builder.AppendLine("<b>Position</b>: " + position);
        builder.AppendLine("<b>delta</b>: " + delta);
        builder.AppendLine("<b>eligibleForClick</b>: " + eligibleForClick);
        builder.AppendLine("<b>pointerEnter</b>: " + pointerEnter);
        builder.AppendLine("<b>pointerPress</b>: " + pointerPress);
        builder.AppendLine("<b>lastPointerPress</b>: " + lastPress);
        builder.AppendLine("<b>pointerDrag</b>: " + pointerDrag);
        builder.AppendLine("<b>worldSpaceRay</b>: " + worldSpaceRay);
        builder.AppendLine("<b>swipeStart</b>: " + swipeStart);
        builder.AppendLine("<b>Use Drag Threshold</b>: " + useDragThreshold);
        return builder.ToString();
    }
}
/// <summary>
/// Static helpers for OVRPointerEventData.
/// </summary>
public static class PointerEventDataExtension
{
    /// <summary>
    /// True when the event data originates from a VR (ray based) pointer.
    /// </summary>
    public static bool IsVRPointer(this PointerEventData pointerEventData)
    {
        return pointerEventData is OVRPointerEventData;
    }

    /// <summary>
    /// Returns the world-space ray carried by a VR pointer event.
    /// </summary>
    public static Ray GetRay(this PointerEventData pointerEventData)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        return ovrEventData.worldSpaceRay;
    }

    /// <summary>
    /// Returns the swipe start position carried by a VR pointer event.
    /// </summary>
    public static Vector2 GetSwipeStart(this PointerEventData pointerEventData)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        return ovrEventData.swipeStart;
    }

    /// <summary>
    /// Records the swipe start position on a VR pointer event.
    /// </summary>
    public static void SetSwipeStart(this PointerEventData pointerEventData, Vector2 start)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        ovrEventData.swipeStart = start;
    }
}
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 646c937ce12610744adc2b5e487f77ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Moved to /Scripts/Editor. This stub only exists to overwrite previous instances of OVRProfiler.
//
namespace Oculus.Deprecated
{
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3303d4232ee59ac40a9fdc223870fbbc
timeCreated: 1520636357
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,28 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using UnityEngine.Profiling;
/// <summary>
/// Disposable wrapper around a Unity profiler sample: construction calls
/// Profiler.BeginSample, disposal calls Profiler.EndSample, so a profiled
/// region can be expressed with a <c>using</c> statement.
/// </summary>
internal struct OVRProfilerScope : IDisposable
{
    public OVRProfilerScope(string name)
    {
        Profiler.BeginSample(name);
    }

    void IDisposable.Dispose()
    {
        Profiler.EndSample();
    }
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 9543f58e29dd4e78866a84f468672b25
timeCreated: 1660697239

View File

@ -0,0 +1,47 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
/// <summary>
/// Visualizes progress for operations such as loading.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_progress_indicator")]
public class OVRProgressIndicator : MonoBehaviour
{
    // Renderer whose material drives the progress visual via the "_AlphaCutoff" shader property.
    public MeshRenderer progressImage;

    // Progress fraction in [0, 1]; mapped to the shader as 1 - currentProgress.
    [Range(0, 1)]
    public float currentProgress = 0.7f;

    // Cached shader property id; avoids re-hashing the property name string every frame in Update.
    private static readonly int AlphaCutoffId = Shader.PropertyToID("_AlphaCutoff");

    void Awake()
    {
        // Render on top of most other geometry in the same sorting layer.
        progressImage.sortingOrder = 150;
    }

    // Update is called once per frame
    void Update()
    {
        // NOTE: writes to sharedMaterial, so every renderer using this material is affected.
        progressImage.sharedMaterial.SetFloat(AlphaCutoffId, 1 - currentProgress);
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f14ece5575e2b1e4d80619901d65b428
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,336 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
using UnityEngine.Serialization;
/// <summary>
/// Extension of GraphicRaycaster to support ray casting with world space rays instead of just screen-space
/// pointer positions
/// </summary>
[RequireComponent(typeof(Canvas))]
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_raycaster")]
public class OVRRaycaster : GraphicRaycaster, IPointerEnterHandler
{
    [Tooltip("A world space pointer for this canvas")]
    public GameObject pointer;

    // Sort order reported to the event system through sortOrderPriority.
    public int sortOrder = 0;

    protected OVRRaycaster()
    {
    }

    [NonSerialized]
    private Canvas m_Canvas;

    // Lazily cached Canvas on this GameObject ([RequireComponent] guarantees one exists).
    private Canvas canvas
    {
        get
        {
            if (m_Canvas != null)
                return m_Canvas;
            m_Canvas = GetComponent<Canvas>();
            return m_Canvas;
        }
    }

    // The camera used for UI event processing: the canvas' world camera (set in Start if missing).
    public override Camera eventCamera
    {
        get { return canvas.worldCamera; }
    }

    public override int sortOrderPriority
    {
        get { return sortOrder; }
    }

    protected override void Start()
    {
        // Fall back to the center-eye camera of the first OVRCameraRig found in the scene.
        if (!canvas.worldCamera)
        {
            Debug.Log("Canvas does not have an event camera attached. " +
                      "Attaching OVRCameraRig.centerEyeAnchor as default.");
            OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
            canvas.worldCamera = rig.centerEyeAnchor.gameObject.GetComponent<Camera>();
        }
    }

    /// <summary>
    /// For the given ray, find graphics on this canvas which it intersects and are not blocked by other
    /// world objects
    /// </summary>
    [NonSerialized]
    // Scratch buffer reused between casts to avoid per-call allocations.
    private List<RaycastHit> m_RaycastResults = new List<RaycastHit>();

    private void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList, Ray ray,
        bool checkForBlocking)
    {
        //This function is closely based on
        //void GraphicRaycaster.Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
        if (canvas == null)
            return;
        float hitDistance = float.MaxValue;
        // When blocking is enabled, find the nearest 3D/2D physics hit along the ray;
        // any graphic farther than that hit is considered occluded and is skipped below.
        if (checkForBlocking && blockingObjects != BlockingObjects.None)
        {
            float dist = eventCamera.farClipPlane;
            if (blockingObjects == BlockingObjects.ThreeD || blockingObjects == BlockingObjects.All)
            {
                var hits = Physics.RaycastAll(ray, dist, m_BlockingMask);
                if (hits.Length > 0 && hits[0].distance < hitDistance)
                {
                    hitDistance = hits[0].distance;
                }
            }
            if (blockingObjects == BlockingObjects.TwoD || blockingObjects == BlockingObjects.All)
            {
                var hits = Physics2D.GetRayIntersectionAll(ray, dist, m_BlockingMask);
                if (hits.Length > 0 && hits[0].fraction * dist < hitDistance)
                {
                    hitDistance = hits[0].fraction * dist;
                }
            }
        }
        m_RaycastResults.Clear();
        GraphicRaycast(canvas, ray, m_RaycastResults);
        for (var index = 0; index < m_RaycastResults.Count; index++)
        {
            var go = m_RaycastResults[index].graphic.gameObject;
            bool appendGraphic = true;
            if (ignoreReversedGraphics)
            {
                // If we have a camera compare the direction against the cameras forward.
                var cameraFoward = ray.direction;
                var dir = go.transform.rotation * Vector3.forward;
                appendGraphic = Vector3.Dot(cameraFoward, dir) > 0;
            }
            // Ignore points behind us (can happen with a canvas pointer)
            if (eventCamera.transform.InverseTransformPoint(m_RaycastResults[index].worldPos).z <= 0)
            {
                appendGraphic = false;
            }
            if (appendGraphic)
            {
                float distance = Vector3.Distance(ray.origin, m_RaycastResults[index].worldPos);
                // Occluded by a blocking physics hit closer than this graphic.
                if (distance >= hitDistance)
                {
                    continue;
                }
                var castResult = new RaycastResult
                {
                    gameObject = go,
                    module = this,
                    distance = distance,
                    index = resultAppendList.Count,
                    depth = m_RaycastResults[index].graphic.depth,
                    worldPosition = m_RaycastResults[index].worldPos
                };
                resultAppendList.Add(castResult);
            }
        }
    }

    /// <summary>
    /// Performs a raycast using eventData.worldSpaceRay
    /// </summary>
    /// <param name="eventData"></param>
    /// <param name="resultAppendList"></param>
    public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
    {
        // Only VR (ray based) pointers are handled; ordinary screen-space pointers are ignored.
        if (eventData.IsVRPointer())
        {
            Raycast(eventData, resultAppendList, eventData.GetRay(), true);
        }
    }

    /// <summary>
    /// Performs a raycast using the pointer object attached to this OVRRaycaster
    /// </summary>
    /// <param name="eventData"></param>
    /// <param name="resultAppendList"></param>
    public void RaycastPointer(PointerEventData eventData, List<RaycastResult> resultAppendList)
    {
        if (pointer != null && pointer.activeInHierarchy)
        {
            // Cast from the event camera through the pointer object; blocking is intentionally disabled here.
            Raycast(eventData, resultAppendList,
                new Ray(eventCamera.transform.position,
                    (pointer.transform.position - eventCamera.transform.position).normalized), false);
        }
    }

    /// <summary>
    /// Perform a raycast into the screen and collect all graphics underneath it.
    /// </summary>
    [NonSerialized]
    // Shared scratch list for depth-sorting hit graphics; cleared on every call to GraphicRaycast.
    static readonly List<RaycastHit> s_SortedGraphics = new List<RaycastHit>();

    private void GraphicRaycast(Canvas canvas, Ray ray, List<RaycastHit> results)
    {
        //This function is based closely on :
        // void GraphicRaycaster.Raycast(Canvas canvas, Camera eventCamera, Vector2 pointerPosition, List<Graphic> results)
        // But modified to take a Ray instead of a canvas pointer, and also to explicitly ignore
        // the graphic associated with the pointer
        // Necessary for the event system
        var foundGraphics = GraphicRegistry.GetGraphicsForCanvas(canvas);
        s_SortedGraphics.Clear();
        for (int i = 0; i < foundGraphics.Count; ++i)
        {
            Graphic graphic = foundGraphics[i];
            // -1 means it hasn't been processed by the canvas, which means it isn't actually drawn
            if (graphic.depth == -1 || (pointer == graphic.gameObject))
                continue;
            Vector3 worldPos;
            if (RayIntersectsRectTransform(graphic.rectTransform, ray, out worldPos))
            {
                //Work out where this is on the screen for compatibility with existing Unity UI code
                Vector2 screenPos = eventCamera.WorldToScreenPoint(worldPos);
                // mask/image intersection - See Unity docs on eventAlphaThreshold for when this does anything
                if (graphic.Raycast(screenPos, eventCamera))
                {
                    RaycastHit hit;
                    hit.graphic = graphic;
                    hit.worldPos = worldPos;
                    hit.fromMouse = false;
                    s_SortedGraphics.Add(hit);
                }
            }
        }
        // Sort by descending graphic depth so the topmost graphic comes first.
        s_SortedGraphics.Sort((g1, g2) => g2.graphic.depth.CompareTo(g1.graphic.depth));
        for (int i = 0; i < s_SortedGraphics.Count; ++i)
        {
            results.Add(s_SortedGraphics[i]);
        }
    }

    /// <summary>
    /// Get screen position of worldPosition contained in this RaycastResult
    /// </summary>
    /// <param name="worldPosition"></param>
    /// <returns></returns>
    public Vector2 GetScreenPosition(RaycastResult raycastResult)
    {
        // In future versions of Unity RaycastResult will contain screenPosition so this will not be necessary
        return eventCamera.WorldToScreenPoint(raycastResult.worldPosition);
    }

    /// <summary>
    /// Detects whether a ray intersects a RectTransform and if it does also
    /// returns the world position of the intersection.
    /// </summary>
    /// <param name="rectTransform"></param>
    /// <param name="ray"></param>
    /// <param name="worldPos"></param>
    /// <returns></returns>
    static bool RayIntersectsRectTransform(RectTransform rectTransform, Ray ray, out Vector3 worldPos)
    {
        // Build the rect's plane from three of its world-space corners.
        Vector3[] corners = new Vector3[4];
        rectTransform.GetWorldCorners(corners);
        Plane plane = new Plane(corners[0], corners[1], corners[2]);
        float enter;
        if (!plane.Raycast(ray, out enter))
        {
            worldPos = Vector3.zero;
            return false;
        }
        Vector3 intersection = ray.GetPoint(enter);
        // Project the intersection onto the rect's two edges to test containment.
        Vector3 BottomEdge = corners[3] - corners[0];
        Vector3 LeftEdge = corners[1] - corners[0];
        float BottomDot = Vector3.Dot(intersection - corners[0], BottomEdge);
        float LeftDot = Vector3.Dot(intersection - corners[0], LeftEdge);
        if (BottomDot < BottomEdge.sqrMagnitude && // Can use sqrMag because BottomEdge is not normalized
            LeftDot < LeftEdge.sqrMagnitude &&
            BottomDot >= 0 &&
            LeftDot >= 0)
        {
            worldPos = corners[0] + LeftDot * LeftEdge / LeftEdge.sqrMagnitude +
                       BottomDot * BottomEdge / BottomEdge.sqrMagnitude;
            return true;
        }
        else
        {
            worldPos = Vector3.zero;
            return false;
        }
    }

    // Internal hit record: graphic struck by the ray plus the world-space hit point.
    struct RaycastHit
    {
        public Graphic graphic;
        public Vector3 worldPos;
        public bool fromMouse;
    };

    /// <summary>
    /// Is this the currently focussed Raycaster according to the InputModule
    /// </summary>
    /// <returns></returns>
    public bool IsFocussed()
    {
        OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
        return inputModule && inputModule.activeGraphicRaycaster == this;
    }

    public void OnPointerEnter(PointerEventData e)
    {
        if (e.IsVRPointer())
        {
            // Gaze has entered this canvas. We'll make it the active one so that canvas-mouse pointer can be used.
            OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
            if (inputModule != null)
            {
                inputModule.activeGraphicRaycaster = this;
            }
        }
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7aaf960227867044282d921171d2d7ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Marker PropertyAttribute for serialized fields. Contains no behavior itself;
/// presumably a matching PropertyDrawer elsewhere renders tagged fields as
/// read-only in the inspector — TODO confirm against the editor scripts.
/// </summary>
internal class OVRReadOnlyAttribute : PropertyAttribute
{
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7ab0aaa927d24aa40b4e2a6dc41aaa35
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,78 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Assets.OVR.Scripts
{
/// <summary>
/// A simple diagnostic entry: a sort key, a category label, and a message.
/// </summary>
public class Record
{
    public int sortOrder;
    public string category;
    public string message;

    public Record(int order, string cat, string msg)
    {
        this.sortOrder = order;
        this.category = cat;
        this.message = msg;
    }
}
/// <summary>
/// A Record that additionally carries a value together with its valid [min, max] range.
/// </summary>
public class RangedRecord : Record
{
    public float value;
    public float min;
    public float max;

    public RangedRecord(int order, string cat, string msg, float val, float minVal, float maxVal)
        : base(order, cat, msg)
    {
        this.value = val;
        this.min = minVal;
        this.max = maxVal;
    }
}
// Callback that applies a fix to the given object; isLastInSet marks the final
// invocation of a batch, selectedIndex selects which fix button was pressed.
public delegate void FixMethodDelegate(UnityEngine.Object obj, bool isLastInSet, int selectedIndex);

/// <summary>
/// A Record describing a fixable issue: the fix callback, its target object,
/// the button labels offered to the user, and completion state.
/// </summary>
public class FixRecord : Record
{
    public FixMethodDelegate fixMethod;
    public UnityEngine.Object targetObject;
    public string[] buttonNames;
    public bool editModeRequired;
    public bool complete;

    public FixRecord(int order, string cat, string msg, FixMethodDelegate fix, UnityEngine.Object target,
        bool editRequired, string[] buttons)
        : base(order, cat, msg)
    {
        this.fixMethod = fix;
        this.targetObject = target;
        this.buttonNames = buttons;
        this.editModeRequired = editRequired;
        this.complete = false;
    }
}
}

View File

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 63f0fe0d60ddeb54f9f43d701286af2d
timeCreated: 1520636357
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,49 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
/// <summary>
/// Allows you to reset VR input tracking with a gamepad button press.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_reset_orientation")]
public class OVRResetOrientation : MonoBehaviour
{
    /// <summary>
    /// The gamepad button that will reset VR input tracking.
    /// </summary>
    public OVRInput.RawButton resetButton = OVRInput.RawButton.Y;

    /// <summary>
    /// Check input and reset orientation if necessary
    /// See the input mapping setup in the Unity Integration guide
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
        if (!OVRInput.GetDown(resetButton))
        {
            return;
        }

        // Recenter the pose on the frame the reset button goes down.
        OVRManager.display.RecenterPose();
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 09bb0a17b6a704298b65be4fb08ef480
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,214 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Loads and displays the runtime-provided controller render model (glTF) for one
/// touch controller, shows/hides it with connection and input focus, and drives its
/// button/trigger/thumbstick animation nodes from live input.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_runtime_controller")]
public class OVRRuntimeController : MonoBehaviour
{
    /// <summary>
    /// The controller that determines whether or not to enable rendering of the controller model.
    /// </summary>
    public OVRInput.Controller m_controller;

    /// <summary>
    /// Shader that will be used for the controller model
    /// </summary>
    public Shader m_controllerModelShader;

    /// <summary>
    /// Support render model animation
    /// </summary>
    public bool m_supportAnimation = true;

    // Root GameObject of the loaded glTF model; null until LoadControllerModel succeeds.
    private GameObject m_controllerObject;

    // Runtime render-model paths for the left/right controllers.
    private static string leftControllerModelPath = "/model_fb/controller/left";
    private static string rightControllerModelPath = "/model_fb/controller/right";
    private string m_controllerModelPath;

    // Whether the runtime reported a render model at m_controllerModelPath.
    private bool m_modelSupported = false;

    // Current input focus / connection state plus previous-frame copies used to
    // detect transitions in Update() and toggle model visibility only on change.
    private bool m_hasInputFocus = true;
    private bool m_hasInputFocusPrev = false;
    private bool m_controllerConnectedPrev = false;

    // Animatable nodes of the loaded model, keyed by input element.
    private Dictionary<OVRGLTFInputNode, OVRGLTFAnimatinonNode> m_animationNodes;

    // Start is called before the first frame update
    void Start()
    {
        // NOTE(review): for controller values other than LTouch/RTouch,
        // m_controllerModelPath stays null — presumably unsupported; confirm.
        if (m_controller == OVRInput.Controller.LTouch)
            m_controllerModelPath = leftControllerModelPath;
        else if (m_controller == OVRInput.Controller.RTouch)
            m_controllerModelPath = rightControllerModelPath;
        m_modelSupported = IsModelSupported(m_controllerModelPath);
        if (m_modelSupported)
        {
            // Poll for connection and load the model once available.
            StartCoroutine(UpdateControllerModel());
        }
        OVRManager.InputFocusAcquired += InputFocusAquired;
        OVRManager.InputFocusLost += InputFocusLost;
    }

    // Update is called once per frame
    void Update()
    {
        bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
        // Only touch the hierarchy when focus or connection state changed this frame.
        if (m_hasInputFocus != m_hasInputFocusPrev || controllerConnected != m_controllerConnectedPrev)
        {
            if (m_controllerObject != null)
            {
                // Model is visible only while connected AND the app has input focus.
                m_controllerObject.SetActive(controllerConnected && m_hasInputFocus);
            }
            m_hasInputFocusPrev = m_hasInputFocus;
            m_controllerConnectedPrev = controllerConnected;
        }
        if (controllerConnected)
        {
            UpdateControllerAnimation();
        }
    }

    // Returns true when the runtime enumerates modelPath among its render model paths.
    private bool IsModelSupported(string modelPath)
    {
        string[] modelPaths = OVRPlugin.GetRenderModelPaths();
        if (modelPaths.Length == 0)
        {
            Debug.LogError("Failed to enumerate model paths from the runtime. " +
                           "Check that the render model feature is enabled in OVRManager.");
            return false;
        }
        for (int i = 0; i < modelPaths.Length; i++)
        {
            if (modelPaths[i].Equals(modelPath))
                return true;
        }
        Debug.LogError("Render model path " + modelPath + " not supported by this device.");
        return false;
    }

    // Loads the glTF render model at modelPath, parents it under this transform,
    // and caches its animation nodes. Returns true on success.
    private bool LoadControllerModel(string modelPath)
    {
        var modelProperties = new OVRPlugin.RenderModelProperties();
        if (OVRPlugin.GetRenderModelProperties(modelPath, ref modelProperties))
        {
            if (modelProperties.ModelKey != OVRPlugin.RENDER_MODEL_NULL_KEY)
            {
                byte[] modelData = OVRPlugin.LoadRenderModel(modelProperties.ModelKey);
                if (modelData != null)
                {
                    OVRGLTFLoader loader = new OVRGLTFLoader(modelData);
                    loader.SetModelShader(m_controllerModelShader);
                    OVRGLTFScene scene = loader.LoadGLB(m_supportAnimation);
                    m_controllerObject = scene.root;
                    m_animationNodes = scene.animationNodes;
                    if (m_controllerObject != null)
                    {
                        m_controllerObject.transform.SetParent(transform, false);
                        // Apply the OpenXR grip pose offset so runtime controller models are in the right position
                        m_controllerObject.transform.parent.localPosition = new Vector3(0.0f, -0.03f, -0.04f);
                        m_controllerObject.transform.parent.localRotation =
                            Quaternion.AngleAxis(-60.0f, new Vector3(1.0f, 0.0f, 0.0f));
                        return true;
                    }
                }
            }
            // NOTE(review): also reached when the key was valid but loading/parsing failed above.
            Debug.LogError("Retrived a null model key of " + modelPath);
        }
        Debug.LogError("Failed to load controller model of " + modelPath);
        return false;
    }

    // Coroutine: polls every 0.5 s and loads the model once the controller is connected.
    private IEnumerator UpdateControllerModel()
    {
        while (true)
        {
            bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
            if (m_controllerObject == null && controllerConnected)
            {
                LoadControllerModel(m_controllerModelPath);
            }
            yield return new WaitForSeconds(.5f);
        }
    }

    // Pushes the current button/trigger/thumbstick input state into the model's animation nodes.
    private void UpdateControllerAnimation()
    {
        if (m_animationNodes == null)
        {
            return;
        }
        // A/X and B/Y map to different raw buttons depending on which hand this controller is.
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.Button_A_X))
            m_animationNodes[OVRGLTFInputNode.Button_A_X].UpdatePose(
                OVRInput.Get(m_controller == OVRInput.Controller.LTouch ? OVRInput.RawButton.X : OVRInput.RawButton.A));
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.Button_B_Y))
            m_animationNodes[OVRGLTFInputNode.Button_B_Y].UpdatePose(
                OVRInput.Get(m_controller == OVRInput.Controller.LTouch ? OVRInput.RawButton.Y : OVRInput.RawButton.B));
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.Button_Oculus_Menu))
            m_animationNodes[OVRGLTFInputNode.Button_Oculus_Menu].UpdatePose(
                OVRInput.Get(OVRInput.RawButton.Start));
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.Trigger_Grip))
            m_animationNodes[OVRGLTFInputNode.Trigger_Grip].UpdatePose(
                OVRInput.Get(m_controller == OVRInput.Controller.LTouch
                    ? OVRInput.RawAxis1D.LHandTrigger
                    : OVRInput.RawAxis1D.RHandTrigger));
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.Trigger_Front))
            m_animationNodes[OVRGLTFInputNode.Trigger_Front].UpdatePose(
                OVRInput.Get(m_controller == OVRInput.Controller.LTouch
                    ? OVRInput.RawAxis1D.LIndexTrigger
                    : OVRInput.RawAxis1D.RIndexTrigger));
        if (m_animationNodes.ContainsKey(OVRGLTFInputNode.ThumbStick))
            m_animationNodes[OVRGLTFInputNode.ThumbStick].UpdatePose(
                OVRInput.Get(m_controller == OVRInput.Controller.LTouch
                    ? OVRInput.RawAxis2D.LThumbstick
                    : OVRInput.RawAxis2D.RThumbstick));
    }

    // OVRManager.InputFocusAcquired handler.
    public void InputFocusAquired()
    {
        m_hasInputFocus = true;
    }

    // OVRManager.InputFocusLost handler.
    public void InputFocusLost()
    {
        m_hasInputFocus = false;
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ed24508d182abcf4ba02d983e302d34e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,231 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections;
/// <summary>
/// Sample that allows you to play with various VR settings.
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_scene_sample_controller")]
public class OVRSceneSampleController : MonoBehaviour
{
/// <summary>
/// The key that quits the application.
/// </summary>
public KeyCode quitKey = KeyCode.Escape;
/// <summary>
/// An optional texture that appears before the menu fades in.
/// </summary>
public Texture fadeInTexture = null;
/// <summary>
/// Controls how quickly the player's speed and rotation change based on input.
/// </summary>
public float speedRotationIncrement = 0.05f;
private OVRPlayerController playerController = null;
// Handle to OVRCameraRig
private OVRCameraRig cameraController = null;
/// <summary>
/// We can set the layer to be anything we want to, this allows
/// a specific camera to render it.
/// </summary>
public string layerName = "Default";
// Vision mode on/off
private bool visionMode = true;
// We want to hold onto GridCube, for potential sharing
// of the menu RenderTarget
OVRGridCube gridCube = null;
#if SHOW_DK2_VARIABLES
private string strVisionMode = "Vision Enabled: ON";
#endif
#region MonoBehaviour Message Handlers
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Find camera controller
OVRCameraRig[] cameraControllers;
cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
if (cameraControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
}
else if (cameraControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
}
else
{
cameraController = cameraControllers[0];
}
// Find player controller
OVRPlayerController[] playerControllers;
playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();
if (playerControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
}
else if (playerControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
}
else
{
playerController = playerControllers[0];
}
}
/// <summary>
/// Start this instance.
/// </summary>
void Start()
{
// Make sure to hide cursor
if (Application.isEditor == false)
{
Cursor.visible = false;
Cursor.lockState = CursorLockMode.Locked;
}
// CameraController updates
if (cameraController != null)
{
// Add a GridCube component to this object
gridCube = gameObject.AddComponent<OVRGridCube>();
gridCube.SetOVRCameraController(ref cameraController);
}
}
/// <summary>
/// Update this instance.
/// </summary>
void Update()
{
// Recenter pose
UpdateRecenterPose();
// Turn On/Off Vision Mode
UpdateVisionMode();
// Update Speed and Rotation Scale
if (playerController != null)
UpdateSpeedAndRotationScaleMultiplier();
//todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
// Toggle Fullscreen
if (Input.GetKeyDown(KeyCode.F11))
Screen.fullScreen = !Screen.fullScreen;
if (Input.GetKeyDown(KeyCode.M))
UnityEngine.XR.XRSettings.showDeviceView = !UnityEngine.XR.XRSettings.showDeviceView;
#if !UNITY_ANDROID || UNITY_EDITOR
// Escape Application
if (Input.GetKeyDown(quitKey))
Application.Quit();
#endif
#endif
}
#endregion
/// <summary>
/// Updates the vision mode.
/// </summary>
void UpdateVisionMode()
{
//todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
if (Input.GetKeyDown(KeyCode.F2))
{
visionMode ^= visionMode;
OVRManager.tracker.isEnabled = visionMode;
}
#endif
}
/// <summary>
/// Adjusts the player's movement scale (keys 7/8) and rotation scale
/// (keys 9/0) in steps of speedRotationIncrement.
/// </summary>
void UpdateSpeedAndRotationScaleMultiplier()
{
    // Read-modify-write the move scale through the player controller.
    float moveScale = 0.0f;
    playerController.GetMoveScaleMultiplier(ref moveScale);
    //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
    if (Input.GetKeyDown(KeyCode.Alpha7))
    {
        moveScale -= speedRotationIncrement;
    }
    else if (Input.GetKeyDown(KeyCode.Alpha8))
    {
        moveScale += speedRotationIncrement;
    }
#endif
    playerController.SetMoveScaleMultiplier(moveScale);

    // Same pattern for the rotation scale.
    float rotationScale = 0.0f;
    playerController.GetRotationScaleMultiplier(ref rotationScale);
    //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
    if (Input.GetKeyDown(KeyCode.Alpha9))
    {
        rotationScale -= speedRotationIncrement;
    }
    else if (Input.GetKeyDown(KeyCode.Alpha0))
    {
        rotationScale += speedRotationIncrement;
    }
#endif
    playerController.SetRotationScaleMultiplier(rotationScale);
}
/// <summary>
/// Recenters the HMD pose when the R key is pressed.
/// </summary>
void UpdateRecenterPose()
{
    //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
    if (Input.GetKeyDown(KeyCode.R))
    {
        OVRManager.display.RecenterPose();
    }
#endif
}
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4f07515ada089df47868559a20dd6783
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,241 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using UnityEngine;
using System.Collections; // required for Coroutines
/// <summary>
/// Fades the screen from black after a new scene is loaded. Fade can also be controlled mid-scene using SetUIFade and SetFadeLevel
/// </summary>
[HelpURL("https://developer.oculus.com/reference/unity/latest/class_o_v_r_screen_fade")]
public class OVRScreenFade : MonoBehaviour
{
    // Last-started instance wins; cleared again in OnDestroy.
    public static OVRScreenFade instance { get; private set; }

    [Tooltip("Fade duration")]
    public float fadeTime = 2.0f;

    [Tooltip("Screen color at maximum fade")]
    public Color fadeColor = new Color(0.01f, 0.01f, 0.01f, 1.0f);

    // When true, Start() kicks off an automatic fade-in from black.
    public bool fadeOnStart = true;

    /// <summary>
    /// The render queue used by the fade mesh. Reduce this if you need to render on top of it.
    /// </summary>
    public int renderQueue = 5000;

    /// <summary>
    /// Renders the current alpha value being used to fade the screen.
    /// Three independent fade sources contribute; the most opaque one wins.
    /// </summary>
    public float currentAlpha
    {
        get { return Mathf.Max(explicitFadeAlpha, animatedFadeAlpha, uiFadeAlpha); }
    }

    // Alpha set directly via SetExplicitFade.
    private float explicitFadeAlpha = 0.0f;
    // Alpha driven by the Fade coroutine (FadeIn/FadeOut).
    private float animatedFadeAlpha = 0.0f;
    // Alpha set via SetUIFade (dimming behind foreground UI).
    private float uiFadeAlpha = 0.0f;

    // Fullscreen quad components created at runtime in Start().
    private MeshRenderer fadeRenderer;
    private MeshFilter fadeMesh;
    private Material fadeMaterial = null;
    private bool isFading = false;

    /// <summary>
    /// Automatically starts a fade in.
    /// Builds the fullscreen fade quad (2x2 at depth 1, presumably in front of
    /// the camera this component sits on — TODO confirm attachment point).
    /// </summary>
    void Start()
    {
        // Skip fading on mixed-reality-capture clone cameras.
        if (gameObject.name.StartsWith("OculusMRC_"))
        {
            Destroy(this);
            return;
        }

        // create the fade material
        fadeMaterial = new Material(Shader.Find("Oculus/Unlit Transparent Color"));
        fadeMesh = gameObject.AddComponent<MeshFilter>();
        fadeRenderer = gameObject.AddComponent<MeshRenderer>();

        var mesh = new Mesh();
        fadeMesh.mesh = mesh;

        Vector3[] vertices = new Vector3[4];
        float width = 2f;
        float height = 2f;
        float depth = 1f;
        vertices[0] = new Vector3(-width, -height, depth);
        vertices[1] = new Vector3(width, -height, depth);
        vertices[2] = new Vector3(-width, height, depth);
        vertices[3] = new Vector3(width, height, depth);
        mesh.vertices = vertices;

        int[] tri = new int[6];
        tri[0] = 0;
        tri[1] = 2;
        tri[2] = 1;
        tri[3] = 2;
        tri[4] = 3;
        tri[5] = 1;
        mesh.triangles = tri;

        Vector3[] normals = new Vector3[4];
        normals[0] = -Vector3.forward;
        normals[1] = -Vector3.forward;
        normals[2] = -Vector3.forward;
        normals[3] = -Vector3.forward;
        mesh.normals = normals;

        Vector2[] uv = new Vector2[4];
        uv[0] = new Vector2(0, 0);
        uv[1] = new Vector2(1, 0);
        uv[2] = new Vector2(0, 1);
        uv[3] = new Vector2(1, 1);
        mesh.uv = uv;

        // Reset all fade sources before the optional start fade.
        explicitFadeAlpha = 0.0f;
        animatedFadeAlpha = 0.0f;
        uiFadeAlpha = 0.0f;

        if (fadeOnStart)
        {
            FadeIn();
        }

        instance = this;
    }

    /// <summary>
    /// Start a fade in (animated alpha 1 -> 0).
    /// </summary>
    public void FadeIn()
    {
        StartCoroutine(Fade(1.0f, 0.0f));
    }

    /// <summary>
    /// Start a fade out (animated alpha 0 -> 1).
    /// </summary>
    public void FadeOut()
    {
        StartCoroutine(Fade(0, 1));
    }

    /// <summary>
    /// Starts a fade in when a new level is loaded.
    /// NOTE(review): nothing in this file subscribes this method to a
    /// scene-load callback, so it appears to be dead code — verify whether a
    /// SceneManager.sceneLoaded hookup was lost.
    /// </summary>
    void OnLevelFinishedLoading(int level)
    {
        FadeIn();
    }

    // Reset fade state when re-enabled, unless a start fade is expected.
    void OnEnable()
    {
        if (!fadeOnStart)
        {
            explicitFadeAlpha = 0.0f;
            animatedFadeAlpha = 0.0f;
            uiFadeAlpha = 0.0f;
        }
    }

    /// <summary>
    /// Cleans up the fade material and the runtime-created quad components.
    /// </summary>
    void OnDestroy()
    {
        instance = null;

        if (fadeRenderer != null)
            Destroy(fadeRenderer);

        if (fadeMaterial != null)
            Destroy(fadeMaterial);

        if (fadeMesh != null)
            Destroy(fadeMesh);
    }

    /// <summary>
    /// Set the UI fade level - fade due to UI in foreground. Clamped to [0, 1].
    /// </summary>
    public void SetUIFade(float level)
    {
        uiFadeAlpha = Mathf.Clamp01(level);
        SetMaterialAlpha();
    }

    /// <summary>
    /// Override current fade level.
    /// NOTE(review): unlike SetUIFade this does not clamp to [0, 1] — confirm
    /// whether out-of-range values are intentional before relying on it.
    /// </summary>
    /// <param name="level"></param>
    public void SetExplicitFade(float level)
    {
        explicitFadeAlpha = level;
        SetMaterialAlpha();
    }

    /// <summary>
    /// Fades alpha from 1.0 to 0.0 (or generally startAlpha to endAlpha over fadeTime).
    /// </summary>
    IEnumerator Fade(float startAlpha, float endAlpha)
    {
        float elapsedTime = 0.0f;
        while (elapsedTime < fadeTime)
        {
            elapsedTime += Time.deltaTime;
            animatedFadeAlpha = Mathf.Lerp(startAlpha, endAlpha, Mathf.Clamp01(elapsedTime / fadeTime));
            SetMaterialAlpha();
            yield return new WaitForEndOfFrame();
        }

        // Snap to the exact target so the final value is not lerp-approximate.
        animatedFadeAlpha = endAlpha;
        SetMaterialAlpha();
    }

    /// <summary>
    /// Update material alpha. UI fade and the current fade due to fade in/out animations (or explicit control)
    /// both affect the fade. (The max is taken.) The renderer is disabled
    /// entirely when fully transparent to avoid rendering an invisible quad.
    /// </summary>
    private void SetMaterialAlpha()
    {
        Color color = fadeColor;
        color.a = currentAlpha;
        isFading = color.a > 0;
        if (fadeMaterial != null)
        {
            fadeMaterial.color = color;
            fadeMaterial.renderQueue = renderQueue;
            fadeRenderer.material = fadeMaterial;
            fadeRenderer.enabled = isFading;
        }
    }
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df8e1d778abf442e4bec449c360e9e1c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@ -0,0 +1,967 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Assertions;
/// <summary>
/// Drives a hierarchy of bone transforms from OVRPlugin tracking data (hand
/// or body skeletons). Optionally updates the root pose/scale and spawns
/// kinematic physics capsules that follow the bones.
/// </summary>
public class OVRSkeleton : MonoBehaviour
{
    /// <summary>Supplies per-frame skeleton pose data to this component.</summary>
    public interface IOVRSkeletonDataProvider
    {
        SkeletonType GetSkeletonType();
        SkeletonPoseData GetSkeletonPoseData();
        bool enabled { get; }
    }

    /// <summary>One frame of skeleton tracking data from a provider.</summary>
    public struct SkeletonPoseData
    {
        public OVRPlugin.Posef RootPose { get; set; }
        public float RootScale { get; set; }
        public OVRPlugin.Quatf[] BoneRotations { get; set; }
        public bool IsDataValid { get; set; }
        public bool IsDataHighConfidence { get; set; }
        public OVRPlugin.Vector3f[] BoneTranslations { get; set; }
        // Incremented by the runtime when the skeleton definition changes;
        // triggers re-initialization in UpdateSkeleton.
        public int SkeletonChangedCount { get; set; }
    }

    public enum SkeletonType
    {
        None = OVRPlugin.SkeletonType.None,
        HandLeft = OVRPlugin.SkeletonType.HandLeft,
        HandRight = OVRPlugin.SkeletonType.HandRight,
        Body = OVRPlugin.SkeletonType.Body,
    }

    public enum BoneId
    {
        Invalid = OVRPlugin.BoneId.Invalid,

        // hand bones
        Hand_Start = OVRPlugin.BoneId.Hand_Start,
        Hand_WristRoot = OVRPlugin.BoneId.Hand_WristRoot, // root frame of the hand, where the wrist is located
        Hand_ForearmStub = OVRPlugin.BoneId.Hand_ForearmStub, // frame for user's forearm
        Hand_Thumb0 = OVRPlugin.BoneId.Hand_Thumb0, // thumb trapezium bone
        Hand_Thumb1 = OVRPlugin.BoneId.Hand_Thumb1, // thumb metacarpal bone
        Hand_Thumb2 = OVRPlugin.BoneId.Hand_Thumb2, // thumb proximal phalange bone
        Hand_Thumb3 = OVRPlugin.BoneId.Hand_Thumb3, // thumb distal phalange bone
        Hand_Index1 = OVRPlugin.BoneId.Hand_Index1, // index proximal phalange bone
        Hand_Index2 = OVRPlugin.BoneId.Hand_Index2, // index intermediate phalange bone
        Hand_Index3 = OVRPlugin.BoneId.Hand_Index3, // index distal phalange bone
        Hand_Middle1 = OVRPlugin.BoneId.Hand_Middle1, // middle proximal phalange bone
        Hand_Middle2 = OVRPlugin.BoneId.Hand_Middle2, // middle intermediate phalange bone
        Hand_Middle3 = OVRPlugin.BoneId.Hand_Middle3, // middle distal phalange bone
        Hand_Ring1 = OVRPlugin.BoneId.Hand_Ring1, // ring proximal phalange bone
        Hand_Ring2 = OVRPlugin.BoneId.Hand_Ring2, // ring intermediate phalange bone
        Hand_Ring3 = OVRPlugin.BoneId.Hand_Ring3, // ring distal phalange bone
        Hand_Pinky0 = OVRPlugin.BoneId.Hand_Pinky0, // pinky metacarpal bone
        Hand_Pinky1 = OVRPlugin.BoneId.Hand_Pinky1, // pinky proximal phalange bone
        Hand_Pinky2 = OVRPlugin.BoneId.Hand_Pinky2, // pinky intermediate phalange bone
        Hand_Pinky3 = OVRPlugin.BoneId.Hand_Pinky3, // pinky distal phalange bone
        Hand_MaxSkinnable = OVRPlugin.BoneId.Hand_MaxSkinnable,

        // Bone tips are position only. They are not used for skinning but are useful for hit-testing.
        // NOTE: Hand_ThumbTip == Hand_MaxSkinnable since the extended tips need to be contiguous
        Hand_ThumbTip = OVRPlugin.BoneId.Hand_ThumbTip, // tip of the thumb
        Hand_IndexTip = OVRPlugin.BoneId.Hand_IndexTip, // tip of the index finger
        Hand_MiddleTip = OVRPlugin.BoneId.Hand_MiddleTip, // tip of the middle finger
        Hand_RingTip = OVRPlugin.BoneId.Hand_RingTip, // tip of the ring finger
        Hand_PinkyTip = OVRPlugin.BoneId.Hand_PinkyTip, // tip of the pinky
        Hand_End = OVRPlugin.BoneId.Hand_End,

        // body bones
        Body_Start = OVRPlugin.BoneId.Body_Start,
        Body_Root = OVRPlugin.BoneId.Body_Root,
        Body_Hips = OVRPlugin.BoneId.Body_Hips,
        Body_SpineLower = OVRPlugin.BoneId.Body_SpineLower,
        Body_SpineMiddle = OVRPlugin.BoneId.Body_SpineMiddle,
        Body_SpineUpper = OVRPlugin.BoneId.Body_SpineUpper,
        Body_Chest = OVRPlugin.BoneId.Body_Chest,
        Body_Neck = OVRPlugin.BoneId.Body_Neck,
        Body_Head = OVRPlugin.BoneId.Body_Head,
        Body_LeftShoulder = OVRPlugin.BoneId.Body_LeftShoulder,
        Body_LeftScapula = OVRPlugin.BoneId.Body_LeftScapula,
        Body_LeftArmUpper = OVRPlugin.BoneId.Body_LeftArmUpper,
        Body_LeftArmLower = OVRPlugin.BoneId.Body_LeftArmLower,
        Body_LeftHandWristTwist = OVRPlugin.BoneId.Body_LeftHandWristTwist,
        Body_RightShoulder = OVRPlugin.BoneId.Body_RightShoulder,
        Body_RightScapula = OVRPlugin.BoneId.Body_RightScapula,
        Body_RightArmUpper = OVRPlugin.BoneId.Body_RightArmUpper,
        Body_RightArmLower = OVRPlugin.BoneId.Body_RightArmLower,
        Body_RightHandWristTwist = OVRPlugin.BoneId.Body_RightHandWristTwist,
        Body_LeftHandPalm = OVRPlugin.BoneId.Body_LeftHandPalm,
        Body_LeftHandWrist = OVRPlugin.BoneId.Body_LeftHandWrist,
        Body_LeftHandThumbMetacarpal = OVRPlugin.BoneId.Body_LeftHandThumbMetacarpal,
        Body_LeftHandThumbProximal = OVRPlugin.BoneId.Body_LeftHandThumbProximal,
        Body_LeftHandThumbDistal = OVRPlugin.BoneId.Body_LeftHandThumbDistal,
        Body_LeftHandThumbTip = OVRPlugin.BoneId.Body_LeftHandThumbTip,
        Body_LeftHandIndexMetacarpal = OVRPlugin.BoneId.Body_LeftHandIndexMetacarpal,
        Body_LeftHandIndexProximal = OVRPlugin.BoneId.Body_LeftHandIndexProximal,
        Body_LeftHandIndexIntermediate = OVRPlugin.BoneId.Body_LeftHandIndexIntermediate,
        Body_LeftHandIndexDistal = OVRPlugin.BoneId.Body_LeftHandIndexDistal,
        Body_LeftHandIndexTip = OVRPlugin.BoneId.Body_LeftHandIndexTip,
        Body_LeftHandMiddleMetacarpal = OVRPlugin.BoneId.Body_LeftHandMiddleMetacarpal,
        Body_LeftHandMiddleProximal = OVRPlugin.BoneId.Body_LeftHandMiddleProximal,
        Body_LeftHandMiddleIntermediate = OVRPlugin.BoneId.Body_LeftHandMiddleIntermediate,
        Body_LeftHandMiddleDistal = OVRPlugin.BoneId.Body_LeftHandMiddleDistal,
        Body_LeftHandMiddleTip = OVRPlugin.BoneId.Body_LeftHandMiddleTip,
        Body_LeftHandRingMetacarpal = OVRPlugin.BoneId.Body_LeftHandRingMetacarpal,
        Body_LeftHandRingProximal = OVRPlugin.BoneId.Body_LeftHandRingProximal,
        Body_LeftHandRingIntermediate = OVRPlugin.BoneId.Body_LeftHandRingIntermediate,
        Body_LeftHandRingDistal = OVRPlugin.BoneId.Body_LeftHandRingDistal,
        Body_LeftHandRingTip = OVRPlugin.BoneId.Body_LeftHandRingTip,
        Body_LeftHandLittleMetacarpal = OVRPlugin.BoneId.Body_LeftHandLittleMetacarpal,
        Body_LeftHandLittleProximal = OVRPlugin.BoneId.Body_LeftHandLittleProximal,
        Body_LeftHandLittleIntermediate = OVRPlugin.BoneId.Body_LeftHandLittleIntermediate,
        Body_LeftHandLittleDistal = OVRPlugin.BoneId.Body_LeftHandLittleDistal,
        Body_LeftHandLittleTip = OVRPlugin.BoneId.Body_LeftHandLittleTip,
        Body_RightHandPalm = OVRPlugin.BoneId.Body_RightHandPalm,
        Body_RightHandWrist = OVRPlugin.BoneId.Body_RightHandWrist,
        Body_RightHandThumbMetacarpal = OVRPlugin.BoneId.Body_RightHandThumbMetacarpal,
        Body_RightHandThumbProximal = OVRPlugin.BoneId.Body_RightHandThumbProximal,
        Body_RightHandThumbDistal = OVRPlugin.BoneId.Body_RightHandThumbDistal,
        Body_RightHandThumbTip = OVRPlugin.BoneId.Body_RightHandThumbTip,
        Body_RightHandIndexMetacarpal = OVRPlugin.BoneId.Body_RightHandIndexMetacarpal,
        Body_RightHandIndexProximal = OVRPlugin.BoneId.Body_RightHandIndexProximal,
        Body_RightHandIndexIntermediate = OVRPlugin.BoneId.Body_RightHandIndexIntermediate,
        Body_RightHandIndexDistal = OVRPlugin.BoneId.Body_RightHandIndexDistal,
        Body_RightHandIndexTip = OVRPlugin.BoneId.Body_RightHandIndexTip,
        Body_RightHandMiddleMetacarpal = OVRPlugin.BoneId.Body_RightHandMiddleMetacarpal,
        Body_RightHandMiddleProximal = OVRPlugin.BoneId.Body_RightHandMiddleProximal,
        Body_RightHandMiddleIntermediate = OVRPlugin.BoneId.Body_RightHandMiddleIntermediate,
        Body_RightHandMiddleDistal = OVRPlugin.BoneId.Body_RightHandMiddleDistal,
        Body_RightHandMiddleTip = OVRPlugin.BoneId.Body_RightHandMiddleTip,
        Body_RightHandRingMetacarpal = OVRPlugin.BoneId.Body_RightHandRingMetacarpal,
        Body_RightHandRingProximal = OVRPlugin.BoneId.Body_RightHandRingProximal,
        Body_RightHandRingIntermediate = OVRPlugin.BoneId.Body_RightHandRingIntermediate,
        Body_RightHandRingDistal = OVRPlugin.BoneId.Body_RightHandRingDistal,
        Body_RightHandRingTip = OVRPlugin.BoneId.Body_RightHandRingTip,
        Body_RightHandLittleMetacarpal = OVRPlugin.BoneId.Body_RightHandLittleMetacarpal,
        Body_RightHandLittleProximal = OVRPlugin.BoneId.Body_RightHandLittleProximal,
        Body_RightHandLittleIntermediate = OVRPlugin.BoneId.Body_RightHandLittleIntermediate,
        Body_RightHandLittleDistal = OVRPlugin.BoneId.Body_RightHandLittleDistal,
        Body_RightHandLittleTip = OVRPlugin.BoneId.Body_RightHandLittleTip,
        Body_End = OVRPlugin.BoneId.Body_End,

        // add new bones here
        Max = OVRPlugin.BoneId.Max,
    }

    [SerializeField]
    protected SkeletonType _skeletonType = SkeletonType.None;

    // NOTE(review): [SerializeField] on an interface-typed field is unusual for
    // Unity serialization — confirm this is populated as expected; Awake falls
    // back to searching parents when it is null.
    [SerializeField]
    private IOVRSkeletonDataProvider _dataProvider;

    [SerializeField]
    private bool _updateRootPose = false;

    [SerializeField]
    private bool _updateRootScale = false;

    [SerializeField]
    private bool _enablePhysicsCapsules = false;

    [SerializeField]
    private bool _applyBoneTranslations = true;

    // Runtime-created parent objects for the three transform groups.
    private GameObject _bonesGO;
    private GameObject _bindPosesGO;
    private GameObject _capsulesGO;

    protected List<OVRBone> _bones;
    private List<OVRBone> _bindPoses;
    private List<OVRBoneCapsule> _capsules;

    protected OVRPlugin.Skeleton2 _skeleton = new OVRPlugin.Skeleton2();

    // Applied on top of the wrist-root rotation for hand skeletons (180° about Y).
    private readonly Quaternion wristFixupRotation = new Quaternion(0.0f, 1.0f, 0.0f, 0.0f);

    public bool IsInitialized { get; private set; }
    public bool IsDataValid { get; private set; }
    public bool IsDataHighConfidence { get; private set; }

    // Read-only views over the internal lists.
    public IList<OVRBone> Bones { get; protected set; }
    public IList<OVRBone> BindPoses { get; private set; }
    public IList<OVRBoneCapsule> Capsules { get; private set; }

    public SkeletonType GetSkeletonType()
    {
        return _skeletonType;
    }

    internal virtual void SetSkeletonType(SkeletonType type)
    {
        _skeletonType = type;
    }

    public bool IsValidBone(BoneId bone)
    {
        return OVRPlugin.IsValidBone((OVRPlugin.BoneId)bone, (OVRPlugin.SkeletonType)_skeletonType);
    }

    public int SkeletonChangedCount { get; private set; }

    /// <summary>
    /// Resolves a data provider (searching parents if none assigned) and
    /// allocates the bone/bind-pose/capsule lists plus their read-only views.
    /// </summary>
    protected virtual void Awake()
    {
        if (_dataProvider == null)
        {
            var foundDataProvider = SearchSkeletonDataProvider();
            if (foundDataProvider != null)
            {
                _dataProvider = foundDataProvider;
                if (_dataProvider is MonoBehaviour mb)
                {
                    Debug.Log($"Found IOVRSkeletonDataProvider reference in {mb.name} due to unassigned field.");
                }
            }
        }

        _bones = new List<OVRBone>();
        Bones = _bones.AsReadOnly();
        _bindPoses = new List<OVRBone>();
        BindPoses = _bindPoses.AsReadOnly();
        _capsules = new List<OVRBoneCapsule>();
        Capsules = _capsules.AsReadOnly();
    }

    /// <summary>
    /// Returns the first provider in this object's parent chain whose skeleton
    /// type matches ours, or null if none is found.
    /// </summary>
    internal IOVRSkeletonDataProvider SearchSkeletonDataProvider()
    {
        var dataProviders = gameObject.GetComponentsInParent<IOVRSkeletonDataProvider>();
        foreach (var dataProvider in dataProviders)
        {
            if (dataProvider.GetSkeletonType() == _skeletonType)
            {
                return dataProvider;
            }
        }

        return null;
    }

    /// <summary>
    /// Start this instance.
    /// Initialize data structures.
    /// </summary>
    protected virtual void Start()
    {
        if (_dataProvider == null && _skeletonType == SkeletonType.Body)
        {
            Debug.LogWarning("OVRSkeleton and its subclasses requires OVRBody to function.");
        }

        if (ShouldInitialize())
        {
            Initialize();
        }
    }

    // Gate for (re-)initialization: requires an enabled provider and a concrete
    // skeleton type; in-editor, hand skeletons also require connected hands.
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }

        if (_dataProvider != null && !_dataProvider.enabled)
        {
            return false;
        }

        if (_skeletonType == SkeletonType.None)
        {
            return false;
        }
        else if (IsHandSkeleton(_skeletonType))
        {
#if UNITY_EDITOR
            return OVRInput.IsControllerConnected(OVRInput.Controller.Hands);
#else
            return true;
#endif
        }
        else
        {
            return true;
        }
    }

    // Fetches the skeleton definition from OVRPlugin and builds bones,
    // bind poses, and (optionally) physics capsules from it.
    private void Initialize()
    {
        if (OVRPlugin.GetSkeleton2((OVRPlugin.SkeletonType)_skeletonType, ref _skeleton))
        {
            InitializeBones();
            InitializeBindPose();
            InitializeCapsules();

            IsInitialized = true;
        }
    }

    // Subclasses may supply pre-existing transforms for a bone id; null means
    // a new GameObject is created for that bone.
    protected virtual Transform GetBoneTransform(BoneId boneId) => null;

    protected virtual void InitializeBones()
    {
        // Hand skeleton data is authored in a flipped-X convention.
        bool flipX = IsHandSkeleton(_skeletonType);

        if (!_bonesGO)
        {
            _bonesGO = new GameObject("Bones");
            _bonesGO.transform.SetParent(transform, false);
            _bonesGO.transform.localPosition = Vector3.zero;
            _bonesGO.transform.localRotation = Quaternion.identity;
        }

        if (_bones == null || _bones.Count != _skeleton.NumBones)
        {
            _bones = new List<OVRBone>(new OVRBone[_skeleton.NumBones]);
            Bones = _bones.AsReadOnly();
        }

        bool newBonesCreated = false;

        // pre-populate bones list before attempting to apply bone hierarchy
        for (int i = 0; i < _bones.Count; ++i)
        {
            OVRBone bone = _bones[i] ?? (_bones[i] = new OVRBone());
            bone.Id = (OVRSkeleton.BoneId)_skeleton.Bones[i].Id;
            bone.ParentBoneIndex = _skeleton.Bones[i].ParentBoneIndex;
            Assert.IsTrue((int)bone.Id >= 0 && bone.Id <= BoneId.Max);

            // don't create new bones each time; rely on
            // pre-existing bone transforms.
            if (bone.Transform == null)
            {
                newBonesCreated = true;
                bone.Transform = GetBoneTransform(bone.Id);
                if (bone.Transform == null)
                {
                    bone.Transform = new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id)).transform;
                }
            }

            // if allocated bone here before, make sure the name is correct.
            if (GetBoneTransform(bone.Id) == null)
            {
                bone.Transform.name = BoneLabelFromBoneId(_skeletonType, bone.Id);
            }

            var pose = _skeleton.Bones[i].Pose;

            if (_applyBoneTranslations)
            {
                bone.Transform.localPosition = flipX
                    ? pose.Position.FromFlippedXVector3f()
                    : pose.Position.FromFlippedZVector3f();
            }

            bone.Transform.localRotation = flipX
                ? pose.Orientation.FromFlippedXQuatf()
                : pose.Orientation.FromFlippedZQuatf();
        }

        // Only re-parent when transforms were created this pass.
        if (newBonesCreated)
        {
            for (int i = 0; i < _bones.Count; ++i)
            {
                if (!IsValidBone((BoneId)_bones[i].ParentBoneIndex) ||
                    IsBodySkeleton(_skeletonType)) // Body bones are always in tracking space
                {
                    _bones[i].Transform.SetParent(_bonesGO.transform, false);
                }
                else
                {
                    _bones[i].Transform.SetParent(_bones[_bones[i].ParentBoneIndex].Transform, false);
                }
            }
        }
    }

    // Creates a second transform hierarchy snapshotting the current local
    // poses of the bones (the bind pose).
    private void InitializeBindPose()
    {
        if (!_bindPosesGO)
        {
            _bindPosesGO = new GameObject("BindPoses");
            _bindPosesGO.transform.SetParent(transform, false);
            _bindPosesGO.transform.localPosition = Vector3.zero;
            _bindPosesGO.transform.localRotation = Quaternion.identity;
        }

        if (_bindPoses == null || _bindPoses.Count != _bones.Count)
        {
            _bindPoses = new List<OVRBone>(new OVRBone[_bones.Count]);
            BindPoses = _bindPoses.AsReadOnly();
        }

        // pre-populate bones list before attempting to apply bone hierarchy
        for (int i = 0; i < _bindPoses.Count; ++i)
        {
            OVRBone bone = _bones[i];
            OVRBone bindPoseBone = _bindPoses[i] ?? (_bindPoses[i] = new OVRBone());
            bindPoseBone.Id = bone.Id;
            bindPoseBone.ParentBoneIndex = bone.ParentBoneIndex;

            Transform trans = bindPoseBone.Transform
                ? bindPoseBone.Transform
                : (bindPoseBone.Transform =
                    new GameObject(BoneLabelFromBoneId(_skeletonType, bindPoseBone.Id)).transform);
            trans.localPosition = bone.Transform.localPosition;
            trans.localRotation = bone.Transform.localRotation;
        }

        for (int i = 0; i < _bindPoses.Count; ++i)
        {
            if (!IsValidBone((BoneId)_bindPoses[i].ParentBoneIndex) ||
                IsBodySkeleton(_skeletonType)) // Body bones are always in tracking space
            {
                _bindPoses[i].Transform.SetParent(_bindPosesGO.transform, false);
            }
            else
            {
                _bindPoses[i].Transform.SetParent(_bindPoses[_bindPoses[i].ParentBoneIndex].Transform, false);
            }
        }
    }

    // Builds one kinematic Rigidbody + CapsuleCollider pair per bone capsule
    // reported by the plugin, positioned along the capsule's start/end points.
    private void InitializeCapsules()
    {
        bool flipX = IsHandSkeleton(_skeletonType);

        if (_enablePhysicsCapsules)
        {
            if (!_capsulesGO)
            {
                _capsulesGO = new GameObject("Capsules");
                _capsulesGO.transform.SetParent(transform, false);
                _capsulesGO.transform.localPosition = Vector3.zero;
                _capsulesGO.transform.localRotation = Quaternion.identity;
            }

            if (_capsules == null || _capsules.Count != _skeleton.NumBoneCapsules)
            {
                _capsules = new List<OVRBoneCapsule>(new OVRBoneCapsule[_skeleton.NumBoneCapsules]);
                Capsules = _capsules.AsReadOnly();
            }

            for (int i = 0; i < _capsules.Count; ++i)
            {
                OVRBone bone = _bones[_skeleton.BoneCapsules[i].BoneIndex];
                OVRBoneCapsule capsule = _capsules[i] ?? (_capsules[i] = new OVRBoneCapsule());
                capsule.BoneIndex = _skeleton.BoneCapsules[i].BoneIndex;

                if (capsule.CapsuleRigidbody == null)
                {
                    capsule.CapsuleRigidbody =
                        new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id) + "_CapsuleRigidbody")
                            .AddComponent<Rigidbody>();
                    capsule.CapsuleRigidbody.mass = 1.0f;
                    capsule.CapsuleRigidbody.isKinematic = true;
                    capsule.CapsuleRigidbody.useGravity = false;
                    capsule.CapsuleRigidbody.collisionDetectionMode = CollisionDetectionMode.ContinuousSpeculative;
                }

                GameObject rbGO = capsule.CapsuleRigidbody.gameObject;
                rbGO.transform.SetParent(_capsulesGO.transform, false);
                rbGO.transform.position = bone.Transform.position;
                rbGO.transform.rotation = bone.Transform.rotation;

                if (capsule.CapsuleCollider == null)
                {
                    capsule.CapsuleCollider =
                        new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id) + "_CapsuleCollider")
                            .AddComponent<CapsuleCollider>();
                    capsule.CapsuleCollider.isTrigger = false;
                }

                // Orient the collider along the start->end segment; the height
                // extends by one radius on each end to cap the capsule.
                var p0 = flipX
                    ? _skeleton.BoneCapsules[i].StartPoint.FromFlippedXVector3f()
                    : _skeleton.BoneCapsules[i].StartPoint.FromFlippedZVector3f();
                var p1 = flipX
                    ? _skeleton.BoneCapsules[i].EndPoint.FromFlippedXVector3f()
                    : _skeleton.BoneCapsules[i].EndPoint.FromFlippedZVector3f();
                var delta = p1 - p0;
                var mag = delta.magnitude;
                var rot = Quaternion.FromToRotation(Vector3.right, delta);
                capsule.CapsuleCollider.radius = _skeleton.BoneCapsules[i].Radius;
                capsule.CapsuleCollider.height = mag + _skeleton.BoneCapsules[i].Radius * 2.0f;
                capsule.CapsuleCollider.direction = 0;
                capsule.CapsuleCollider.center = Vector3.right * mag * 0.5f;

                GameObject ccGO = capsule.CapsuleCollider.gameObject;
                ccGO.transform.SetParent(rbGO.transform, false);
                ccGO.transform.localPosition = p0;
                ccGO.transform.localRotation = rot;
            }
        }
    }

    protected virtual void Update()
    {
        UpdateSkeleton();
    }

    /// <summary>
    /// Pulls the latest pose data from the provider and applies it to the
    /// root transform and every bone transform. Re-initializes when the
    /// provider reports a changed skeleton definition.
    /// </summary>
    protected void UpdateSkeleton()
    {
        if (ShouldInitialize())
        {
            Initialize();
        }

        if (!IsInitialized || _dataProvider == null)
        {
            IsDataValid = false;
            IsDataHighConfidence = false;

            return;
        }

        var data = _dataProvider.GetSkeletonPoseData();

        IsDataValid = data.IsDataValid;
        if (!data.IsDataValid)
        {
            return;
        }

        if (SkeletonChangedCount != data.SkeletonChangedCount)
        {
            SkeletonChangedCount = data.SkeletonChangedCount;
            IsInitialized = false;
            Initialize();
        }

        IsDataHighConfidence = data.IsDataHighConfidence;

        if (_updateRootPose)
        {
            transform.localPosition = data.RootPose.Position.FromFlippedZVector3f();
            transform.localRotation = data.RootPose.Orientation.FromFlippedZQuatf();
        }

        if (_updateRootScale)
        {
            transform.localScale = new Vector3(data.RootScale, data.RootScale, data.RootScale);
        }

        for (var i = 0; i < _bones.Count; ++i)
        {
            var boneTransform = _bones[i].Transform;
            if (boneTransform == null) continue;

            if (IsBodySkeleton(_skeletonType))
            {
                // Body bones carry both translation and rotation in tracking space.
                boneTransform.localPosition = data.BoneTranslations[i].FromFlippedZVector3f();
                boneTransform.localRotation = data.BoneRotations[i].FromFlippedZQuatf();
            }
            else if (IsHandSkeleton(_skeletonType))
            {
                boneTransform.localRotation = data.BoneRotations[i].FromFlippedXQuatf();

                if (_bones[i].Id == BoneId.Hand_WristRoot)
                {
                    boneTransform.localRotation *= wristFixupRotation;
                }
            }
            else
            {
                boneTransform.localRotation = data.BoneRotations[i].FromFlippedZQuatf();
            }
        }
    }

    // Re-runs the skeleton update at physics rate (via Update()) and then moves
    // the kinematic capsule rigidbodies to the refreshed bone transforms.
    // Capsules are deactivated while tracking data is invalid/low-confidence.
    private void FixedUpdate()
    {
        if (!IsInitialized || _dataProvider == null)
        {
            IsDataValid = false;
            IsDataHighConfidence = false;

            return;
        }

        Update();

        if (_enablePhysicsCapsules)
        {
            var data = _dataProvider.GetSkeletonPoseData();

            IsDataValid = data.IsDataValid;
            IsDataHighConfidence = data.IsDataHighConfidence;

            for (int i = 0; i < _capsules.Count; ++i)
            {
                OVRBoneCapsule capsule = _capsules[i];
                var capsuleGO = capsule.CapsuleRigidbody.gameObject;

                if (data.IsDataValid && data.IsDataHighConfidence)
                {
                    Transform bone = _bones[(int)capsule.BoneIndex].Transform;

                    if (capsuleGO.activeSelf)
                    {
                        capsule.CapsuleRigidbody.MovePosition(bone.position);
                        capsule.CapsuleRigidbody.MoveRotation(bone.rotation);
                    }
                    else
                    {
                        // Snap (rather than interpolate) when re-activating.
                        capsuleGO.SetActive(true);
                        capsule.CapsuleRigidbody.position = bone.position;
                        capsule.CapsuleRigidbody.rotation = bone.rotation;
                    }
                }
                else
                {
                    if (capsuleGO.activeSelf)
                    {
                        capsuleGO.SetActive(false);
                    }
                }
            }
        }
    }

    public BoneId GetCurrentStartBoneId()
    {
        switch (_skeletonType)
        {
            case SkeletonType.HandLeft:
            case SkeletonType.HandRight:
                return BoneId.Hand_Start;
            case SkeletonType.Body:
                return BoneId.Body_Start;
            case SkeletonType.None:
            default:
                return BoneId.Invalid;
        }
    }

    public BoneId GetCurrentEndBoneId()
    {
        switch (_skeletonType)
        {
            case SkeletonType.HandLeft:
            case SkeletonType.HandRight:
                return BoneId.Hand_End;
            case SkeletonType.Body:
                return BoneId.Body_End;
            case SkeletonType.None:
            default:
                return BoneId.Invalid;
        }
    }

    // For Body skeletons all bones are skinnable, so this equals Body_End.
    private BoneId GetCurrentMaxSkinnableBoneId()
    {
        switch (_skeletonType)
        {
            case SkeletonType.HandLeft:
            case SkeletonType.HandRight:
                return BoneId.Hand_MaxSkinnable;
            case SkeletonType.Body:
                return BoneId.Body_End;
            case SkeletonType.None:
            default:
                return BoneId.Invalid;
        }
    }

    public int GetCurrentNumBones()
    {
        switch (_skeletonType)
        {
            case SkeletonType.HandLeft:
            case SkeletonType.HandRight:
            case SkeletonType.Body:
                return GetCurrentEndBoneId() - GetCurrentStartBoneId();
            case SkeletonType.None:
            default:
                return 0;
        }
    }

    public int GetCurrentNumSkinnableBones()
    {
        switch (_skeletonType)
        {
            case SkeletonType.HandLeft:
            case SkeletonType.HandRight:
            case SkeletonType.Body:
                return GetCurrentMaxSkinnableBoneId() - GetCurrentStartBoneId();
            case SkeletonType.None:
            default:
                return 0;
        }
    }

    // force aliased enum values to the more appropriate value
    public static string BoneLabelFromBoneId(OVRSkeleton.SkeletonType skeletonType, BoneId boneId)
    {
        if (skeletonType == OVRSkeleton.SkeletonType.Body)
        {
            switch (boneId)
            {
                case BoneId.Body_Root:
                    return "Body_Root";
                case BoneId.Body_Hips:
                    return "Body_Hips";
                case BoneId.Body_SpineLower:
                    return "Body_SpineLower";
                case BoneId.Body_SpineMiddle:
                    return "Body_SpineMiddle";
                case BoneId.Body_SpineUpper:
                    return "Body_SpineUpper";
                case BoneId.Body_Chest:
                    return "Body_Chest";
                case BoneId.Body_Neck:
                    return "Body_Neck";
                case BoneId.Body_Head:
                    return "Body_Head";
                case BoneId.Body_LeftShoulder:
                    return "Body_LeftShoulder";
                case BoneId.Body_LeftScapula:
                    return "Body_LeftScapula";
                case BoneId.Body_LeftArmUpper:
                    return "Body_LeftArmUpper";
                case BoneId.Body_LeftArmLower:
                    return "Body_LeftArmLower";
                case BoneId.Body_LeftHandWristTwist:
                    return "Body_LeftHandWristTwist";
                case BoneId.Body_RightShoulder:
                    return "Body_RightShoulder";
                case BoneId.Body_RightScapula:
                    return "Body_RightScapula";
                case BoneId.Body_RightArmUpper:
                    return "Body_RightArmUpper";
                case BoneId.Body_RightArmLower:
                    return "Body_RightArmLower";
                case BoneId.Body_RightHandWristTwist:
                    return "Body_RightHandWristTwist";
                case BoneId.Body_LeftHandPalm:
                    return "Body_LeftHandPalm";
                case BoneId.Body_LeftHandWrist:
                    return "Body_LeftHandWrist";
                case BoneId.Body_LeftHandThumbMetacarpal:
                    return "Body_LeftHandThumbMetacarpal";
                case BoneId.Body_LeftHandThumbProximal:
                    return "Body_LeftHandThumbProximal";
                case BoneId.Body_LeftHandThumbDistal:
                    return "Body_LeftHandThumbDistal";
                case BoneId.Body_LeftHandThumbTip:
                    return "Body_LeftHandThumbTip";
                case BoneId.Body_LeftHandIndexMetacarpal:
                    return "Body_LeftHandIndexMetacarpal";
                case BoneId.Body_LeftHandIndexProximal:
                    return "Body_LeftHandIndexProximal";
                case BoneId.Body_LeftHandIndexIntermediate:
                    return "Body_LeftHandIndexIntermediate";
                case BoneId.Body_LeftHandIndexDistal:
                    return "Body_LeftHandIndexDistal";
                case BoneId.Body_LeftHandIndexTip:
                    return "Body_LeftHandIndexTip";
                case BoneId.Body_LeftHandMiddleMetacarpal:
                    return "Body_LeftHandMiddleMetacarpal";
                case BoneId.Body_LeftHandMiddleProximal:
                    return "Body_LeftHandMiddleProximal";
                case BoneId.Body_LeftHandMiddleIntermediate:
                    return "Body_LeftHandMiddleIntermediate";
                case BoneId.Body_LeftHandMiddleDistal:
                    return "Body_LeftHandMiddleDistal";
                case BoneId.Body_LeftHandMiddleTip:
                    return "Body_LeftHandMiddleTip";
                case BoneId.Body_LeftHandRingMetacarpal:
                    return "Body_LeftHandRingMetacarpal";
                case BoneId.Body_LeftHandRingProximal:
                    return "Body_LeftHandRingProximal";
                case BoneId.Body_LeftHandRingIntermediate:
                    return "Body_LeftHandRingIntermediate";
                case BoneId.Body_LeftHandRingDistal:
                    return "Body_LeftHandRingDistal";
                case BoneId.Body_LeftHandRingTip:
                    return "Body_LeftHandRingTip";
                case BoneId.Body_LeftHandLittleMetacarpal:
                    return "Body_LeftHandLittleMetacarpal";
                case BoneId.Body_LeftHandLittleProximal:
                    return "Body_LeftHandLittleProximal";
                case BoneId.Body_LeftHandLittleIntermediate:
                    return "Body_LeftHandLittleIntermediate";
                case BoneId.Body_LeftHandLittleDistal:
                    return "Body_LeftHandLittleDistal";
                case BoneId.Body_LeftHandLittleTip:
                    return "Body_LeftHandLittleTip";
                case BoneId.Body_RightHandPalm:
                    return "Body_RightHandPalm";
                case BoneId.Body_RightHandWrist:
                    return "Body_RightHandWrist";
                case BoneId.Body_RightHandThumbMetacarpal:
                    return "Body_RightHandThumbMetacarpal";
                case BoneId.Body_RightHandThumbProximal:
                    return "Body_RightHandThumbProximal";
                case BoneId.Body_RightHandThumbDistal:
                    return "Body_RightHandThumbDistal";
                case BoneId.Body_RightHandThumbTip:
                    return "Body_RightHandThumbTip";
                case BoneId.Body_RightHandIndexMetacarpal:
                    return "Body_RightHandIndexMetacarpal";
                case BoneId.Body_RightHandIndexProximal:
                    return "Body_RightHandIndexProximal";
                case BoneId.Body_RightHandIndexIntermediate:
                    return "Body_RightHandIndexIntermediate";
                case BoneId.Body_RightHandIndexDistal:
                    return "Body_RightHandIndexDistal";
                case BoneId.Body_RightHandIndexTip:
                    return "Body_RightHandIndexTip";
                case BoneId.Body_RightHandMiddleMetacarpal:
                    return "Body_RightHandMiddleMetacarpal";
                case BoneId.Body_RightHandMiddleProximal:
                    return "Body_RightHandMiddleProximal";
                case BoneId.Body_RightHandMiddleIntermediate:
                    return "Body_RightHandMiddleIntermediate";
                case BoneId.Body_RightHandMiddleDistal:
                    return "Body_RightHandMiddleDistal";
                case BoneId.Body_RightHandMiddleTip:
                    return "Body_RightHandMiddleTip";
                case BoneId.Body_RightHandRingMetacarpal:
                    return "Body_RightHandRingMetacarpal";
                case BoneId.Body_RightHandRingProximal:
                    return "Body_RightHandRingProximal";
                case BoneId.Body_RightHandRingIntermediate:
                    return "Body_RightHandRingIntermediate";
                case BoneId.Body_RightHandRingDistal:
                    return "Body_RightHandRingDistal";
                case BoneId.Body_RightHandRingTip:
                    return "Body_RightHandRingTip";
                case BoneId.Body_RightHandLittleMetacarpal:
                    return "Body_RightHandLittleMetacarpal";
                case BoneId.Body_RightHandLittleProximal:
                    return "Body_RightHandLittleProximal";
                case BoneId.Body_RightHandLittleIntermediate:
                    return "Body_RightHandLittleIntermediate";
                case BoneId.Body_RightHandLittleDistal:
                    return "Body_RightHandLittleDistal";
                case BoneId.Body_RightHandLittleTip:
                    return "Body_RightHandLittleTip";
                default:
                    return "Body_Unknown";
            }
        }
        else if (IsHandSkeleton(skeletonType))
        {
            switch (boneId)
            {
                case OVRSkeleton.BoneId.Hand_WristRoot:
                    return "Hand_WristRoot";
                case OVRSkeleton.BoneId.Hand_ForearmStub:
                    return "Hand_ForearmStub";
                case OVRSkeleton.BoneId.Hand_Thumb0:
                    return "Hand_Thumb0";
                case OVRSkeleton.BoneId.Hand_Thumb1:
                    return "Hand_Thumb1";
                case OVRSkeleton.BoneId.Hand_Thumb2:
                    return "Hand_Thumb2";
                case OVRSkeleton.BoneId.Hand_Thumb3:
                    return "Hand_Thumb3";
                case OVRSkeleton.BoneId.Hand_Index1:
                    return "Hand_Index1";
                case OVRSkeleton.BoneId.Hand_Index2:
                    return "Hand_Index2";
                case OVRSkeleton.BoneId.Hand_Index3:
                    return "Hand_Index3";
                case OVRSkeleton.BoneId.Hand_Middle1:
                    return "Hand_Middle1";
                case OVRSkeleton.BoneId.Hand_Middle2:
                    return "Hand_Middle2";
                case OVRSkeleton.BoneId.Hand_Middle3:
                    return "Hand_Middle3";
                case OVRSkeleton.BoneId.Hand_Ring1:
                    return "Hand_Ring1";
                case OVRSkeleton.BoneId.Hand_Ring2:
                    return "Hand_Ring2";
                case OVRSkeleton.BoneId.Hand_Ring3:
                    return "Hand_Ring3";
                case OVRSkeleton.BoneId.Hand_Pinky0:
                    return "Hand_Pinky0";
                case OVRSkeleton.BoneId.Hand_Pinky1:
                    return "Hand_Pinky1";
                case OVRSkeleton.BoneId.Hand_Pinky2:
                    return "Hand_Pinky2";
                case OVRSkeleton.BoneId.Hand_Pinky3:
                    return "Hand_Pinky3";
                case OVRSkeleton.BoneId.Hand_ThumbTip:
                    return "Hand_ThumbTip";
                case OVRSkeleton.BoneId.Hand_IndexTip:
                    return "Hand_IndexTip";
                case OVRSkeleton.BoneId.Hand_MiddleTip:
                    return "Hand_MiddleTip";
                case OVRSkeleton.BoneId.Hand_RingTip:
                    return "Hand_RingTip";
                case OVRSkeleton.BoneId.Hand_PinkyTip:
                    return "Hand_PinkyTip";
                default:
                    return "Hand_Unknown";
            }
        }
        else
        {
            return "Skeleton_Unknown";
        }
    }

    internal static bool IsBodySkeleton(SkeletonType type) => type == SkeletonType.Body;

    private static bool IsHandSkeleton(SkeletonType type) =>
        type == SkeletonType.HandLeft || type == SkeletonType.HandRight;
}
/// <summary>
/// A single bone of an <see cref="OVRSkeleton"/>: its identifier, the index of its
/// parent bone in the skeleton's bone list, and the scene transform it drives.
/// </summary>
public class OVRBone
{
    /// <summary>Identifier of this bone within the skeleton.</summary>
    public OVRSkeleton.BoneId Id { get; set; }

    /// <summary>Index of the parent bone in the owning skeleton's bone list.</summary>
    public short ParentBoneIndex { get; set; }

    /// <summary>Scene transform representing this bone's pose.</summary>
    public Transform Transform { get; set; }

    /// <summary>Creates an empty bone; properties are expected to be populated later.</summary>
    public OVRBone()
    {
    }

    /// <summary>Creates a fully initialized bone.</summary>
    public OVRBone(OVRSkeleton.BoneId id, short parentBoneIndex, Transform trans)
    {
        Transform = trans;
        ParentBoneIndex = parentBoneIndex;
        Id = id;
    }
}
/// <summary>
/// Associates a skeleton bone (by index) with the rigidbody and capsule collider
/// that represent it in the physics scene.
/// </summary>
public class OVRBoneCapsule
{
    /// <summary>Index of the bone this capsule is attached to.</summary>
    public short BoneIndex { get; set; }

    /// <summary>Rigidbody carrying the capsule.</summary>
    public Rigidbody CapsuleRigidbody { get; set; }

    /// <summary>Collider describing the capsule's shape.</summary>
    public CapsuleCollider CapsuleCollider { get; set; }

    /// <summary>Creates an empty record; properties are expected to be populated later.</summary>
    public OVRBoneCapsule()
    {
    }

    /// <summary>Creates a fully initialized record.</summary>
    public OVRBoneCapsule(short boneIndex, Rigidbody capsuleRigidBody, CapsuleCollider capsuleCollider)
    {
        CapsuleCollider = capsuleCollider;
        CapsuleRigidbody = capsuleRigidBody;
        BoneIndex = boneIndex;
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2609c54f376cffc4da1ab9401cc1a36f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -80
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,388 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Debug visualization for an <see cref="OVRSkeleton"/>: draws a line-renderer
/// segment for every bone (from the bone's transform to its parent transform) and,
/// optionally, a mesh for each of the skeleton's physics capsules.
/// </summary>
public class OVRSkeletonRenderer : MonoBehaviour
{
    /// <summary>Supplies the per-frame skeleton state consumed by this renderer.</summary>
    public interface IOVRSkeletonRendererDataProvider
    {
        SkeletonRendererData GetSkeletonRendererData();
    }

    /// <summary>One frame's worth of renderer-relevant skeleton state.</summary>
    public struct SkeletonRendererData
    {
        // Uniform scale applied to line widths and capsule sizes each frame.
        public float RootScale { get; set; }
        public bool IsDataValid { get; set; }
        public bool IsDataHighConfidence { get; set; }
        // When true, visuals swap to the system-gesture material (if that behavior is enabled).
        public bool ShouldUseSystemGestureMaterial { get; set; }
    }

    /// <summary>Controls what happens when tracking data is invalid or low-confidence.</summary>
    public enum ConfidenceBehavior
    {
        None,
        // Hide the bone/capsule renderers while confidence is low.
        ToggleRenderer,
    }

    /// <summary>Controls what happens while a system gesture is in progress.</summary>
    public enum SystemGestureBehavior
    {
        None,
        // Swap to _systemGestureMaterial while the gesture is active.
        SwapMaterial,
    }

    // NOTE(review): Unity does not serialize plain interface-typed fields; in practice
    // this is resolved at runtime via GetComponent in Awake() — confirm in the editor.
    [SerializeField]
    private IOVRSkeletonRendererDataProvider _dataProvider;

    [SerializeField]
    private ConfidenceBehavior _confidenceBehavior = ConfidenceBehavior.ToggleRenderer;

    [SerializeField]
    private SystemGestureBehavior _systemGestureBehavior = SystemGestureBehavior.SwapMaterial;

    [SerializeField]
    private bool _renderPhysicsCapsules = false;

    // User-assignable materials. When left null, a default "Diffuse" material is
    // created in Initialize() and remembered in the matching *DefaultMaterial field
    // so OnDestroy() can destroy only materials this component created.
    [SerializeField]
    private Material _skeletonMaterial;
    private Material _skeletonDefaultMaterial;

    [SerializeField]
    private Material _capsuleMaterial;
    private Material _capsuleDefaultMaterial;

    [SerializeField]
    private Material _systemGestureMaterial = null;
    private Material _systemGestureDefaultMaterial;

    // Base line width in meters, multiplied by the skeleton's root scale each frame.
    private const float LINE_RENDERER_WIDTH = 0.005f;

    private List<BoneVisualization> _boneVisualizations;
    private List<CapsuleVisualization> _capsuleVisualizations;
    private OVRSkeleton _ovrSkeleton;
    private GameObject _skeletonGO;
    private float _scale;

    // Rotates Unity's Y-aligned capsule primitive (90 degrees about Z) to match the
    // collider's axis — assumes the bone capsule colliders run along their X axis;
    // TODO confirm against OVRSkeleton's capsule setup.
    private static readonly Quaternion _capsuleRotationOffset = Quaternion.Euler(0, 0, 90);

    public bool IsInitialized { get; private set; }
    public bool IsDataValid { get; private set; }
    public bool IsDataHighConfidence { get; private set; }
    public bool ShouldUseSystemGestureMaterial { get; private set; }

    /// <summary>One bone segment, drawn as a world-space LineRenderer between two transforms.</summary>
    private class BoneVisualization
    {
        private GameObject BoneGO;
        private Transform BoneBegin;
        private Transform BoneEnd;
        private LineRenderer Line;
        private Material RenderMaterial;
        private Material SystemGestureMaterial;

        /// <summary>Creates the line object under <paramref name="rootGO"/> and sets its initial endpoints and width.</summary>
        public BoneVisualization(GameObject rootGO,
            Material renderMat,
            Material systemGestureMat,
            float scale,
            Transform begin,
            Transform end)
        {
            RenderMaterial = renderMat;
            SystemGestureMaterial = systemGestureMat;
            BoneBegin = begin;
            BoneEnd = end;
            BoneGO = new GameObject(begin.name);
            BoneGO.transform.SetParent(rootGO.transform, false);
            Line = BoneGO.AddComponent<LineRenderer>();
            Line.sharedMaterial = RenderMaterial;
            // World-space positions: the line simply tracks the bone transforms.
            Line.useWorldSpace = true;
            Line.positionCount = 2;
            Line.SetPosition(0, BoneBegin.position);
            Line.SetPosition(1, BoneEnd.position);
            Line.startWidth = LINE_RENDERER_WIDTH * scale;
            Line.endWidth = LINE_RENDERER_WIDTH * scale;
        }

        /// <summary>Refreshes endpoints/width and applies the confidence and gesture behaviors.</summary>
        public void Update(float scale,
            bool shouldRender,
            bool shouldUseSystemGestureMaterial,
            ConfidenceBehavior confidenceBehavior,
            SystemGestureBehavior systemGestureBehavior)
        {
            Line.SetPosition(0, BoneBegin.position);
            Line.SetPosition(1, BoneEnd.position);
            Line.startWidth = LINE_RENDERER_WIDTH * scale;
            Line.endWidth = LINE_RENDERER_WIDTH * scale;
            if (confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
            {
                Line.enabled = shouldRender;
            }
            if (systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
            {
                // Only reassign sharedMaterial when it actually changes.
                if (shouldUseSystemGestureMaterial && Line.sharedMaterial != SystemGestureMaterial)
                {
                    Line.sharedMaterial = SystemGestureMaterial;
                }
                else if (!shouldUseSystemGestureMaterial && Line.sharedMaterial != RenderMaterial)
                {
                    Line.sharedMaterial = RenderMaterial;
                }
            }
        }
    }

    /// <summary>One physics-capsule visual built from Unity's capsule primitive (collider stripped).</summary>
    private class CapsuleVisualization
    {
        private GameObject CapsuleGO;
        private OVRBoneCapsule BoneCapsule;
        // Unscaled local scale derived from the source collider's radius/height;
        // multiplied by the skeleton scale each frame.
        private Vector3 capsuleScale;
        private MeshRenderer Renderer;
        private Material RenderMaterial;
        private Material SystemGestureMaterial;

        /// <summary>Creates the capsule primitive sized to match <paramref name="boneCapsule"/>'s collider.</summary>
        public CapsuleVisualization(GameObject rootGO,
            Material renderMat,
            Material systemGestureMat,
            float scale,
            OVRBoneCapsule boneCapsule)
        {
            RenderMaterial = renderMat;
            SystemGestureMaterial = systemGestureMat;
            BoneCapsule = boneCapsule;
            CapsuleGO = GameObject.CreatePrimitive(PrimitiveType.Capsule);
            // The primitive is visual-only; remove the collider it ships with.
            CapsuleCollider collider = CapsuleGO.GetComponent<CapsuleCollider>();
            Destroy(collider);
            Renderer = CapsuleGO.GetComponent<MeshRenderer>();
            Renderer.sharedMaterial = RenderMaterial;
            // Unity's capsule primitive is 2 units tall and 1 unit wide at scale 1,
            // hence height/2 and radius*2.
            capsuleScale = Vector3.one;
            capsuleScale.y = boneCapsule.CapsuleCollider.height / 2;
            capsuleScale.x = boneCapsule.CapsuleCollider.radius * 2;
            capsuleScale.z = boneCapsule.CapsuleCollider.radius * 2;
            CapsuleGO.transform.localScale = capsuleScale * scale;
        }

        /// <summary>Tracks the collider's pose/scale and applies the confidence and gesture behaviors.</summary>
        public void Update(float scale,
            bool shouldRender,
            bool shouldUseSystemGestureMaterial,
            ConfidenceBehavior confidenceBehavior,
            SystemGestureBehavior systemGestureBehavior)
        {
            if (confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
            {
                if (CapsuleGO.activeSelf != shouldRender)
                {
                    CapsuleGO.SetActive(shouldRender);
                }
            }
            // Pose tracks the source collider even while the visual is hidden.
            CapsuleGO.transform.rotation = BoneCapsule.CapsuleCollider.transform.rotation * _capsuleRotationOffset;
            CapsuleGO.transform.position =
                BoneCapsule.CapsuleCollider.transform.TransformPoint(BoneCapsule.CapsuleCollider.center);
            CapsuleGO.transform.localScale = capsuleScale * scale;
            if (systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
            {
                if (shouldUseSystemGestureMaterial && Renderer.sharedMaterial != SystemGestureMaterial)
                {
                    Renderer.sharedMaterial = SystemGestureMaterial;
                }
                else if (!shouldUseSystemGestureMaterial && Renderer.sharedMaterial != RenderMaterial)
                {
                    Renderer.sharedMaterial = RenderMaterial;
                }
            }
        }
    }

    // Resolve sibling components when not assigned externally.
    private void Awake()
    {
        if (_dataProvider == null)
        {
            _dataProvider = GetComponent<IOVRSkeletonRendererDataProvider>();
        }
        if (_ovrSkeleton == null)
        {
            _ovrSkeleton = GetComponent<OVRSkeleton>();
        }
    }

    // Disable the component entirely when no skeleton is available; otherwise
    // initialize now if the skeleton is already ready (it may also become ready
    // later — see the editor-only re-check in Update()).
    private void Start()
    {
        if (_ovrSkeleton == null)
        {
            this.enabled = false;
            return;
        }
        if (ShouldInitialize())
        {
            Initialize();
        }
    }

    // True when we have not initialized yet and the skeleton is ready.
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }
        return _ovrSkeleton.IsInitialized;
    }

    // Builds the visualization hierarchy: default materials (if needed), one
    // BoneVisualization per bone (bone -> parent segment) and, optionally, one
    // CapsuleVisualization per physics capsule.
    private void Initialize()
    {
        _boneVisualizations = new List<BoneVisualization>();
        _capsuleVisualizations = new List<CapsuleVisualization>();
        _ovrSkeleton = GetComponent<OVRSkeleton>();
        _skeletonGO = new GameObject("SkeletonRenderer");
        _skeletonGO.transform.SetParent(transform, false);
        if (_skeletonMaterial == null)
        {
            _skeletonDefaultMaterial = new Material(Shader.Find("Diffuse"));
            _skeletonMaterial = _skeletonDefaultMaterial;
        }
        if (_capsuleMaterial == null)
        {
            _capsuleDefaultMaterial = new Material(Shader.Find("Diffuse"));
            _capsuleMaterial = _capsuleDefaultMaterial;
        }
        if (_systemGestureMaterial == null)
        {
            _systemGestureDefaultMaterial = new Material(Shader.Find("Diffuse"));
            _systemGestureDefaultMaterial.color = Color.blue;
            _systemGestureMaterial = _systemGestureDefaultMaterial;
        }
        if (_ovrSkeleton.IsInitialized)
        {
            for (int i = 0; i < _ovrSkeleton.Bones.Count; i++)
            {
                // NOTE(review): uses Transform.parent as the segment's far end;
                // presumably every bone transform has a parent within the skeleton
                // hierarchy — confirm for root bones.
                var boneVis = new BoneVisualization(
                    _skeletonGO,
                    _skeletonMaterial,
                    _systemGestureMaterial,
                    _scale,
                    _ovrSkeleton.Bones[i].Transform,
                    _ovrSkeleton.Bones[i].Transform.parent);
                _boneVisualizations.Add(boneVis);
            }
            if (_renderPhysicsCapsules && _ovrSkeleton.Capsules != null)
            {
                for (int i = 0; i < _ovrSkeleton.Capsules.Count; i++)
                {
                    var capsuleVis = new CapsuleVisualization(
                        _skeletonGO,
                        _capsuleMaterial,
                        _systemGestureMaterial,
                        _scale,
                        _ovrSkeleton.Capsules[i]);
                    _capsuleVisualizations.Add(capsuleVis);
                }
            }
            IsInitialized = true;
        }
    }

    // Pulls fresh data from the provider and forwards it to every bone/capsule visual.
    public void Update()
    {
#if UNITY_EDITOR
        // In the editor the skeleton may initialize after Start(); keep retrying.
        if (ShouldInitialize())
        {
            Initialize();
        }
#endif
        IsDataValid = false;
        IsDataHighConfidence = false;
        ShouldUseSystemGestureMaterial = false;
        if (IsInitialized)
        {
            bool shouldRender = false;
            if (_dataProvider != null)
            {
                var data = _dataProvider.GetSkeletonRendererData();
                IsDataValid = data.IsDataValid;
                IsDataHighConfidence = data.IsDataHighConfidence;
                ShouldUseSystemGestureMaterial = data.ShouldUseSystemGestureMaterial;
                shouldRender = data.IsDataValid && data.IsDataHighConfidence;
                // Keep the last known scale when the data goes invalid.
                if (data.IsDataValid)
                {
                    _scale = data.RootScale;
                }
            }
            for (int i = 0; i < _boneVisualizations.Count; i++)
            {
                _boneVisualizations[i].Update(_scale, shouldRender, ShouldUseSystemGestureMaterial, _confidenceBehavior,
                    _systemGestureBehavior);
            }
            for (int i = 0; i < _capsuleVisualizations.Count; i++)
            {
                _capsuleVisualizations[i].Update(_scale, shouldRender, ShouldUseSystemGestureMaterial,
                    _confidenceBehavior, _systemGestureBehavior);
            }
        }
    }

    // Destroy only the materials this component created; user-assigned materials
    // are left untouched.
    private void OnDestroy()
    {
        if (_skeletonDefaultMaterial != null)
        {
            DestroyImmediate(_skeletonDefaultMaterial, false);
        }
        if (_capsuleDefaultMaterial != null)
        {
            DestroyImmediate(_capsuleDefaultMaterial, false);
        }
        if (_systemGestureDefaultMaterial != null)
        {
            DestroyImmediate(_systemGestureDefaultMaterial, false);
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 54c16b381e28e8d479237771d234dbae
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: -70
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,302 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Test behaviour for Mixed Reality Capture "spectator mode": registers
/// <see cref="defaultExternalCamera"/> as the MRC external camera every frame and,
/// on Quest (OVR_ANDROID_MRC builds), lets the user toggle the platform camera mode
/// and move the spectator camera on a dome around <see cref="SpectatorAnchor"/>
/// with the controller thumbsticks.
/// </summary>
public class OVRSpectatorModeDomeTest : MonoBehaviour
{
    // Set once Initialize() has successfully registered the external camera.
    bool inited = false;

    // Scene camera whose FOV/clip planes/pose define the MRC external camera.
    public Camera defaultExternalCamera;

    // FOV captured from the mixed-reality camera at init time.
    OVRPlugin.Fovf defaultFov;

    public Transform SpectatorAnchor;
    public Transform Head;
#if OVR_ANDROID_MRC
    private OVRPlugin.Media.PlatformCameraMode camMode = OVRPlugin.Media.PlatformCameraMode.Disabled;

    // Debounces mode switches: cleared for 2 seconds after each toggle (TimerCoroutine).
    private bool readyToSwitch = false;

    private Transform SpectatorCamera;

    // Dome sphere representation: spherical coordinates of the spectator camera
    // around the anchor (distance in meters; elevation/polar in degrees).
    private float distance = 0.8f;
    private float elevation = 0.0f;
    private float polar = 90.0f;
    private const float distance_near = 0.5f;
    private const float distance_far = 1.2f;
    private const float elevationLimit = 30.0f;
#endif

    // Start is called before the first frame update
    void Awake()
    {
#if OVR_ANDROID_MRC
        OVRPlugin.Media.SetPlatformInitialized();
        // The spectator camera rig is the external camera's parent transform.
        SpectatorCamera = defaultExternalCamera.transform.parent;
#endif
    }

    // Use this for initialization
    void Start()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (!defaultExternalCamera)
        {
            Debug.LogWarning("defaultExternalCamera undefined");
        }
#if !OVR_ANDROID_MRC
        // On Quest, we enable MRC automatically through the configuration
        if (!OVRManager.instance.enableMixedReality)
        {
            OVRManager.instance.enableMixedReality = true;
        }
#endif
#endif
    }

    // One-time setup once the MRC subsystem is available: registers the manual
    // external camera and caches its FOV. Retried from Update() until it succeeds.
    void Initialize()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (inited)
            return;
#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
            return;
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
            return;
#endif
        OVRPlugin.ResetDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount before adding manual external camera {0}",
            OVRPlugin.GetExternalCameraCount());
        UpdateDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount after adding manual external camera {0}",
            OVRPlugin.GetExternalCameraCount());
        // obtain default FOV
        {
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            OVRPlugin.CameraExtrinsics cameraExtrinsics;
            OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics);
            defaultFov = cameraIntrinsics.FOVPort;
        }
        inited = true;
#if OVR_ANDROID_MRC
        readyToSwitch = true;
#endif
#endif
    }

    // Rebuilds the external camera's intrinsics (1080p, FOV/clip planes from
    // defaultExternalCamera) and extrinsics (camera pose relative to the tracking
    // space; converted to stage space on Android MRC) and re-registers it.
    void UpdateDefaultExternalCamera()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // always build a 1080p external camera
        const int cameraPixelWidth = 1920;
        const int cameraPixelHeight = 1080;
        const float cameraAspect = (float)cameraPixelWidth / cameraPixelHeight;
        string cameraName = "UnityExternalCamera";
        OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
        OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();
        // intrinsics
        cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
        cameraIntrinsics.LastChangedTimeSeconds = Time.time;
        // Derive the horizontal FOV from the vertical FOV and the fixed 16:9 aspect.
        float vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
        float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
        OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
        fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
        fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
        cameraIntrinsics.FOVPort = fov;
        cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
        cameraIntrinsics.VirtualFarPlaneDistanceMeters = defaultExternalCamera.farClipPlane;
        cameraIntrinsics.ImageSensorPixelResolution.w = cameraPixelWidth;
        cameraIntrinsics.ImageSensorPixelResolution.h = cameraPixelHeight;
        // extrinsics
        cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
        cameraExtrinsics.LastChangedTimeSeconds = Time.time;
        cameraExtrinsics.CameraStatusData = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
        cameraExtrinsics.AttachedToNode = OVRPlugin.Node.None;
        Camera mainCamera = Camera.main;
        OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
        if (cameraRig)
        {
            // Express the external camera's pose relative to the tracking space.
            Transform trackingSpace = cameraRig.trackingSpace;
            OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
            OVRPose cameraPose = defaultExternalCamera.transform.ToOVRPose(false);
            OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
#if OVR_ANDROID_MRC
            // Android MRC expects the pose in stage space rather than local space.
            OVRPose stageToLocalPose =
                OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
            OVRPose localToStagePose = stageToLocalPose.Inverse();
            relativePose = localToStagePose * relativePose;
#endif
            cameraExtrinsics.RelativePose = relativePose.ToPosef();
        }
        else
        {
            cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
        }
        if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
        {
            Debug.LogError("SetDefaultExternalCamera() failed");
        }
#endif
    }

    // Android MRC only: handles the trigger toggle between 1st-person and spectator
    // casting, moves the spectator camera on the dome with the thumbsticks, and keeps
    // the Head transform glued to the anchor.
    private void UpdateSpectatorCameraStatus()
    {
#if OVR_ANDROID_MRC
        // Trigger to switch between 1st person and spectator mode during casting to phone
        if (OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger) || OVRInput.GetDown(OVRInput.Button.SecondaryIndexTrigger))
        {
            camMode = OVRPlugin.Media.GetPlatformCameraMode();
            if (camMode == OVRPlugin.Media.PlatformCameraMode.Disabled && readyToSwitch)
            {
                OVRPlugin.Media.SetMrcFrameImageFlipped(false);
                OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Initialized);
                StartCoroutine(TimerCoroutine());
            }
            if (camMode == OVRPlugin.Media.PlatformCameraMode.Initialized && readyToSwitch)
            {
                OVRPlugin.Media.SetMrcFrameImageFlipped(true);
                OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
                StartCoroutine(TimerCoroutine());
            }
        }
        // Keep spectator camera on dome surface
        // Right thumbstick: orbit (x -> polar angle, y -> elevation, clamped).
        Vector2 axis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
        if (Mathf.Abs(axis.x) > 0.2f)
        {
            polar = polar - axis.x * 0.5f;
        }
        if (Mathf.Abs(axis.y) > 0.2f)
        {
            elevation = elevation + axis.y * 0.5f;
            if (elevation < -90.0f + elevationLimit) elevation = -90.0f + elevationLimit;
            if (elevation > 90.0f) elevation = 90.0f;
        }
        // Left thumbstick: dolly in/out, clamped to [distance_near, distance_far].
        axis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
        if (Mathf.Abs(axis.y) > 0.1f)
        {
            distance = axis.y * 0.05f + distance;
            if (distance > distance_far) distance = distance_far;
            if (distance < distance_near) distance = distance_near;
        }
        SpectatorCamera.position = SpectatorCameraDomePosition(SpectatorAnchor.position, distance, elevation, polar);
        SpectatorCamera.rotation = Quaternion.LookRotation(SpectatorCamera.position - SpectatorAnchor.position);
        Head.position = SpectatorAnchor.position;
        Head.rotation = SpectatorAnchor.rotation;
#endif
    }

    // Spherical-to-Cartesian conversion: distance d, elevation e and polar angle p
    // (both in degrees) around the anchor position.
    Vector3 SpectatorCameraDomePosition(Vector3 spectatorAnchorPosition, float d, float e, float p)
    {
        float x = d * Mathf.Cos(Mathf.Deg2Rad * e) * Mathf.Cos(Mathf.Deg2Rad * p);
        float y = d * Mathf.Sin(Mathf.Deg2Rad * e);
        float z = d * Mathf.Cos(Mathf.Deg2Rad * e) * Mathf.Sin(Mathf.Deg2Rad * p);
        return new Vector3(x + spectatorAnchorPosition.x, y + spectatorAnchorPosition.y, z + spectatorAnchorPosition.z);
    }

    // 2-second cooldown between camera-mode switches (Android MRC only).
    IEnumerator TimerCoroutine()
    {
#if OVR_ANDROID_MRC
        readyToSwitch = false;
#endif
        yield return new WaitForSeconds(2);
#if OVR_ANDROID_MRC
        readyToSwitch = true;
#endif
    }

    // Update is called once per frame
    void Update()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // Retry initialization until the MRC subsystem is ready.
        if (!inited)
        {
            Initialize();
            return;
        }
        if (!defaultExternalCamera)
        {
            return;
        }
#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }
#endif
        UpdateSpectatorCameraStatus();
        UpdateDefaultExternalCamera();
        // Clear any FOV/pose overrides so the registered camera data is used as-is.
        OVRPlugin.OverrideExternalCameraFov(0, false, new OVRPlugin.Fovf());
        OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
#endif
    }

    // Drop back to the disabled (1st-person) camera mode when the app pauses.
    void OnApplicationPause()
    {
#if OVR_ANDROID_MRC
        OVRPlugin.Media.SetMrcFrameImageFlipped(true);
        OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
#endif
    }

    // Drop back to the disabled (1st-person) camera mode when the app quits.
    void OnApplicationQuit()
    {
#if OVR_ANDROID_MRC
        OVRPlugin.Media.SetMrcFrameImageFlipped(true);
        OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
#endif
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c4f8903397a67414fbb142fa1bfacede
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,342 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.Networking;
using Debug = UnityEngine.Debug;
/// <summary>
/// Collects per-frame system performance metrics (frame timing, CPU/GPU times,
/// utilization, clock levels) from OVRPlugin, serializes them to JSON, and streams
/// them to connected tools over TCP via the nested
/// <see cref="OVRSystemPerfMetricsTcpServer"/> behaviour.
/// </summary>
public class OVRSystemPerfMetrics
{
    /// <summary>TCP port the metrics server listens on.</summary>
    public const int TcpListeningPort = 32419;

    /// <summary>Payload-type tag used for metrics messages on the wire.</summary>
    public const int PayloadTypeMetrics = 100;

    public const int MaxBufferLength = 65536;

    // One int is reserved for the message-length header.
    public const int MaxMessageLength = MaxBufferLength - sizeof(int);

    /// <summary>
    /// One frame's metrics snapshot. Each optional metric is paired with an
    /// *_IsValid flag: the metric is only meaningful (and only serialized) when its
    /// flag is true.
    /// </summary>
    public class PerfMetrics
    {
        public int frameCount;
        public float frameTime;
        public float deltaFrameTime;
        public bool appCpuTime_IsValid;
        public float appCpuTime;
        public bool appGpuTime_IsValid;
        public float appGpuTime;
        public bool compositorCpuTime_IsValid;
        public float compositorCpuTime;
        public bool compositorGpuTime_IsValid;
        public float compositorGpuTime;
        public bool compositorDroppedFrameCount_IsValid;
        public int compositorDroppedFrameCount;
        public bool compositorSpaceWarpMode_IsValid;
        public int compositorSpaceWarpMode;
        public bool systemGpuUtilPercentage_IsValid;
        public float systemGpuUtilPercentage;
        public bool systemCpuUtilAveragePercentage_IsValid;
        public float systemCpuUtilAveragePercentage;
        public bool systemCpuUtilWorstPercentage_IsValid;
        public float systemCpuUtilWorstPercentage;
        public bool deviceCpuClockFrequencyInMHz_IsValid;
        public float deviceCpuClockFrequencyInMHz;
        public bool deviceGpuClockFrequencyInMHz_IsValid;
        public float deviceGpuClockFrequencyInMHz;
        public bool deviceCpuClockLevel_IsValid;
        public int deviceCpuClockLevel;
        public bool deviceGpuClockLevel_IsValid;
        public int deviceGpuClockLevel;
        // Per-core utilization, indexed by core; validity tracked per core.
        public bool[] deviceCpuCoreUtilPercentage_IsValid = new bool[OVRPlugin.MAX_CPU_CORES];
        public float[] deviceCpuCoreUtilPercentage = new float[OVRPlugin.MAX_CPU_CORES];

        /// <summary>
        /// Serializes this snapshot to a JSON object string. Frame fields are always
        /// written; optional metrics are written only when their *_IsValid flag is set.
        /// </summary>
        public string ToJSON()
        {
            OVRSimpleJSON.JSONObject jsonNode = new OVRSimpleJSON.JSONObject();
            jsonNode.Add("frameCount", new OVRSimpleJSON.JSONNumber(frameCount));
            jsonNode.Add("frameTime", new OVRSimpleJSON.JSONNumber(frameTime));
            jsonNode.Add("deltaFrameTime", new OVRSimpleJSON.JSONNumber(deltaFrameTime));
            if (appCpuTime_IsValid)
            {
                jsonNode.Add("appCpuTime", new OVRSimpleJSON.JSONNumber(appCpuTime));
            }
            if (appGpuTime_IsValid)
            {
                jsonNode.Add("appGpuTime", new OVRSimpleJSON.JSONNumber(appGpuTime));
            }
            if (compositorCpuTime_IsValid)
            {
                jsonNode.Add("compositorCpuTime", new OVRSimpleJSON.JSONNumber(compositorCpuTime));
            }
            if (compositorGpuTime_IsValid)
            {
                jsonNode.Add("compositorGpuTime", new OVRSimpleJSON.JSONNumber(compositorGpuTime));
            }
            if (compositorDroppedFrameCount_IsValid)
            {
                jsonNode.Add("compositorDroppedFrameCount", new OVRSimpleJSON.JSONNumber(compositorDroppedFrameCount));
            }
            if (compositorSpaceWarpMode_IsValid)
            {
                jsonNode.Add("compositorSpaceWarpMode", new OVRSimpleJSON.JSONNumber(compositorSpaceWarpMode));
            }
            if (systemGpuUtilPercentage_IsValid)
            {
                jsonNode.Add("systemGpuUtilPercentage", new OVRSimpleJSON.JSONNumber(systemGpuUtilPercentage));
            }
            if (systemCpuUtilAveragePercentage_IsValid)
            {
                jsonNode.Add("systemCpuUtilAveragePercentage",
                    new OVRSimpleJSON.JSONNumber(systemCpuUtilAveragePercentage));
            }
            if (systemCpuUtilWorstPercentage_IsValid)
            {
                jsonNode.Add("systemCpuUtilWorstPercentage",
                    new OVRSimpleJSON.JSONNumber(systemCpuUtilWorstPercentage));
            }
            if (deviceCpuClockFrequencyInMHz_IsValid)
            {
                jsonNode.Add("deviceCpuClockFrequencyInMHz",
                    new OVRSimpleJSON.JSONNumber(deviceCpuClockFrequencyInMHz));
            }
            if (deviceGpuClockFrequencyInMHz_IsValid)
            {
                jsonNode.Add("deviceGpuClockFrequencyInMHz",
                    new OVRSimpleJSON.JSONNumber(deviceGpuClockFrequencyInMHz));
            }
            if (deviceCpuClockLevel_IsValid)
            {
                jsonNode.Add("deviceCpuClockLevel", new OVRSimpleJSON.JSONNumber(deviceCpuClockLevel));
            }
            if (deviceGpuClockLevel_IsValid)
            {
                jsonNode.Add("deviceGpuClockLevel", new OVRSimpleJSON.JSONNumber(deviceGpuClockLevel));
            }
            for (int i = 0; i < OVRPlugin.MAX_CPU_CORES; i++)
            {
                if (deviceCpuCoreUtilPercentage_IsValid[i])
                    jsonNode.Add("deviceCpuCore" + i + "UtilPercentage",
                        new OVRSimpleJSON.JSONNumber(deviceCpuCoreUtilPercentage[i]));
            }
            string str = jsonNode.ToString();
            return str;
        }

        /// <summary>
        /// Populates this snapshot from a JSON object string produced by
        /// <see cref="ToJSON"/>. Missing optional fields leave their *_IsValid flag
        /// false and the value at 0. Returns false when the input is not a JSON object.
        /// </summary>
        public bool LoadFromJSON(string json)
        {
            OVRSimpleJSON.JSONObject jsonNode = OVRSimpleJSON.JSONObject.Parse(json) as OVRSimpleJSON.JSONObject;
            if (jsonNode == null)
            {
                return false;
            }
            frameCount = jsonNode["frameCount"] != null ? jsonNode["frameCount"].AsInt : 0;
            frameTime = jsonNode["frameTime"] != null ? jsonNode["frameTime"].AsFloat : 0;
            deltaFrameTime = jsonNode["deltaFrameTime"] != null ? jsonNode["deltaFrameTime"].AsFloat : 0;
            appCpuTime_IsValid = jsonNode["appCpuTime"] != null;
            appCpuTime = appCpuTime_IsValid ? jsonNode["appCpuTime"].AsFloat : 0;
            appGpuTime_IsValid = jsonNode["appGpuTime"] != null;
            appGpuTime = appGpuTime_IsValid ? jsonNode["appGpuTime"].AsFloat : 0;
            compositorCpuTime_IsValid = jsonNode["compositorCpuTime"] != null;
            compositorCpuTime = compositorCpuTime_IsValid ? jsonNode["compositorCpuTime"].AsFloat : 0;
            compositorGpuTime_IsValid = jsonNode["compositorGpuTime"] != null;
            compositorGpuTime = compositorGpuTime_IsValid ? jsonNode["compositorGpuTime"].AsFloat : 0;
            compositorDroppedFrameCount_IsValid = jsonNode["compositorDroppedFrameCount"] != null;
            compositorDroppedFrameCount =
                compositorDroppedFrameCount_IsValid ? jsonNode["compositorDroppedFrameCount"].AsInt : 0;
            compositorSpaceWarpMode_IsValid = jsonNode["compositorSpaceWarpMode"] != null;
            compositorSpaceWarpMode = compositorSpaceWarpMode_IsValid ? jsonNode["compositorSpaceWarpMode"].AsInt : 0;
            systemGpuUtilPercentage_IsValid = jsonNode["systemGpuUtilPercentage"] != null;
            systemGpuUtilPercentage = systemGpuUtilPercentage_IsValid ? jsonNode["systemGpuUtilPercentage"].AsFloat : 0;
            systemCpuUtilAveragePercentage_IsValid = jsonNode["systemCpuUtilAveragePercentage"] != null;
            systemCpuUtilAveragePercentage = systemCpuUtilAveragePercentage_IsValid
                ? jsonNode["systemCpuUtilAveragePercentage"].AsFloat
                : 0;
            systemCpuUtilWorstPercentage_IsValid = jsonNode["systemCpuUtilWorstPercentage"] != null;
            systemCpuUtilWorstPercentage = systemCpuUtilWorstPercentage_IsValid
                ? jsonNode["systemCpuUtilWorstPercentage"].AsFloat
                : 0;
            deviceCpuClockFrequencyInMHz_IsValid = jsonNode["deviceCpuClockFrequencyInMHz"] != null;
            deviceCpuClockFrequencyInMHz = deviceCpuClockFrequencyInMHz_IsValid
                ? jsonNode["deviceCpuClockFrequencyInMHz"].AsFloat
                : 0;
            deviceGpuClockFrequencyInMHz_IsValid = jsonNode["deviceGpuClockFrequencyInMHz"] != null;
            deviceGpuClockFrequencyInMHz = deviceGpuClockFrequencyInMHz_IsValid
                ? jsonNode["deviceGpuClockFrequencyInMHz"].AsFloat
                : 0;
            deviceCpuClockLevel_IsValid = jsonNode["deviceCpuClockLevel"] != null;
            deviceCpuClockLevel = deviceCpuClockLevel_IsValid ? jsonNode["deviceCpuClockLevel"].AsInt : 0;
            deviceGpuClockLevel_IsValid = jsonNode["deviceGpuClockLevel"] != null;
            deviceGpuClockLevel = deviceGpuClockLevel_IsValid ? jsonNode["deviceGpuClockLevel"].AsInt : 0;
            for (int i = 0; i < OVRPlugin.MAX_CPU_CORES; i++)
            {
                deviceCpuCoreUtilPercentage_IsValid[i] = jsonNode["deviceCpuCore" + i + "UtilPercentage"] != null;
                deviceCpuCoreUtilPercentage[i] = deviceCpuCoreUtilPercentage_IsValid[i]
                    ? jsonNode["deviceCpuCore" + i + "UtilPercentage"].AsFloat
                    : 0;
            }
            return true;
        }
    }

    /// <summary>
    /// Singleton behaviour that gathers a <see cref="PerfMetrics"/> snapshot every
    /// frame and broadcasts it as JSON to all connected TCP clients.
    /// </summary>
    public class OVRSystemPerfMetricsTcpServer : MonoBehaviour
    {
        public static OVRSystemPerfMetricsTcpServer singleton = null;
        private OVRNetwork.OVRNetworkTcpServer tcpServer = new OVRNetwork.OVRNetworkTcpServer();
        public int listeningPort = OVRSystemPerfMetrics.TcpListeningPort;

        // Registers this instance as the singleton and starts listening; duplicate
        // instances are rejected and never start a server.
        void OnEnable()
        {
            if (singleton != null)
            {
                // Fixed typo in the original message ("Mutiple").
                Debug.LogError("Multiple OVRSystemPerfMetricsTcpServer instances exist");
                return;
            }
            else
            {
                singleton = this;
            }
            if (Application.isEditor)
            {
                // Keep streaming metrics while the editor window is unfocused.
                Application.runInBackground = true;
            }
            tcpServer.StartListening(listeningPort);
        }

        void OnDisable()
        {
            tcpServer.StopListening();
            // Only release the singleton slot if this instance actually owns it; a
            // rejected duplicate being disabled must not tear down the active
            // server's registration (the original cleared it unconditionally).
            if (singleton == this)
            {
                singleton = null;
            }
            Debug.Log("[OVRSystemPerfMetricsTcpServer] server destroyed");
        }

        // Gather and broadcast one snapshot per frame, but only while at least one
        // client is connected.
        private void Update()
        {
            if (tcpServer.HasConnectedClient())
            {
                PerfMetrics metrics = GatherPerfMetrics();
                string json = metrics.ToJSON();
                byte[] bytes = Encoding.UTF8.GetBytes(json);
                tcpServer.Broadcast(OVRSystemPerfMetrics.PayloadTypeMetrics, bytes);
            }
        }

        /// <summary>
        /// Queries OVRPlugin for every supported metric and returns a snapshot.
        /// Metrics the plugin does not report come back with *_IsValid == false.
        /// </summary>
        public PerfMetrics GatherPerfMetrics()
        {
            PerfMetrics metrics = new PerfMetrics();
            metrics.frameCount = Time.frameCount;
            metrics.frameTime = Time.unscaledTime;
            metrics.deltaFrameTime = Time.unscaledDeltaTime;
            float? floatValue;
            int? intValue;
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.App_CpuTime_Float);
            metrics.appCpuTime_IsValid = floatValue.HasValue;
            metrics.appCpuTime = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.App_GpuTime_Float);
            metrics.appGpuTime_IsValid = floatValue.HasValue;
            metrics.appGpuTime = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Compositor_CpuTime_Float);
            metrics.compositorCpuTime_IsValid = floatValue.HasValue;
            metrics.compositorCpuTime = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Compositor_GpuTime_Float);
            metrics.compositorGpuTime_IsValid = floatValue.HasValue;
            metrics.compositorGpuTime = floatValue.GetValueOrDefault();
            intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Compositor_DroppedFrameCount_Int);
            metrics.compositorDroppedFrameCount_IsValid = intValue.HasValue;
            metrics.compositorDroppedFrameCount = intValue.GetValueOrDefault();
            intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Compositor_SpaceWarp_Mode_Int);
            metrics.compositorSpaceWarpMode_IsValid = intValue.HasValue;
            metrics.compositorSpaceWarpMode = intValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_GpuUtilPercentage_Float);
            metrics.systemGpuUtilPercentage_IsValid = floatValue.HasValue;
            metrics.systemGpuUtilPercentage = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_CpuUtilAveragePercentage_Float);
            metrics.systemCpuUtilAveragePercentage_IsValid = floatValue.HasValue;
            metrics.systemCpuUtilAveragePercentage = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_CpuUtilWorstPercentage_Float);
            metrics.systemCpuUtilWorstPercentage_IsValid = floatValue.HasValue;
            metrics.systemCpuUtilWorstPercentage = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Device_CpuClockFrequencyInMHz_Float);
            metrics.deviceCpuClockFrequencyInMHz_IsValid = floatValue.HasValue;
            metrics.deviceCpuClockFrequencyInMHz = floatValue.GetValueOrDefault();
            floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Device_GpuClockFrequencyInMHz_Float);
            metrics.deviceGpuClockFrequencyInMHz_IsValid = floatValue.HasValue;
            metrics.deviceGpuClockFrequencyInMHz = floatValue.GetValueOrDefault();
            intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Device_CpuClockLevel_Int);
            metrics.deviceCpuClockLevel_IsValid = intValue.HasValue;
            metrics.deviceCpuClockLevel = intValue.GetValueOrDefault();
            intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Device_GpuClockLevel_Int);
            metrics.deviceGpuClockLevel_IsValid = intValue.HasValue;
            metrics.deviceGpuClockLevel = intValue.GetValueOrDefault();
            for (int i = 0; i < OVRPlugin.MAX_CPU_CORES; i++)
            {
                // Bug fix: the original queried Device_CpuCore0UtilPercentage_Float for
                // every i, so all cores reported core 0's utilization. Per-core metric
                // ids are laid out consecutively starting at core 0, so offset by the
                // core index — NOTE(review): confirm the enum layout in OVRPlugin.
                floatValue = OVRPlugin.GetPerfMetricsFloat(
                    (OVRPlugin.PerfMetrics)((int)OVRPlugin.PerfMetrics.Device_CpuCore0UtilPercentage_Float + i));
                metrics.deviceCpuCoreUtilPercentage_IsValid[i] = floatValue.HasValue;
                metrics.deviceCpuCoreUtilPercentage[i] = floatValue.GetValueOrDefault();
            }
            return metrics;
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More