Initial upload of new Unity project

This commit is contained in:
Daniel Ocks
2025-07-03 11:02:29 +02:00
commit 27d6b94b7c
8167 changed files with 1116569 additions and 0 deletions

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a5e535a2ca4f1fe488b8e9bc5958d726
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: d2668e173130e8d448b53fd765d6ff11
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,58 @@
using System;
using Convai.Scripts.Utils;
using UnityEngine;
#if ENABLE_INPUT_SYSTEM
using UnityEngine.InputSystem;
#endif
/// <summary>
/// Controls player input to trigger a notification if there is no active NPC available for conversation.
/// </summary>
public class ActiveNPCChecker : MonoBehaviour
{
#if ENABLE_INPUT_SYSTEM
/// <summary>
/// Subscribes to the talk key input action when the script starts.
/// </summary>
private void Start()
{
ConvaiInputManager.Instance.GetTalkKeyAction().started += ConvaiInputManager_TalkKeyActionStarted;
}
/// <summary>
/// Unsubscribes from the talk key input action when the script is destroyed.
/// </summary>
private void OnDestroy()
{
ConvaiInputManager.Instance.GetTalkKeyAction().started -= ConvaiInputManager_TalkKeyActionStarted;
}
/// <summary>
/// Handles the talk key action and triggers a notification if no active NPC is available.
/// </summary>
/// <param name="input">The input context of the talk key action.</param>
private void ConvaiInputManager_TalkKeyActionStarted(InputAction.CallbackContext input)
{
try
{
// This outer check covers every case in which a normal conversation would not start.
if (!input.action.WasPressedThisFrame() || UIUtilities.IsAnyInputFieldFocused() || ConvaiNPCManager.Instance.activeConvaiNPC == null ||
ConvaiNPCManager.Instance.CheckForNPCToNPCConversation(ConvaiNPCManager.Instance.activeConvaiNPC))
{
// Only notify when there is neither an active nor a nearby NPC to talk to.
if (ConvaiNPCManager.Instance.activeConvaiNPC == null && ConvaiNPCManager.Instance.nearbyNPC == null)
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NotCloseEnoughForConversation);
}
}
catch (NullReferenceException)
{
Debug.Log("No active NPC available for conversation");
}
}
#elif ENABLE_LEGACY_INPUT_MANAGER
private void Update()
{
if (ConvaiInputManager.Instance.WasTalkKeyPressed())
{
if (ConvaiNPCManager.Instance.activeConvaiNPC == null)
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NotCloseEnoughForConversation);
}
}
#endif
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bd477455ea76a6c46b64614d87aa55b9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,93 @@
using System.Collections;
using UnityEngine;
namespace Convai.Scripts.Utils
{
public class MicrophoneInputChecker : MonoBehaviour
{
// Duration for microphone input check.
private const float INPUT_CHECK_DURATION = 3f;
// Microphone sensitivity, adjust as needed.
private const float SENSITIVITY = 10f;
// Threshold level to detect microphone issues.
private const float THRESHOLD = 0.1f;
// Reference to the TalkButtonDurationChecker script to check the talk button status.
private TalkButtonDurationChecker _talkButtonDurationChecker;
private void Awake()
{
// Find and assign the TalkButtonDurationChecker instance in the scene.
_talkButtonDurationChecker = FindObjectOfType<TalkButtonDurationChecker>();
}
/// <summary>
/// Check if the microphone is working by analyzing the provided AudioClip.
/// </summary>
/// <param name="audioClip">The audio clip to analyze.</param>
public void IsMicrophoneWorking(AudioClip audioClip)
{
// Stop any existing coroutines to ensure a clean start.
StopAllCoroutines();
// Start the coroutine to check the microphone device.
StartCoroutine(CheckMicrophoneDevice(audioClip));
}
// Coroutine to check the microphone device after a specified duration.
private IEnumerator CheckMicrophoneDevice(AudioClip audioClip)
{
// Check if the provided AudioClip is null.
if (audioClip == null)
{
// Log an error and abort the microphone check.
Logger.Error("AudioClip is null!", Logger.LogCategory.Character);
yield break;
}
// Wait for the specified duration before analyzing microphone input.
yield return new WaitForSeconds(INPUT_CHECK_DURATION);
// If the talk button was released prematurely, abort the microphone check.
if (_talkButtonDurationChecker.isTalkKeyReleasedEarly) yield break;
// Calculate the range of audio samples to analyze based on the duration.
int sampleStart = 0;
int sampleEnd = (int)(INPUT_CHECK_DURATION * audioClip.frequency * audioClip.channels);
// Initialize an array to store audio samples.
float[] samples = new float[sampleEnd - sampleStart];
int samplesLength = samples.Length;
// Attempt to retrieve audio data from the AudioClip.
if (audioClip.GetData(samples, sampleStart) == false)
{
Logger.Error("Failed to get audio data!", Logger.LogCategory.Character);
yield break;
}
// Initialize a variable to store the total absolute level of audio samples.
float level = 0;
// Accumulate the absolute sample values, pre-scaled by the sensitivity factor.
for (int i = 0; i < samplesLength; i++) level += Mathf.Abs(samples[i] * SENSITIVITY);
// Average over the number of samples and apply the sensitivity factor a second time.
level = level / samplesLength * SENSITIVITY;
// Check if the microphone level is below the threshold, indicating a potential issue.
if (level < THRESHOLD)
{
Logger.Warn("Microphone Issue Detected!", Logger.LogCategory.Character);
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.MicrophoneIssue);
}
else
{
// Log that the microphone is working fine.
Logger.Info("Microphone is working fine.", Logger.LogCategory.Character);
}
}
}
}
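
For reference, the check above boils down to comparing a scaled mean of absolute sample values against a threshold; because SENSITIVITY is applied both inside the loop and during normalization, the effective gain is SENSITIVITY squared (100 with the constants above). A minimal standalone sketch of the same idea, with the gain folded into one hypothetical parameter (helper class not part of this commit):

using UnityEngine;

// Hypothetical helper: flags a clip whose mean absolute amplitude,
// scaled by `gain`, stays below `threshold`.
public static class MicLevelProbe
{
    public static bool IsSuspiciouslyQuiet(float[] samples, float gain, float threshold)
    {
        if (samples == null || samples.Length == 0) return true;
        float sum = 0f;
        for (int i = 0; i < samples.Length; i++) sum += Mathf.Abs(samples[i]);
        return sum / samples.Length * gain < threshold;
    }
}

With gain = 100 and threshold = 0.1, a clip whose mean absolute amplitude is below 0.001 would be flagged, matching the behavior of CheckMicrophoneDevice.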

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 596b00c62fa88c645938df61b488e084
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,34 @@
using UnityEngine;
namespace Convai.Scripts.Utils
{
public class NetworkReachabilityChecker : MonoBehaviour
{
private void Start()
{
// Variable to store the debug text for network reachability status
string networkStatusDebugText = "";
// Check the current internet reachability status.
switch (Application.internetReachability)
{
case NetworkReachability.NotReachable:
// If the device cannot reach the internet, set the debug text and send a notification.
networkStatusDebugText = "Not Reachable";
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NetworkReachabilityIssue);
break;
case NetworkReachability.ReachableViaCarrierDataNetwork:
// Reachable via mobile data network
networkStatusDebugText = "Reachable via Carrier Data Network";
break;
case NetworkReachability.ReachableViaLocalAreaNetwork:
// Reachable via local area network
networkStatusDebugText = "Reachable via Local Area Network";
break;
}
// Log the network reachability status for debugging
Logger.Info("Network Reachability: " + networkStatusDebugText, Logger.LogCategory.Character);
}
}
}
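
Note that Application.internetReachability is sampled only once, in Start. If continuous monitoring were desired, a small polling coroutine could be layered on top; a sketch under that assumption (class name and interval hypothetical, not part of this commit):

using System.Collections;
using UnityEngine;

// Hypothetical periodic variant of the one-shot check above.
public class PeriodicReachabilityProbe : MonoBehaviour
{
    [SerializeField] private float _checkInterval = 5f;

    private IEnumerator Start()
    {
        WaitForSeconds wait = new WaitForSeconds(_checkInterval);
        while (true)
        {
            if (Application.internetReachability == NetworkReachability.NotReachable)
                Debug.LogWarning("Network not reachable.");
            yield return wait;
        }
    }
}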

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0001b07d59270994ba1cacc80c615eb4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,70 @@
using UnityEngine;
using UnityEngine.UI;
// Handles the activation status of the notification system based on Settings Panel Toggle.
public class NotificationSystemActiveStatusHandler : MonoBehaviour
{
[SerializeField] private Toggle _notificationSystemActiveStatusToggle;
private void Awake()
{
// Subscribe to the toggle's value change event.
_notificationSystemActiveStatusToggle.onValueChanged.AddListener(SetActiveStatus);
}
/// <summary>
/// Subscribe to events when this component is enabled.
/// </summary>
private void OnEnable()
{
// Subscribe to the event when saved data is loaded.
UISaveLoadSystem.Instance.OnLoad += UISaveLoadSystem_OnLoad;
// Subscribe to the event when data is saved.
UISaveLoadSystem.Instance.OnSave += UISaveLoadSystem_OnSave;
}
/// <summary>
/// Unsubscribe from events when this component is disabled.
/// </summary>
private void OnDisable()
{
// Unsubscribe from the event when saved data is loaded.
UISaveLoadSystem.Instance.OnLoad -= UISaveLoadSystem_OnLoad;
// Unsubscribe from the event when data is saved.
UISaveLoadSystem.Instance.OnSave -= UISaveLoadSystem_OnSave;
}
/// <summary>
/// Event handler for when saved data is loaded.
/// </summary>
private void UISaveLoadSystem_OnLoad()
{
// Retrieve the saved notification system activation status.
bool newValue = UISaveLoadSystem.Instance.NotificationSystemActiveStatus;
// Update the UI and internal status based on the loaded value.
SetActiveStatus(newValue);
_notificationSystemActiveStatusToggle.isOn = newValue;
}
/// <summary>
/// Event handler for when data is saved.
/// </summary>
private void UISaveLoadSystem_OnSave()
{
// Save the current notification system activation status.
UISaveLoadSystem.Instance.NotificationSystemActiveStatus = _notificationSystemActiveStatusToggle.isOn;
}
/// <summary>
/// Set the activation status of the notification system.
/// </summary>
/// <param name="value"> The new activation status. </param>
public void SetActiveStatus(bool value)
{
// Call the NotificationSystemHandler to update the activation status.
NotificationSystemHandler.Instance.SetNotificationSystemActiveStatus(value);
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2c5289bfc72186f40b90ff7b9d45894a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,84 @@
using System;
using UnityEngine;
/// <summary>
/// Handles the notification system's behavior and interactions.
/// </summary>
[DefaultExecutionOrder(-100)]
public class NotificationSystemHandler : MonoBehaviour
{
/// <summary>
/// Group asset containing the predefined notification configurations.
/// Its notifications array can be edited in the Unity Editor to define different types of notifications.
/// </summary>
[SerializeField] private SONotificationGroup _notificationGroup;
/// <summary>
/// Event triggered when a notification is requested.
/// </summary>
public Action<SONotification> OnNotificationRequested;
/// <summary>
/// Flag indicating whether the notification system is currently active.
/// </summary>
private bool _isNotificationSystemActive = true;
/// <summary>
/// Singleton instance of the NotificationSystemHandler.
/// </summary>
public static NotificationSystemHandler Instance { get; private set; }
/// <summary>
/// Ensure there is only one instance of NotificationSystemHandler.
/// </summary>
private void Awake()
{
if (Instance != null)
{
Debug.Log("<color=red> There's More Than One NotificationSystemHandler </color> " + transform + " - " +
Instance);
Destroy(gameObject);
return;
}
Instance = this;
}
/// <summary>
/// Requests a notification of the specified type.
/// </summary>
/// <param name="notificationType">The type of notification to request.</param>
public void NotificationRequest(NotificationType notificationType)
{
// Check if the notification system is currently active.
if (!_isNotificationSystemActive) return;
// Search for the requested notification type in the predefined array.
SONotification requestedSONotification = null;
foreach (SONotification notification in _notificationGroup.SONotifications)
if (notification.NotificationType == notificationType)
{
requestedSONotification = notification;
break;
}
// If the requested notification is not found, log an error.
if (requestedSONotification == null)
{
Debug.LogError("There is no Notification defined for the selected Notification Type!");
return;
}
// Invoke the OnNotificationRequested event with the requested notification.
OnNotificationRequested?.Invoke(requestedSONotification);
}
/// <summary>
/// Sets the activation status of the notification system.
/// </summary>
/// <param name="value">The new activation status.</param>
public void SetNotificationSystemActiveStatus(bool value)
{
_isNotificationSystemActive = value;
}
}
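
Putting the handler to work: any script can raise a notification through the singleton, and UI code reacts via OnNotificationRequested. A minimal consumer sketch (class name hypothetical, not part of this commit):

using UnityEngine;

// Hypothetical listener that logs every requested notification.
public class NotificationLogger : MonoBehaviour
{
    private void OnEnable()
    {
        NotificationSystemHandler.Instance.OnNotificationRequested += HandleRequest;
    }

    private void OnDisable()
    {
        NotificationSystemHandler.Instance.OnNotificationRequested -= HandleRequest;
    }

    private void HandleRequest(SONotification notification)
    {
        Debug.Log($"Notification requested: {notification.NotificationTitle}");
    }
}

Raising one is then a single call, e.g. NotificationSystemHandler.Instance.NotificationRequest(NotificationType.MicrophoneIssue);.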

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d911410153c6d594098cac3c3bfa456d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,36 @@
/// <summary>
/// Enumeration defining various types of in-app notifications.
/// Each enum value represents a specific scenario or issue that can trigger a notification.
/// </summary>
public enum NotificationType
{
/// <summary>
/// Indicates a notification related to microphone problems.
/// </summary>
MicrophoneIssue,
/// <summary>
/// Indicates a notification related to network reachability issues.
/// </summary>
NetworkReachabilityIssue,
/// <summary>
/// Indicates a notification when the user is not in close proximity to initiate a conversation.
/// </summary>
NotCloseEnoughForConversation,
/// <summary>
/// Indicates a notification when a user releases the talk button prematurely during a conversation.
/// </summary>
TalkButtonReleasedEarly,
/// <summary>
/// Indicates that no microphone device was detected in the system
/// </summary>
NoMicrophoneDetected,
/// <summary>
/// Indicates that no API key was found.
/// </summary>
APIKeyNotFound,
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 18458e12a4b7457da0eb049ea8d56d4c
timeCreated: 1698156821

View File

@ -0,0 +1,31 @@
using UnityEngine;
/// <summary>
/// This class represents a notification in the game.
/// </summary>
[CreateAssetMenu(menuName = "Convai/Notification System/Notification", fileName = "New Notification")]
public class SONotification : ScriptableObject
{
/// <summary>
/// The type of the notification.
/// </summary>
[Tooltip("The type of the notification.")]
public NotificationType NotificationType;
/// <summary>
/// The icon to be displayed with the notification.
/// </summary>
[Tooltip("The icon to be displayed with the notification.")]
public Sprite Icon;
/// <summary>
/// The notification title.
/// </summary>
[Tooltip("The notification title.")]
public string NotificationTitle;
/// <summary>
/// The text content of the notification.
/// </summary>
[TextArea(10, 10)] [Tooltip("The text content of the notification.")]
public string NotificationMessage;
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6b33bf54ff467c742a84ac58d34105ec
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,15 @@
using UnityEngine;
/// <summary>
/// Represents a group of notifications as a ScriptableObject.
/// This allows for easy configuration and management of different notifications in the Unity Editor.
/// </summary>
[CreateAssetMenu(menuName = "Convai/Notification System/Notification Group", fileName = "New Notification Group")]
public class SONotificationGroup : ScriptableObject
{
/// <summary>
/// Array of SONotification objects.
/// Each object represents a unique notification that can be triggered in the application.
/// </summary>
public SONotification[] SONotifications;
}
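
The linear search in NotificationSystemHandler.NotificationRequest could equivalently be written against this group with System.Array.Find; a sketch of that alternative (extension class hypothetical, not part of this commit):

using System;

// Hypothetical convenience wrapper around the group's array.
public static class SONotificationGroupExtensions
{
    // Returns the first notification matching `type`, or null if none is defined.
    public static SONotification Find(this SONotificationGroup group, NotificationType type)
    {
        return Array.Find(group.SONotifications, n => n.NotificationType == type);
    }
}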

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73c98f07d31af334ba49c31a867600b2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0323cdb3f17fa914cae382e617430dd2
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 87748cd0f7abedf4e8dd7cf60e5fb99a
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fe9295a7cd110d545b49f77fcc49c489
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5e8394cce5330644594a848783844973
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7a58608b2e0aa77418e15e4b4ef0a1fa
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: ee7d034a751672c449ab90856e05919c
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 374f6f70a1f7d9546926f20184467b32
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 1dbb77ab53e0d714a9f00cba95a25a46
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,115 @@
using Convai.Scripts;
using Convai.Scripts.Utils;
using TMPro;
using UnityEngine;
/// <summary>
/// Monitors the duration of the talk button press and notifies the Notification System if released prematurely.
/// </summary>
public class TalkButtonDurationChecker : MonoBehaviour
{
/// <summary>
/// Minimum duration required for a valid talk action.
/// </summary>
private const float MIN_TALK_DURATION = 0.5f;
/// <summary>
/// Flag indicating whether the talk button was released prematurely.
/// </summary>
[HideInInspector] public bool isTalkKeyReleasedEarly;
private TMP_InputField _activeInputField;
/// <summary>
/// Timer to track the duration of the talk button press.
/// </summary>
private float _timer;
private UIAppearanceSettings _uiAppearanceSettings;
private void Awake()
{
_uiAppearanceSettings = FindObjectOfType<UIAppearanceSettings>();
}
/// <summary>
/// Update is called once per frame.
/// It checks if the talk button is being held down or released.
/// </summary>
private void Update()
{
// Check if the talk button is being held down.
if (ConvaiInputManager.Instance.IsTalkKeyHeld() && !UIUtilities.IsAnyInputFieldFocused())
// Increment the timer based on the time passed since the last frame.
_timer += Time.deltaTime;
// Check if the talk button is released.
if (ConvaiInputManager.Instance.WasTalkKeyReleased() && !UIUtilities.IsAnyInputFieldFocused())
{
if (_activeInputField != null && _activeInputField.isFocused)
{
_timer = 0;
return;
}
CheckTalkButtonRelease();
// Reset the timer for the next talk action.
_timer = 0;
}
}
private void OnEnable()
{
ConvaiNPCManager.Instance.OnActiveNPCChanged += ConvaiNPCManager_OnActiveNPCChanged;
_uiAppearanceSettings.OnAppearanceChanged += UIAppearanceSettings_OnAppearanceChanged;
}
private void OnDisable()
{
ConvaiNPCManager.Instance.OnActiveNPCChanged -= ConvaiNPCManager_OnActiveNPCChanged;
_uiAppearanceSettings.OnAppearanceChanged -= UIAppearanceSettings_OnAppearanceChanged;
}
private void ConvaiNPCManager_OnActiveNPCChanged(ConvaiNPC convaiNpc)
{
if (convaiNpc == null)
{
_activeInputField = null;
return;
}
_activeInputField = convaiNpc.playerInteractionManager.FindActiveInputField();
}
private void UIAppearanceSettings_OnAppearanceChanged()
{
ConvaiNPC convaiNpc = ConvaiNPCManager.Instance.activeConvaiNPC;
if (convaiNpc == null)
{
_activeInputField = null;
return;
}
_activeInputField = convaiNpc.playerInteractionManager.FindActiveInputField();
}
/// <summary>
/// Checks if the talk button was released prematurely and triggers a notification if so.
/// </summary>
private void CheckTalkButtonRelease()
{
// Initialize the flag to false.
isTalkKeyReleasedEarly = false;
// Trigger a notification if the talk button is released before reaching the minimum required duration.
if (_timer < MIN_TALK_DURATION)
{
// Check if there is an active ConvaiNPC.
if (ConvaiNPCManager.Instance.activeConvaiNPC == null) return;
// Set the flag to true and request a notification.
isTalkKeyReleasedEarly = true;
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.TalkButtonReleasedEarly);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ce5db0c0354de754f99bd35c9f7fb96a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,67 @@
using System;
using TMPro;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Represents a UI notification element that can be activated or deactivated.
/// </summary>
public class UINotification : MonoBehaviour
{
/// <summary>
/// The RectTransform of the notification UI element.
/// </summary>
public RectTransform NotificationRectTransform;
/// <summary>
/// The image component for displaying the notification icon.
/// </summary>
[SerializeField] private Image _notificationIcon;
/// <summary>
/// The TextMeshProUGUI component for displaying the notification title.
/// </summary>
[SerializeField] private TextMeshProUGUI _notificationTitleText;
/// <summary>
/// The TextMeshProUGUI component for displaying the notification text.
/// </summary>
[SerializeField] private TextMeshProUGUI _notificationMessageText;
/// <summary>
/// Deactivates the notification UI element on awake.
/// </summary>
private void Awake()
{
SetActive(false);
}
/// <summary>
/// Initializes the UI notification with the provided Notification data.
/// </summary>
/// <param name="soNotification">The notification data to initialize the UI notification with.</param>
public void Initialize(SONotification soNotification)
{
if (soNotification == null)
{
throw new ArgumentNullException(nameof(soNotification), "SONotification is null.");
}
// Set the notification icon and text based on the provided Notification.
_notificationIcon.sprite = soNotification.Icon;
_notificationTitleText.text = soNotification.NotificationTitle;
_notificationMessageText.text = soNotification.NotificationMessage;
// Activate the notification UI element.
SetActive(true);
}
/// <summary>
/// Sets the active state of the notification UI element.
/// </summary>
/// <param name="value">The new active state.</param>
public void SetActive(bool value)
{
gameObject.SetActive(value);
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 894cc2b4c2298fb4a98bd3d9f2e8d6ba
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,316 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// This class is responsible for controlling the UI notifications in the game.
/// It handles the creation, activation, deactivation, and animation of notifications.
/// </summary>
public class UINotificationController : MonoBehaviour
{
/// <summary>
/// Maximum number of notifications that can be displayed at the same time.
/// </summary>
private const int MAX_NUMBER_OF_NOTIFICATION_AT_SAME_TIME = 3;
/// <summary>
/// References to the UI notification prefab and other necessary components.
/// </summary>
[Header("References")]
[SerializeField] private UINotification _uiNotificationPrefab;
/// <summary>
/// Spacing between Notifications
/// </summary>
[Header("Configurations")]
[SerializeField] private int _spacing = 100;
/// <summary>
/// Position for Active Notification
/// </summary>
[Tooltip("Starting position for the first notification; Y value adjusts sequentially for each subsequent notification.")]
[SerializeField] private Vector2 _activeNotificationPos;
/// <summary>
/// Position for Deactivated Notification
/// </summary>
[SerializeField] private Vector2 _deactivatedNotificationPos;
[Header("UI Notification Animation Values")]
[SerializeField] private float _activeDuration = 4f;
[SerializeField] private float _slipDuration = 0.3f;
[SerializeField] private float _delay = 0.3f;
[SerializeField] private AnimationCurve _slipAnimationCurve;
private readonly float _fadeInDuration = 0.35f;
private readonly float _fadeOutDuration = 0.2f;
/// <summary>
/// Flag indicating whether a UI notification movement animation is currently in progress.
/// Used to prevent overlapping animation coroutines for UI notifications.
/// </summary>
private bool _isNotificationAnimationInProgress;
private Queue<UINotification> _activeUINotifications;
private Queue<UINotification> _deactivatedUINotifications;
private CanvasGroup _canvasGroup;
private FadeCanvas _fadeCanvas;
/// <summary>
/// List to keep track of the order in which pending notifications were requested.
/// </summary>
private readonly List<SONotification> _pendingNotificationsOrder = new();
/// <summary>
/// Awake is called when the script instance is being loaded.
/// It is used to initialize any variables or game state before the game starts.
/// </summary>
private void Awake()
{
// Get necessary components and initialize UI notifications.
_canvasGroup = GetComponent<CanvasGroup>();
_fadeCanvas = GetComponent<FadeCanvas>();
InitializeUINotifications();
}
/// <summary>
/// This function is called when the object becomes enabled and active.
/// It is used to subscribe to the OnNotificationRequested event.
/// </summary>
private void OnEnable()
{
NotificationSystemHandler.Instance.OnNotificationRequested += NotificationSystemHandler_OnNotificationRequested;
}
/// <summary>
/// This function is called when the behaviour becomes disabled or inactive.
/// It is used to unsubscribe from the OnNotificationRequested event.
/// </summary>
private void OnDisable()
{
NotificationSystemHandler.Instance.OnNotificationRequested -= NotificationSystemHandler_OnNotificationRequested;
}
/// <summary>
/// Handles a new notification request by adding it to the order list and attempting to initialize it.
/// If a notification animation is already in progress, waits for it to complete before processing the new request.
/// </summary>
/// <param name="SONotification">The requested SONotification to be processed.</param>
private void NotificationSystemHandler_OnNotificationRequested(SONotification SONotification)
{
// Add the requested notification to the order list and try to initialize it.
_pendingNotificationsOrder.Add(SONotification);
// If a notification animation is already in progress, wait for it to complete before processing the new request.
if (_isNotificationAnimationInProgress) return;
// If initialization fails, return
if (TryInitializeNewNotification(SONotification, out UINotification uiNotification) == false) return;
// Start the coroutine for UI notification animations
StartNotificationUICoroutine(uiNotification);
}
/// <summary>
/// This function is used to initialize the UI notifications.
/// It initializes the queues for active and deactivated UI notifications and instantiates and enqueues deactivated UI
/// notifications.
/// </summary>
private void InitializeUINotifications()
{
// Initialize the queues for active and deactivated UI notifications.
_activeUINotifications = new Queue<UINotification>();
_deactivatedUINotifications = new Queue<UINotification>();
// Instantiate and enqueue deactivated UI notifications.
for (int i = 0; i < MAX_NUMBER_OF_NOTIFICATION_AT_SAME_TIME; i++)
{
UINotification uiNotification = Instantiate(_uiNotificationPrefab, transform);
// Initialize Position
uiNotification.NotificationRectTransform.anchoredPosition = _deactivatedNotificationPos;
_deactivatedUINotifications.Enqueue(uiNotification);
}
}
/// <summary>
/// Attempts to initialize a new UI notification using the provided SONotification.
/// Tries to get an available UI notification and initializes it with the given SONotification.
/// </summary>
/// <param name="SONotification">The SONotification to be used for initializing the UI notification.</param>
/// <param name="uiNotification">The initialized UINotification if successful, otherwise null.</param>
/// <returns>True if initialization is successful, false otherwise.</returns>
private bool TryInitializeNewNotification(SONotification SONotification, out UINotification uiNotification)
{
// Try to get an available UI notification and initialize it with the given SONotification.
uiNotification = GetAvailableUINotification();
if (uiNotification == null) return false;
uiNotification.Initialize(SONotification);
return true;
}
/// <summary>
/// Initiates the coroutine for UI notification animations and adds the notification to the active queue.
/// </summary>
/// <param name="uiNotification">The UINotification to be animated and added to the active queue.</param>
private void StartNotificationUICoroutine(UINotification uiNotification)
{
// Define additional delay for smoother notification end transition
float extraDelayForNotificationEndTransition = 0.5f;
// Calculate the total duration including fadeIn, activeDuration, slipDuration (for both activation and deactivation), delay, and extra delay
float totalAnimationDuration = _fadeInDuration + _activeDuration + (2 * _slipDuration) + _delay + extraDelayForNotificationEndTransition;
// Start the fade animation for the canvas group
_fadeCanvas.StartFadeInFadeOutWithGap(_canvasGroup, _fadeInDuration, _fadeOutDuration, totalAnimationDuration);
// Enqueue the notification to the active queue
_activeUINotifications.Enqueue(uiNotification);
// Start the coroutine for individual UI notification animations
StartCoroutine(StartNotificationUI(uiNotification));
}
/// <summary>
/// Coroutine for managing the lifecycle of a UI notification, including its activation, display duration, and deactivation.
/// </summary>
/// <param name="uiNotification">The UINotification to be managed.</param>
private IEnumerator StartNotificationUI(UINotification uiNotification)
{
// Remove the notification from the pending list
int firstIndex = 0;
_pendingNotificationsOrder.RemoveAt(firstIndex);
// Move to the active position
yield return MoveUINotificationToActivePosition(uiNotification);
// Wait for the active duration
yield return new WaitForSeconds(_activeDuration);
UpdateUINotificationPositions();
// Move to the hidden position
yield return MoveUINotificationToHiddenPosition(uiNotification);
// Deactivate the UI notification, update positions, and check for pending notifications.
DeactivateAndEnqueueUINotification(uiNotification);
// If there are pending notifications, initialize and start a new one
if (AreTherePendingNotifications())
{
TryInitializeAndStartNewNotification();
}
// Update UI notification positions after the lifecycle is complete
UpdateUINotificationPositions();
}
/// <summary>
/// Moves the UI notification to its active position.
/// </summary>
private IEnumerator MoveUINotificationToActivePosition(UINotification uiNotification)
{
float targetY = _activeNotificationPos.y - _spacing * (_activeUINotifications.Count - 1);
Vector2 targetPos = new Vector2(_activeNotificationPos.x, targetY);
yield return StartCoroutine(MoveUINotificationToTargetPos(uiNotification, targetPos));
}
/// <summary>
/// Moves the UI notification to its hidden position.
/// </summary>
private IEnumerator MoveUINotificationToHiddenPosition(UINotification uiNotification)
{
Vector2 targetPos = _deactivatedNotificationPos;
yield return StartCoroutine(MoveUINotificationToTargetPos(uiNotification, targetPos));
}
/// <summary>
/// Deactivates the UI notification, updates positions, and enqueues it for later use.
/// </summary>
private void DeactivateAndEnqueueUINotification(UINotification uiNotification)
{
uiNotification.SetActive(false);
_activeUINotifications.Dequeue();
_deactivatedUINotifications.Enqueue(uiNotification);
UpdateUINotificationPositions();
}
/// <summary>
/// Checks if there are pending notifications and initializes and starts a new one if available.
/// </summary>
private void TryInitializeAndStartNewNotification()
{
if (TryInitializeNewNotification(_pendingNotificationsOrder[0], out UINotification newUiNotification))
{
StartNotificationUICoroutine(newUiNotification);
}
}
/// <summary>
/// Smoothly moves the UI notification to the target position over a specified duration.
/// </summary>
/// <param name="uiNotification">The UINotification to be moved.</param>
/// <param name="targetPos">The target position to move the UINotification to.</param>
private IEnumerator MoveUINotificationToTargetPos(UINotification uiNotification, Vector2 targetPos)
{
// Set flag to indicate that a notification animation is in progress
_isNotificationAnimationInProgress = true;
float elapsedTime = 0f;
Vector2 startPos = uiNotification.NotificationRectTransform.anchoredPosition;
// Move the UI notification smoothly to the target position over the specified duration
while (elapsedTime <= _slipDuration + _delay)
{
elapsedTime += Time.deltaTime;
float percent = Mathf.Clamp01(elapsedTime / _slipDuration);
float curvePercent = _slipAnimationCurve.Evaluate(percent);
uiNotification.NotificationRectTransform.anchoredPosition = Vector2.Lerp(startPos, targetPos, curvePercent);
yield return null;
}
// Reset the flag once the animation is complete
_isNotificationAnimationInProgress = false;
}
/// <summary>
/// Updates the positions of active UI notifications along the Y-axis.
/// </summary>
private void UpdateUINotificationPositions()
{
float targetX = _activeNotificationPos.x;
float targetY = _activeNotificationPos.y;
// Iterate through active UI notifications and move them to their respective positions
foreach (UINotification activeUINotification in _activeUINotifications)
{
Vector2 targetPos = new Vector2(targetX, targetY);
StartCoroutine(MoveUINotificationToTargetPos(activeUINotification, targetPos));
targetY -= _spacing;
}
}
/// <summary>
/// Gets an available UI notification from the deactivated queue.
/// </summary>
/// <returns>The available UI notification, or null if the deactivated queue is empty.</returns>
private UINotification GetAvailableUINotification()
{
// Check if there are available deactivated UI notifications
if (_deactivatedUINotifications.Count == 0) return null;
// Dequeue and return an available UI notification
return _deactivatedUINotifications.Dequeue();
}
/// <summary>
/// Checks if there are pending notifications in the order list.
/// </summary>
/// <returns>True if there are pending notifications, false otherwise.</returns>
private bool AreTherePendingNotifications()
{
// Check if there are any pending notifications in the order list
return _pendingNotificationsOrder.Count >= 1;
}
}
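
The two-queue design above is a plain object pool: all UINotification instances are created once in Awake and afterwards only move between the deactivated and active queues, so nothing is instantiated or destroyed at runtime. Stripped of the animation logic, the pattern looks like this (hypothetical generic form, not part of this commit):

using System.Collections.Generic;
using UnityEngine;

// Hypothetical sketch of the pooling pattern used by UINotificationController.
public class SimplePool<T> where T : Component
{
    private readonly Queue<T> _idle = new Queue<T>();

    public SimplePool(T prefab, int size, Transform parent)
    {
        for (int i = 0; i < size; i++)
            _idle.Enqueue(Object.Instantiate(prefab, parent));
    }

    // Returns a pooled instance, or null when the pool is exhausted
    // (mirroring GetAvailableUINotification above).
    public T Get() => _idle.Count > 0 ? _idle.Dequeue() : null;

    public void Release(T instance) => _idle.Enqueue(instance);
}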

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ad1fae26184a1504bbf417585440fe12
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fe6af3c5595f83b4aaf1e1c05ef9b819
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,25 @@
using UnityEngine;
#if ENABLE_INPUT_SYSTEM
using UnityEngine.InputSystem.UI;
#elif ENABLE_LEGACY_INPUT_MANAGER
using UnityEngine.EventSystems;
#endif
namespace Convai.Scripts.Utils
{
public class ConvaiDynamicInputSystem : MonoBehaviour
{
private void Awake()
{
#if ENABLE_INPUT_SYSTEM
if (FindObjectOfType<InputSystemUIInputModule>() == null) gameObject.AddComponent<InputSystemUIInputModule>();
#elif ENABLE_LEGACY_INPUT_MANAGER
if (FindObjectOfType<StandaloneInputModule>() == null)
{
gameObject.AddComponent<StandaloneInputModule>();
}
#endif
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7611c6ad1a67fed44afca249d0bcd288
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,134 @@
using UnityEngine;
using UnityEngine.EventSystems;
namespace Convai.Scripts.Utils
{
/// <summary>
/// Class for handling player movement including walking, running, jumping, and looking around.
/// </summary>
[RequireComponent(typeof(CharacterController))]
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Player Movement")]
[HelpURL("https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview")]
public class ConvaiPlayerMovement : MonoBehaviour
{
[Header("Movement Parameters")] [SerializeField] [Tooltip("The speed at which the player walks.")] [Range(1, 10)]
private float walkingSpeed = 3f;
[SerializeField] [Tooltip("The speed at which the player runs.")] [Range(1, 10)]
private float runningSpeed = 8f;
[SerializeField] [Tooltip("The speed at which the player jumps.")] [Range(1, 10)]
private float jumpSpeed = 4f;
[Header("Gravity & Grounding")] [SerializeField] [Tooltip("The gravity applied to the player.")] [Range(1, 10)]
private float gravity = 9.8f;
[Header("Camera Parameters")] [SerializeField] [Tooltip("The main camera the player uses.")]
private Camera playerCamera;
[SerializeField] [Tooltip("Speed at which the player can look around.")] [Range(0, 1)]
private float lookSpeedMultiplier = 0.05f;
[SerializeField] [Tooltip("Limit of upwards and downwards look angles.")] [Range(1, 90)]
private float lookXLimit = 45.0f;
[HideInInspector] public bool canMove = true;
private CharacterController _characterController;
private Vector3 _moveDirection = Vector3.zero;
private float _rotationX;
//Singleton Instance
public static ConvaiPlayerMovement Instance { get; private set; }
private void Awake()
{
// Singleton pattern to ensure only one instance exists
if (Instance == null)
Instance = this;
else
Destroy(gameObject);
}
private void Start()
{
_characterController = GetComponent<CharacterController>();
LockCursor();
}
private void Update()
{
// Handle cursor locking/unlocking
HandleCursorLocking();
// Check for running state and move the player
MovePlayer();
// Handle the player and camera rotation
RotatePlayerAndCamera();
}
/// <summary>
/// Unlocks the cursor when the ESC key is pressed; re-locks it when the left mouse button is pressed.
/// </summary>
private void HandleCursorLocking()
{
if (ConvaiInputManager.Instance.WasCursorLockKeyPressed())
{
Cursor.lockState = CursorLockMode.None;
Cursor.visible = true;
}
if (ConvaiInputManager.Instance.WasMouseLeftButtonPressed() && !EventSystem.current.IsPointerOverGameObject()) LockCursor();
}
private static void LockCursor()
{
Cursor.lockState = CursorLockMode.Locked;
Cursor.visible = false;
}
private void MovePlayer()
{
Vector3 horizontalMovement = Vector3.zero;
if (canMove && !EventSystem.current.IsPointerOverGameObject())
{
Vector3 forward = transform.TransformDirection(Vector3.forward);
Vector3 right = transform.TransformDirection(Vector3.right);
float speed = ConvaiInputManager.Instance.IsRunKeyHeld() ? runningSpeed : walkingSpeed;
Vector2 moveVector = ConvaiInputManager.Instance.GetPlayerMoveVector();
float curSpeedX = speed * moveVector.x;
float curSpeedY = speed * moveVector.y;
horizontalMovement = forward * curSpeedY + right * curSpeedX;
if (_characterController.isGrounded && ConvaiInputManager.Instance.WasJumpKeyPressed()) _moveDirection.y = jumpSpeed;
}
if (canMove && !_characterController.isGrounded)
// Apply gravity only when canMove is true
_moveDirection.y -= gravity * Time.deltaTime;
// Move the character
_characterController.Move((_moveDirection + horizontalMovement) * Time.deltaTime);
}
private void RotatePlayerAndCamera()
{
if (Cursor.lockState != CursorLockMode.Locked) return;
// Vertical rotation
_rotationX -= ConvaiInputManager.Instance.GetMouseYAxis() * lookSpeedMultiplier;
_rotationX = Mathf.Clamp(_rotationX, -lookXLimit, lookXLimit);
playerCamera.transform.localRotation = Quaternion.Euler(_rotationX, 0, 0);
// Horizontal rotation
float rotationY = ConvaiInputManager.Instance.GetMouseXAxis() * lookSpeedMultiplier;
transform.rotation *= Quaternion.Euler(0, rotationY, 0);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: adc3b3c371ebd1543ad6696b74dbbe9f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 189248cf557957840a0084f28183b3f9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
using UnityEngine;
namespace Convai.Scripts
{
public class ReadOnlyAttribute : PropertyAttribute
{
}
}
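
The attribute carries no logic by itself; making a tagged field non-editable in the Inspector requires a matching PropertyDrawer, which presumably lives in an Editor folder elsewhere in the project. A minimal sketch of such a drawer (not part of this commit):

#if UNITY_EDITOR
using Convai.Scripts;
using UnityEditor;
using UnityEngine;

// Hypothetical drawer sketch: renders the decorated field disabled.
[CustomPropertyDrawer(typeof(ReadOnlyAttribute))]
public class ReadOnlyDrawer : PropertyDrawer
{
    public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
    {
        GUI.enabled = false;
        EditorGUI.PropertyField(position, property, label, true);
        GUI.enabled = true;
    }
}
#endif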

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 4af0f963530e4aeca5f5747085ac74fb
timeCreated: 1701083156

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7d932a943c13cad4381fdb6714489c14
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,235 @@
using System.Collections;
using System.Text.RegularExpressions;
using UnityEngine;
namespace Convai.Scripts.Utils
{
// TODO: Change URL to point to the blinking script documentation after it is created
/// <summary>
/// Controls the blinking behavior of a character model in Unity.
/// </summary>
/// <remarks>
/// Instructions to find the index of left / right eyelids in BlendShapes:
/// <list type="bullet">
/// <item>
/// <description>Select your character model in the scene which has the SkinnedMeshRenderer component.</description>
/// </item>
/// <item>
/// <description>Look for the blend shapes in the SkinnedMeshRenderer component in the Inspector window.</description>
/// </item>
/// <item>
/// <description>
/// The zero-based position of the blend shape named "EyeBlink_L" (or similar) is the index of the
/// left eyelid.
/// </description>
/// </item>
/// <item>
/// <description>
/// The zero-based position of the blend shape named "EyeBlink_R" (or similar) is the index of the
/// right eyelid.
/// </description>
/// </item>
/// </list>
/// </remarks>
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Character Blinking")]
[HelpURL("https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview")]
public class ConvaiBlinkingHandler : MonoBehaviour
{
[SerializeField]
[Tooltip("The SkinnedMeshRenderer for the character's face")]
private SkinnedMeshRenderer faceSkinnedMeshRenderer;
[SerializeField]
[Tooltip("The index of the left eyelid blend shape in the SkinnedMeshRenderer")]
private int indexOfLeftEyelid = -1;
[SerializeField]
[Tooltip("The index of the right eyelid blend shape in the SkinnedMeshRenderer")]
private int indexOfRightEyelid = -1;
[SerializeField]
[Tooltip("Maximum value of the blendshape of the eye lid")]
private float maxBlendshapeWeight = 1;
[SerializeField]
[Tooltip("The minimum amount of time, in seconds, for a blink. Positive values only.")]
[Range(0.1f, 1f)]
private float minBlinkDuration = 0.2f;
[SerializeField]
[Tooltip(
"The maximum amount of time, in seconds, for a blink. Must be greater than the minimum blink duration.")]
[Range(0.1f, 1f)]
private float maxBlinkDuration = 0.3f;
[SerializeField]
[Tooltip("The minimum amount of time, in seconds, between blinks. Positive values only.")]
[Range(1f, 10f)]
private float minBlinkInterval = 2;
[SerializeField]
[Tooltip(
"The maximum amount of time, in seconds, between blinks. Must be greater than the minimum blink interval.")]
[Range(1f, 10f)]
private float maxBlinkInterval = 3;
/// <summary>
/// Initializes the settings for eyelid blinking on a character's SkinnedMeshRenderer blend shapes.
/// </summary>
/// <remarks>
/// This method executes the following sequence of operations:
/// <list type="bullet">
/// <item>
/// <description>
/// Checks if the SkinnedMeshRenderer is associated with the character's face. If it is not found,
/// it logs an error and returns.
/// </description>
/// </item>
/// <item>
/// <description>
/// If the indices of the left and right eyelids are not set (i.e., they are -1), it iterates over
/// the blend shapes of the SkinnedMeshRenderer to find these indices. It uses regex to match blend shapes'
/// names, looking for "eye" and "blink" in combination with either "_l" for left or "_r" for right
/// indicators. The appropriate indices found are stored in PlayerPrefs for caching purposes.
/// </description>
/// </item>
/// </list>
/// </remarks>
private void Start()
{
string npcName = GetComponent<ConvaiNPC>().characterName; // fetch NPC name from ConvaiNPC script
string leftBlinkKey = npcName + "LeftEyelid";
string rightBlinkKey = npcName + "RightEyelid";
if (indexOfLeftEyelid == -1)
indexOfLeftEyelid = PlayerPrefs.GetInt(leftBlinkKey, -1);
if (indexOfRightEyelid == -1)
indexOfRightEyelid = PlayerPrefs.GetInt(rightBlinkKey, -1);
if (faceSkinnedMeshRenderer == null)
faceSkinnedMeshRenderer = GetSkinnedMeshRendererWithRegex(transform);
if (faceSkinnedMeshRenderer != null)
{
// If we couldn't retrieve the indices from cache, we search for them in our mesh
if (indexOfLeftEyelid == -1 || indexOfRightEyelid == -1)
{
for (int i = 0; i < faceSkinnedMeshRenderer.sharedMesh.blendShapeCount; i++)
{
string blendShapeName = faceSkinnedMeshRenderer.sharedMesh.GetBlendShapeName(i).ToLower();
if (indexOfLeftEyelid == -1 && Regex.IsMatch(blendShapeName, @"(eye).*(blink).*(l|left)"))
{
indexOfLeftEyelid = i;
PlayerPrefs.SetInt(leftBlinkKey, i);
}
else if (indexOfRightEyelid == -1 && Regex.IsMatch(blendShapeName, @"(eye).*(blink).*(r|right)"))
{
indexOfRightEyelid = i;
PlayerPrefs.SetInt(rightBlinkKey, i);
}
}
if (indexOfLeftEyelid == -1 || indexOfRightEyelid == -1)
{
Logger.Error("Left and/or Right eyelid blend shapes not found!", Logger.LogCategory.Character);
return;
}
}
}
else
{
Logger.Error("No SkinnedMeshRenderer found with matching name.", Logger.LogCategory.Character);
}
StartCoroutine(BlinkCoroutine());
}
private void OnValidate()
{
maxBlinkDuration = Mathf.Max(minBlinkDuration, maxBlinkDuration);
maxBlinkInterval = Mathf.Max(minBlinkInterval, maxBlinkInterval);
}
private SkinnedMeshRenderer GetSkinnedMeshRendererWithRegex(Transform parentTransform)
{
SkinnedMeshRenderer findFaceSkinnedMeshRenderer = null;
Regex regexPattern = new("(.*_Head|CC_Base_Body)");
foreach (Transform child in parentTransform)
if (regexPattern.IsMatch(child.name))
{
findFaceSkinnedMeshRenderer = child.GetComponent<SkinnedMeshRenderer>();
if (findFaceSkinnedMeshRenderer != null) break;
}
return findFaceSkinnedMeshRenderer;
}
/// <summary>
/// Coroutine that controls the blinking behavior of the character.
/// </summary>
/// <remarks>
/// This coroutine is designed to perform a sequence of blinking actions where it:
/// <list type="bullet">
/// <item>
/// <description>Closes the eyes smoothly over half of the defined 'blinkDuration'</description>
/// </item>
/// <item>
/// <description>Waits for the defined 'blinkDuration'</description>
/// </item>
/// <item>
/// <description>Opens the eyes smoothly over half of the defined 'blinkDuration'</description>
/// </item>
/// <item>
/// <description>Waits for a randomized interval time before repeating the blinking process</description>
/// </item>
/// </list>
/// </remarks>
/// <returns>Enumerator to control the sequence of this coroutine</returns>
private IEnumerator BlinkCoroutine()
{
while (true)
{
float blinkDuration = Random.Range(minBlinkDuration, maxBlinkDuration);
float blinkInterval = Random.Range(minBlinkInterval, maxBlinkInterval);
// Blink the character's eyes over the course of the blinkDuration
for (float t = 0.0f; t < blinkDuration; t += Time.deltaTime)
{
float normalizedTime = t / blinkDuration;
SetEyelidsBlendShapeWeight(maxBlendshapeWeight * normalizedTime); // Increase the weight of the blend shape to affect the character's model
yield return null;
}
SetEyelidsBlendShapeWeight(maxBlendshapeWeight);
// Wait for blinkDuration seconds, this gives the impression of the eyelids being naturally closed
yield return new WaitForSeconds(blinkDuration);
// Now we 'un-blink' the character's eyes over the course of the blinkDuration
for (float t = 0.0f; t < blinkDuration; t += Time.deltaTime)
{
float normalizedTime = t / blinkDuration;
SetEyelidsBlendShapeWeight(maxBlendshapeWeight - maxBlendshapeWeight * normalizedTime);
yield return null;
}
yield return new WaitForSeconds(blinkInterval);
}
}
/// <summary>
/// Sets the same weight to both eyelids' blend shape.
/// </summary>
private void SetEyelidsBlendShapeWeight(float weight)
{
faceSkinnedMeshRenderer.SetBlendShapeWeight(indexOfLeftEyelid, weight);
faceSkinnedMeshRenderer.SetBlendShapeWeight(indexOfRightEyelid, weight);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b64bad04a93295642a4486f9899f8734
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,785 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Convai.Scripts.Runtime.Utils;
using Convai.Scripts.Utils.LipSync;
using Google.Protobuf;
using Grpc.Core;
using Service;
using UnityEngine;
using static Service.GetResponseRequest.Types;
namespace Convai.Scripts.Utils
{
public class WavHeaderParser
{
public WavHeaderParser(byte[] wavBytes)
{
// Ensure the byte array is not null and has enough bytes to contain a header
if (wavBytes == null || wavBytes.Length < 44)
throw new ArgumentException("Invalid WAV byte array.");
// Parse the number of channels (2 bytes at offset 22)
NumChannels = BitConverter.ToInt16(wavBytes, 22);
// Parse the sample rate (4 bytes at offset 24)
SampleRate = BitConverter.ToInt32(wavBytes, 24);
// Parse the bits per sample (2 bytes at offset 34)
BitsPerSample = BitConverter.ToInt16(wavBytes, 34);
// Parse the Subchunk2 size (data size) to help calculate the data length
DataSize = BitConverter.ToInt32(wavBytes, 40);
}
public int SampleRate { get; }
public int NumChannels { get; }
public int BitsPerSample { get; }
public int DataSize { get; }
public float CalculateDurationSeconds()
{
// Calculate the total number of samples in the data chunk
int totalSamples = DataSize / (NumChannels * (BitsPerSample / 8));
// Calculate the duration in seconds
return (float)totalSamples / SampleRate;
}
}
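// Usage sketch (hedged; `wavBytes` is a hypothetical byte[] obtained elsewhere, e.g. a server response):
//     WavHeaderParser parser = new WavHeaderParser(wavBytes);
//     float seconds = parser.CalculateDurationSeconds();
// For 16-bit stereo at 44100 Hz: duration = DataSize / (2 channels * 2 bytes per sample) / 44100.
// Note the parser assumes a canonical 44-byte RIFF header with the data chunk starting at offset 44.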
/// <summary>
/// This class manages all communication between the Convai server and the plugin, and processes
/// any data transmitted during these interactions. It abstracts the underlying complexities of the plugin,
/// providing a seamless interface for users. Modifications to this class are discouraged as they may impact the
/// stability and functionality of the system. This class is maintained by the development team to ensure compatibility
/// and performance.
/// </summary>
[DisallowMultipleComponent]
[RequireComponent(typeof(ConvaiNPCManager))]
[AddComponentMenu("Convai/Convai GRPC API")]
[HelpURL(
"https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview/convaigrpcapi.cs")]
public class ConvaiGRPCAPI : MonoBehaviour
{
public static ConvaiGRPCAPI Instance;
private readonly List<string> _stringUserText = new();
private ConvaiNPC _activeConvaiNPC;
private string _apiKey;
private CancellationTokenSource _cancellationTokenSource;
private ConvaiChatUIHandler _chatUIHandler;
private void Awake()
{
// Singleton pattern: Ensure only one instance of this script is active.
if (Instance != null && Instance != this)
{
Destroy(gameObject);
return;
}
Instance = this;
// Load API key from a ScriptableObject in Resources folder.
ConvaiAPIKeySetup.GetAPIKey(out _apiKey);
// Find and store a reference to the ConvaiChatUIHandler component in the scene.
_chatUIHandler = FindObjectOfType<ConvaiChatUIHandler>();
}
private void Start()
{
ConvaiNPCManager.Instance.OnActiveNPCChanged += HandleActiveNPCChanged;
_cancellationTokenSource = new CancellationTokenSource();
MainThreadDispatcher.CreateInstance();
}
private void FixedUpdate()
{
// Check if there are pending user texts to display
// If chatUIHandler is available, send the first user text in the list
if (_stringUserText.Count > 0 && _chatUIHandler != null)
{
_chatUIHandler.SendPlayerText(_stringUserText[0]);
// Remove the displayed user text from the list
_stringUserText.RemoveAt(0);
}
}
private void OnDestroy()
{
ConvaiNPCManager.Instance.OnActiveNPCChanged -= HandleActiveNPCChanged;
InterruptCharacterSpeech(_activeConvaiNPC);
try
{
_cancellationTokenSource?.Cancel();
}
catch (Exception ex)
{
// Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
Logger.Warn("Exception in OnDestroy: " + ex.Message, Logger.LogCategory.Character);
}
finally
{
_cancellationTokenSource?.Dispose();
_cancellationTokenSource = null;
}
}
/// <summary>
/// Asynchronously initializes a session ID by communicating with a gRPC service and returns the session ID if
/// successful.
/// </summary>
/// <param name="characterName">The name of the character for which the session is being initialized.</param>
/// <param name="client">The gRPC service client used to make the call to the server.</param>
/// <param name="characterID">The unique identifier for the character.</param>
/// <param name="sessionID">The session ID that may be updated during the initialization process.</param>
/// <returns>
/// A task that represents the asynchronous operation. The task result contains the initialized session ID if
/// successful, or null if the initialization fails.
/// </returns>
public static async Task<string> InitializeSessionIDAsync(string characterName, ConvaiService.ConvaiServiceClient client, string characterID, string sessionID)
{
Logger.DebugLog("Initializing SessionID for character: " + characterName, Logger.LogCategory.Character);
if (client == null)
{
Logger.Error("gRPC client is not initialized.", Logger.LogCategory.Character);
return null;
}
using AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = client.GetResponse();
GetResponseRequest getResponseConfigRequest = new()
{
GetResponseConfig = new GetResponseConfig
{
CharacterId = characterID,
ApiKey = Instance._apiKey,
SessionId = sessionID,
AudioConfig = new AudioConfig { DisableAudio = true }
}
};
try
{
await call.RequestStream.WriteAsync(getResponseConfigRequest);
await call.RequestStream.WriteAsync(new GetResponseRequest
{
GetResponseData = new GetResponseData
{
TextData = "Repeat the following exactly as it is: [Hii]"
}
});
await call.RequestStream.CompleteAsync();
while (await call.ResponseStream.MoveNext())
{
GetResponseResponse result = call.ResponseStream.Current;
if (!string.IsNullOrEmpty(result.SessionId))
{
Logger.DebugLog("SessionID Initialization SUCCESS for: " + characterName,
Logger.LogCategory.Character);
sessionID = result.SessionId;
return sessionID;
}
}
Logger.Exception("SessionID Initialization FAILED for: " + characterName, Logger.LogCategory.Character);
}
catch (RpcException rpcException)
{
switch (rpcException.StatusCode)
{
case StatusCode.Cancelled:
Logger.Exception(rpcException, Logger.LogCategory.Character);
break;
case StatusCode.Unknown:
Logger.Error($"Unknown error from server: {rpcException.Status.Detail}",
Logger.LogCategory.Character);
break;
default:
throw;
}
}
catch (Exception ex)
{
Logger.Exception(ex, Logger.LogCategory.Character);
}
return null;
}
/// <summary>
/// Sends text data to the server and processes the response.
/// </summary>
/// <param name="client">The gRPC client used to communicate with the server.</param>
/// <param name="userText">The text data to send to the server.</param>
/// <param name="characterID">The ID of the character that is sending the text.</param>
/// <param name="isActionActive">Indicates whether actions are active.</param>
/// <param name="isLipSyncActive">Indicates whether lip sync is active.</param>
/// <param name="actionConfig">The action configuration.</param>
/// <param name="faceModel">The face model.</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task SendTextData(ConvaiService.ConvaiServiceClient client, string userText, string characterID, bool isActionActive, bool isLipSyncActive,
ActionConfig actionConfig, FaceModel faceModel)
{
AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call =
GetAsyncDuplexStreamingCallOptions(client);
GetResponseRequest getResponseConfigRequest = CreateGetResponseRequest(
isActionActive,
isLipSyncActive,
0,
characterID,
actionConfig,
faceModel);
try
{
await call.RequestStream.WriteAsync(getResponseConfigRequest);
await call.RequestStream.WriteAsync(new GetResponseRequest
{
GetResponseData = new GetResponseData
{
TextData = userText
}
});
await call.RequestStream.CompleteAsync();
// Store the task that receives results from the server.
Task receiveResultsTask = Task.Run(
async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token); },
_cancellationTokenSource.Token);
// Await the receive loop so the gRPC call completes before this method returns
await receiveResultsTask.ConfigureAwait(false);
}
catch (Exception ex)
{
Logger.Error(ex, Logger.LogCategory.Character);
}
}
// This method will be called whenever the active NPC changes.
private void HandleActiveNPCChanged(ConvaiNPC newActiveNPC)
{
if (newActiveNPC != null)
InterruptCharacterSpeech(newActiveNPC);
// Cancel the ongoing gRPC call
try
{
_cancellationTokenSource?.Cancel();
}
catch (Exception e)
{
// Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
Logger.Warn("Exception in GRPCAPI:HandleActiveNPCChanged: " + e.Message,
Logger.LogCategory.Character);
}
finally
{
_cancellationTokenSource?.Dispose();
_cancellationTokenSource = null;
Logger.Info("The Cancellation Token Source was Disposed in GRPCAPI:HandleActiveNPCChanged",
Logger.LogCategory.Character);
}
_cancellationTokenSource = new CancellationTokenSource(); // Create a new token for future calls
_activeConvaiNPC = newActiveNPC;
}
/// <summary>
/// Starts recording audio and sends it to the server for processing.
/// </summary>
/// <param name="client">gRPC service Client object</param>
/// <param name="isActionActive">Bool specifying whether we are expecting action responses</param>
/// <param name="isLipSyncActive"></param>
/// <param name="recordingFrequency">Frequency of the audio being sent</param>
/// <param name="recordingLength">Length of the recording from the microphone</param>
/// <param name="characterID">Character ID obtained from the playground</param>
/// <param name="actionConfig">Object containing the action configuration</param>
/// <param name="faceModel"></param>
public async Task StartRecordAudio(ConvaiService.ConvaiServiceClient client, bool isActionActive, bool isLipSyncActive, int recordingFrequency, int recordingLength,
string characterID, ActionConfig actionConfig, FaceModel faceModel)
{
AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = GetAsyncDuplexStreamingCallOptions(client);
GetResponseRequest getResponseConfigRequest = CreateGetResponseRequest(isActionActive, isLipSyncActive, recordingFrequency, characterID, actionConfig, faceModel);
Logger.DebugLog(getResponseConfigRequest.ToString(), Logger.LogCategory.Character);
try
{
await call.RequestStream.WriteAsync(getResponseConfigRequest);
}
catch (Exception ex)
{
Logger.Error(ex, Logger.LogCategory.Character);
return; // early return on error
}
AudioClip audioClip = Microphone.Start(MicrophoneManager.Instance.SelectedMicrophoneName, false, recordingLength, recordingFrequency);
MicrophoneTestController.Instance.CheckMicrophoneDeviceWorkingStatus(audioClip);
Logger.Info(_activeConvaiNPC.characterName + " is now listening", Logger.LogCategory.Character);
OnPlayerSpeakingChanged?.Invoke(true);
await ProcessAudioContinuously(call, recordingFrequency, recordingLength, audioClip);
}
private AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> GetAsyncDuplexStreamingCallOptions(ConvaiService.ConvaiServiceClient client)
{
Metadata headers = new()
{
{ "source", "Unity" },
{ "version", "3.0.0" }
};
CallOptions options = new(headers);
return client.GetResponse(options);
}
/// <summary>
/// Creates a GetResponseRequest object configured with the specified parameters for initiating a gRPC call.
/// </summary>
/// <param name="isActionActive">Indicates whether actions are enabled for the character.</param>
/// <param name="isLipSyncActive">Indicates whether lip sync is enabled for the character.</param>
/// <param name="recordingFrequency">The frequency at which the audio is recorded.</param>
/// <param name="characterID">The unique identifier for the character.</param>
/// <param name="actionConfig">The configuration for character actions.</param>
/// <param name="faceModel">The facial model configuration for the character.</param>
/// <returns>A GetResponseRequest object configured with the provided settings.</returns>
private GetResponseRequest CreateGetResponseRequest(bool isActionActive, bool isLipSyncActive, int recordingFrequency, string characterID, ActionConfig actionConfig = null,
FaceModel faceModel = FaceModel.OvrModelName)
{
GetResponseRequest getResponseConfigRequest = new()
{
GetResponseConfig = new GetResponseConfig
{
CharacterId = characterID,
ApiKey = _apiKey, // Assumes apiKey is available
SessionId = _activeConvaiNPC.sessionID, // Assumes _activeConvaiNPC would not be null, else this will throw NullReferenceException
AudioConfig = new AudioConfig
{
SampleRateHertz = recordingFrequency,
EnableFacialData = isLipSyncActive,
FaceModel = faceModel
}
}
};
// Attach the action configuration only when actions are enabled for an active NPC
if (isActionActive && _activeConvaiNPC != null) getResponseConfigRequest.GetResponseConfig.ActionConfig = actionConfig;
return getResponseConfigRequest;
}
/// <summary>
/// Processes audio data continuously from a microphone input and sends it to the server via a gRPC call.
/// </summary>
/// <param name="call">The streaming call to send audio data to the server.</param>
/// <param name="recordingFrequency">The frequency at which the audio is recorded.</param>
/// <param name="recordingLength">The length of the audio recording in seconds.</param>
/// <param name="audioClip">The AudioClip object that contains the audio data from the microphone.</param>
/// <returns>A task that represents the asynchronous operation of processing and sending audio data.</returns>
private async Task ProcessAudioContinuously(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, int recordingFrequency, int recordingLength,
AudioClip audioClip)
{
// Run the receiving results from the server in the background without awaiting it here.
Task receiveResultsTask = Task.Run(async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token); }, _cancellationTokenSource.Token);
int pos = 0;
float[] audioData = new float[recordingFrequency * recordingLength];
while (Microphone.IsRecording(MicrophoneManager.Instance.SelectedMicrophoneName))
{
await Task.Delay(200);
int newPos = Microphone.GetPosition(MicrophoneManager.Instance.SelectedMicrophoneName);
int diff = newPos - pos;
if (diff > 0)
{
if (audioClip == null)
{
try
{
_cancellationTokenSource?.Cancel();
}
catch (Exception e)
{
// Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
Logger.Warn("Exception when Audio Clip is null: " + e.Message,
Logger.LogCategory.Character);
}
finally
{
_cancellationTokenSource?.Dispose();
_cancellationTokenSource = null;
Logger.Info("The Cancellation Token Source was Disposed because the Audio Clip was empty.",
Logger.LogCategory.Character);
}
break;
}
audioClip.GetData(audioData, pos);
await ProcessAudioChunk(call, diff, audioData);
pos = newPos;
}
}
// Process any remaining audio data.
await ProcessAudioChunk(call,
Microphone.GetPosition(MicrophoneManager.Instance.SelectedMicrophoneName) - pos,
audioData).ConfigureAwait(false);
await call.RequestStream.CompleteAsync();
}
/// <summary>
/// Stops recording and processing the audio.
/// </summary>
public void StopRecordAudio()
{
// End microphone recording
Microphone.End(MicrophoneManager.Instance.SelectedMicrophoneName);
try
{
Logger.Info(_activeConvaiNPC.characterName + " has stopped listening", Logger.LogCategory.Character);
OnPlayerSpeakingChanged?.Invoke(false);
}
catch (Exception)
{
Logger.Error("No active NPC found", Logger.LogCategory.Character);
}
}
/// <summary>
/// Processes each audio chunk and sends it to the server.
/// </summary>
/// <param name="call">gRPC Streaming call connecting to the getResponse function</param>
/// <param name="diff">Length of the audio data from the current position to the position of the last sent chunk</param>
/// <param name="audioData">Chunk of audio data that we want to be processed</param>
private static async Task ProcessAudioChunk(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, int diff, IReadOnlyList<float> audioData)
{
if (diff > 0)
{
// Convert audio data to byte array
byte[] audioByteArray = new byte[diff * sizeof(short)];
for (int i = 0; i < diff; i++)
{
float sample = audioData[i];
short shortSample = (short)(sample * short.MaxValue);
byte[] shortBytes = BitConverter.GetBytes(shortSample);
audioByteArray[i * sizeof(short)] = shortBytes[0];
audioByteArray[i * sizeof(short) + 1] = shortBytes[1];
}
// Send audio data to the gRPC server
try
{
await call.RequestStream.WriteAsync(new GetResponseRequest
{
GetResponseData = new GetResponseData
{
AudioData = ByteString.CopyFrom(audioByteArray)
}
});
}
catch (RpcException rpcException)
{
if (rpcException.StatusCode == StatusCode.Cancelled)
Logger.Error(rpcException, Logger.LogCategory.Character);
else
throw;
}
catch (Exception ex)
{
Logger.Error(ex, Logger.LogCategory.Character);
}
}
}
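// Worked example of the PCM16 encoding above (illustrative values): a float sample of 0.5f becomes
// (short)(0.5f * short.MaxValue) = 16383 = 0x3FFF, stored little-endian as the byte pair { 0xFF, 0x3F }.
// BitConverter.GetBytes emits that order on the little-endian platforms Unity commonly targets,
// which matches the PCM layout the server expects.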
/// <summary>
/// Interrupts the given NPC's ongoing speech: cancels the current gRPC call, clears queued
/// responses and resets its talking state, lip sync, animation and audio playback.
/// </summary>
/// <param name="newActiveNPC">The NPC whose speech should be interrupted.</param>
public void InterruptCharacterSpeech(ConvaiNPC newActiveNPC)
{
// If the active NPC is speaking, cancel the ongoing gRPC call,
// clear the response queue, and reset the character's speaking state, lip-sync, animation, and audio playback
if (newActiveNPC != null)
{
// Cancel the ongoing gRPC call
try
{
_cancellationTokenSource?.Cancel();
}
catch (Exception e)
{
// Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
Logger.Warn("Exception in Interrupt Character Speech: " + e.Message, Logger.LogCategory.Character);
}
finally
{
_cancellationTokenSource?.Dispose();
_cancellationTokenSource = null;
Logger.Info($"The Cancellation Token Source for {newActiveNPC} was Disposed in ConvaiGRPCAPI:InterruptCharacterSpeech.", Logger.LogCategory.Character);
}
_cancellationTokenSource = new CancellationTokenSource(); // Create a new token for future calls
CharacterInterrupted?.Invoke();
// Clear the response queue
newActiveNPC.ClearResponseQueue();
// Reset the character's speaking state
newActiveNPC.SetCharacterTalking(false);
// Stop any ongoing audio playback
newActiveNPC.StopAllAudioPlayback();
// Stop any ongoing lip sync for active NPC
newActiveNPC.StopLipSync();
// Reset the character's animation to idle
newActiveNPC.ResetCharacterAnimation();
}
}
/// <summary>
/// Continuously receives responses from the server and routes them to the active NPC's response and lip-sync queues
/// </summary>
/// <param name="call">gRPC Streaming call connecting to the getResponse function</param>
/// <param name="cancellationToken"></param>
private async Task ReceiveResultFromServer(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, CancellationToken cancellationToken)
{
Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue = new();
bool firstSilFound = false;
while (!cancellationToken.IsCancellationRequested && await call.ResponseStream.MoveNext(cancellationToken).ConfigureAwait(false))
try
{
// Get the response from the server
GetResponseResponse result = call.ResponseStream.Current;
OnResultReceived?.Invoke(result);
// Process different types of responses
if (result.UserQuery != null)
if (_chatUIHandler != null)
// Add user query to the list
_stringUserText.Add(result.UserQuery.TextData);
// Trigger the current section of the narrative design manager in the active NPC
if (result.BtResponse != null) TriggerNarrativeSection(result);
// Add action response to the list in the active NPC
if (result.ActionResponse != null)
if (_activeConvaiNPC.actionsHandler != null)
_activeConvaiNPC.actionsHandler.actionResponseList.Add(result.ActionResponse.Action);
// Add audio response to the list in the active NPC
if (result.AudioResponse != null)
{
if (result.AudioResponse.AudioData != null)
{
// Only process payloads larger than a bare WAV header (44 bytes plus padding)
if (result.AudioResponse.AudioData.ToByteArray().Length > 46)
{
byte[] wavBytes = result.AudioResponse.AudioData.ToByteArray();
// will only work for wav files
WavHeaderParser parser = new(wavBytes);
if (_activeConvaiNPC.convaiLipSync == null)
{
Logger.DebugLog($"Enqueuing responses: {result.AudioResponse.TextData}", Logger.LogCategory.LipSync);
_activeConvaiNPC.EnqueueResponse(result);
}
else
{
LipSyncBlendFrameData.FrameType frameType =
_activeConvaiNPC.convaiLipSync.faceModel == FaceModel.OvrModelName
? LipSyncBlendFrameData.FrameType.Visemes
: LipSyncBlendFrameData.FrameType.Blendshape;
lipSyncBlendFrameQueue.Enqueue(
new LipSyncBlendFrameData(
(int)(parser.CalculateDurationSeconds() * 30),
result,
frameType
)
);
}
}
// Check if the response contains visemes data and the active NPC has a LipSync component
if (result.AudioResponse.VisemesData != null)
if (_activeConvaiNPC.convaiLipSync != null)
{
// Logger.Info(result.AudioResponse.VisemesData, Logger.LogCategory.LipSync);
if (result.AudioResponse.VisemesData.Visemes.Sil == -2 || result.AudioResponse.EndOfResponse)
{
if (firstSilFound) lipSyncBlendFrameQueue.Dequeue().Process(_activeConvaiNPC);
firstSilFound = true;
}
else
{
lipSyncBlendFrameQueue.Peek().Enqueue(result.AudioResponse.VisemesData);
}
}
// Check if the response contains blendshapes data and the active NPC has a LipSync component
if (result.AudioResponse.BlendshapesFrame != null)
if (_activeConvaiNPC.convaiLipSync != null)
{
if (lipSyncBlendFrameQueue.Peek().CanProcess() || result.AudioResponse.EndOfResponse)
{
lipSyncBlendFrameQueue.Dequeue().Process(_activeConvaiNPC);
}
else
{
lipSyncBlendFrameQueue.Peek().Enqueue(result.AudioResponse.BlendshapesFrame);
if (lipSyncBlendFrameQueue.Peek().CanPartiallyProcess()) lipSyncBlendFrameQueue.Peek().ProcessPartially(_activeConvaiNPC);
}
}
}
}
// Responses that carry only a debug log (no audio) are still forwarded to the NPC
if (result.AudioResponse == null && result.DebugLog != null)
_activeConvaiNPC.EnqueueResponse(call.ResponseStream.Current);
// Only update the session ID in the active NPC while it still holds the placeholder "-1"
if (_activeConvaiNPC.sessionID == "-1")
_activeConvaiNPC.sessionID = call.ResponseStream.Current.SessionId;
}
catch (RpcException rpcException)
{
// Handle RpcExceptions, log or throw if necessary
if (rpcException.StatusCode == StatusCode.Cancelled)
Logger.Error(rpcException, Logger.LogCategory.Character);
else
throw;
}
catch (Exception ex)
{
Logger.DebugLog(ex, Logger.LogCategory.Character);
}
if (cancellationToken.IsCancellationRequested) await call.RequestStream.CompleteAsync();
}
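// Ordering note (assumed from the handling above): each audio chunk is enqueued together with a
// frame budget derived from its WAV duration, and the viseme/blendshape frames that stream in
// afterwards are attached to the entry at the front of the queue. A "sil" viseme value of -2 or
// an EndOfResponse flag marks the boundary at which a completed entry is dequeued and processed.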
/// <summary>
/// Triggers the narrative design section referenced in the server response on the active NPC.
/// </summary>
/// <param name="result">The server response carrying the behavior tree section ID.</param>
private void TriggerNarrativeSection(GetResponseResponse result)
{
// Trigger the current section of the narrative design manager in the active NPC
if (result.BtResponse != null)
{
Debug.Log("Narrative Design SectionID: " + result.BtResponse.NarrativeSectionId);
// Get the NarrativeDesignManager component from the active NPC
NarrativeDesignManager narrativeDesignManager = _activeConvaiNPC.narrativeDesignManager;
if (narrativeDesignManager != null)
MainThreadDispatcher.Instance.RunOnMainThread(() => { narrativeDesignManager.UpdateCurrentSection(result.BtResponse.NarrativeSectionId); });
else
Debug.Log("NarrativeDesignManager component not found in the active NPC");
}
}
/// <summary>
/// Sends a narrative design trigger to the server for the given character and processes the response.
/// </summary>
/// <param name="client">The gRPC client used to communicate with the server.</param>
/// <param name="characterID">The ID of the character the trigger is sent for.</param>
/// <param name="triggerConfig">The trigger name and message to send.</param>
public async Task SendTriggerData(ConvaiService.ConvaiServiceClient client, string characterID, TriggerConfig triggerConfig)
{
AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = GetAsyncDuplexStreamingCallOptions(client);
GetResponseRequest getResponseConfigRequest = CreateGetResponseRequest(true, true, 0, characterID);
try
{
await call.RequestStream.WriteAsync(getResponseConfigRequest);
await call.RequestStream.WriteAsync(new GetResponseRequest
{
GetResponseData = new GetResponseData
{
TriggerData = triggerConfig
}
});
await call.RequestStream.CompleteAsync();
// Store the task that receives results from the server.
Task receiveResultsTask = Task.Run(
async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token); },
_cancellationTokenSource.Token);
// Await the receive loop so the gRPC call completes before this method returns
await receiveResultsTask.ConfigureAwait(false);
}
catch (Exception ex)
{
Logger.Error(ex, Logger.LogCategory.Character);
}
}
/// <summary>
/// Asynchronously sends feedback to the server.
/// </summary>
/// <param name="thumbsUp">Indicates whether the feedback is a thumbs up or thumbs down.</param>
/// <param name="interactionID">The ID associated with the interaction.</param>
/// <param name="feedbackText">The text content of the feedback.</param>
/// <returns>A Task representing the asynchronous operation.</returns>
public async Task SendFeedback(bool thumbsUp, string interactionID, string feedbackText)
{
// Create a FeedbackRequest object with the provided parameters.
FeedbackRequest request = new()
{
InteractionId = interactionID,
CharacterId = _activeConvaiNPC.characterID,
SessionId = _activeConvaiNPC.sessionID,
TextFeedback = new FeedbackRequest.Types.Feedback
{
FeedbackText = feedbackText,
ThumbsUp = thumbsUp
}
};
try
{
// Send the feedback request asynchronously and await the response.
FeedbackResponse response = await _activeConvaiNPC.GetClient().SubmitFeedbackAsync(request, cancellationToken: _cancellationTokenSource.Token);
// Log the feedback response.
Logger.Info(response.FeedbackResponse_, Logger.LogCategory.Character);
}
catch (RpcException rpcException)
{
// Log an exception if there is an error in sending the feedback.
Logger.Exception(rpcException, Logger.LogCategory.Character);
}
}
#region Events
public event Action CharacterInterrupted; // Event to notify when the character's speech is interrupted
public event Action<GetResponseResponse> OnResultReceived; // Event to notify when a response is received from the server
public event Action<bool> OnPlayerSpeakingChanged; // Event to notify when the player starts or stops speaking
#endregion
}
}
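The events in the region above are the intended integration surface for UI code. A minimal sketch of a listener, assuming ConvaiGRPCAPI lives in the Convai.Scripts.Utils namespace like the neighbouring utilities and that its singleton Instance exists before this component is enabled (RecordingIndicator and recordingIcon are hypothetical):

using Convai.Scripts.Utils;
using UnityEngine;

// Hypothetical listener that mirrors OnPlayerSpeakingChanged onto a UI icon.
public class RecordingIndicator : MonoBehaviour
{
    [SerializeField] private GameObject recordingIcon; // assumed UI object

    private void OnEnable()
    {
        ConvaiGRPCAPI.Instance.OnPlayerSpeakingChanged += HandleSpeakingChanged;
    }

    private void OnDisable()
    {
        ConvaiGRPCAPI.Instance.OnPlayerSpeakingChanged -= HandleSpeakingChanged;
    }

    private void HandleSpeakingChanged(bool isSpeaking)
    {
        // Show the icon while the microphone is streaming to the server
        recordingIcon.SetActive(isSpeaking);
    }
}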

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ac3bfdb7f1f556540bc41acc9a375817
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,274 @@
using UnityEngine;
namespace Convai.Scripts.Utils
{
/// <summary>
/// This class provides head tracking functionalities for an object (like a character) with an Animator.
/// It requires the Animator component to be attached to the same GameObject.
/// </summary>
[RequireComponent(typeof(Animator))]
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Character Head & Eye Tracking")]
[HelpURL(
"https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview/convaiheadtracking")]
public class ConvaiHeadTracking : MonoBehaviour
{
private const float POSITION_UPDATE_DELAY = 2f;
[field: Header("Tracking Properties")]
[Tooltip("The object that the head should track.")]
[field: SerializeField]
public Transform TargetObject { get; set; }
[Range(0.0f, 100.0f)]
[Tooltip("The maximum distance at which the head must still track target.")]
[SerializeField]
private float trackingDistance = 10f;
[Tooltip("Speed at which character turns towards the target.")]
[Range(1f, 10f)]
[SerializeField]
private float turnSpeed = 5.0f;
[Header("Look At Weights")]
[Range(0f, 1f)]
[Tooltip(
"Controls the amount of rotation applied to the body to achieve the 'Look At' target. The closer to 1, the more the body will rotate to follow the target.")]
[SerializeField]
private float bodyLookAtWeight = 0.6f;
[Range(0f, 1f)]
[Tooltip(
"Controls the amount of rotation applied to the head to achieve the 'Look At' target. The closer to 1, the more the head will rotate to follow the target.")]
[SerializeField]
private float headLookAtWeight = 0.8f;
[Range(0f, 1f)]
[Tooltip(
"Controls the amount of rotation applied to the eyes to achieve the 'Look At' target. The closer to 1, the more the eyes will rotate to follow the target.")]
[SerializeField]
private float eyesLookAtWeight = 1f;
[Space(10)]
[Tooltip(
"Set this to true if you want the character to look away randomly, false to always look at the target")]
[SerializeField]
private bool lookAway;
private Animator _animator;
private float _appliedBodyLookAtWeight;
private ConvaiActionsHandler _convaiActionsHandler;
private float _currentLookAtWeight;
private float _desiredLookAtWeight = 1f;
private Transform _headPivot;
private bool _isActionRunning;
private void Start()
{
InitializeComponents();
InitializeHeadPivot();
InvokeRepeating(nameof(UpdateTarget), 0, POSITION_UPDATE_DELAY);
}
private void OnDisable()
{
if (_convaiActionsHandler != null)
_convaiActionsHandler.UnregisterForActionEvents(ConvaiActionsHandler_OnActionStarted, ConvaiActionsHandler_OnActionEnded);
}
/// <summary>
/// Unity's built-in method called during the IK pass.
/// </summary>
public void OnAnimatorIK(int layerIndex)
{
PerformHeadTracking();
}
private void InitializeComponents()
{
if (!_animator) _animator = GetComponent<Animator>();
InitializeTargetObject();
if (TryGetComponent(out _convaiActionsHandler))
_convaiActionsHandler.RegisterForActionEvents(ConvaiActionsHandler_OnActionStarted, ConvaiActionsHandler_OnActionEnded);
}
private void ConvaiActionsHandler_OnActionStarted(string action, GameObject target)
{
SetActionRunning(true);
}
private void ConvaiActionsHandler_OnActionEnded(string action, GameObject target)
{
SetActionRunning(false);
}
private void InitializeHeadPivot()
{
// Check if the pivot already exists
if (_headPivot) return;
// Create a new GameObject for the pivot
_headPivot = new GameObject("HeadPivot").transform;
// Set the new GameObject as a child of this character object
_headPivot.transform.parent = transform;
// Position the pivot appropriately, in this case, it seems like it's a bit above the base (probably around the character's neck/head)
_headPivot.localPosition = new Vector3(0, 1.6f, 0);
}
private void RotateCharacterTowardsTarget()
{
Vector3 toTarget = TargetObject.position - transform.position;
float distance = toTarget.magnitude;
// Calculate the angle difference between the character's forward direction and the direction towards the target.
float angleDifference = Vector3.Angle(transform.forward, toTarget);
// Adjust turn speed based on distance to target.
float adjustedTurnSpeed = turnSpeed * 4 * (1f / distance);
// If the angle difference exceeds the limit, we turn the character smoothly towards the target.
if (Mathf.Abs(angleDifference) > 0.65f)
{
Vector3 targetDirection = toTarget.normalized;
// Zero out the y-component (up-down direction) to only rotate on the horizontal plane.
targetDirection.y = 0;
Quaternion targetRotation = Quaternion.LookRotation(targetDirection);
transform.rotation = Quaternion.RotateTowards(transform.rotation, targetRotation,
adjustedTurnSpeed * Time.deltaTime);
// Ensure that the character doesn't tilt on the X and Z axis.
transform.eulerAngles = new Vector3(0, transform.eulerAngles.y, 0);
}
}
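// Worked example (assuming distance is in meters): RotateTowards interprets the value as degrees
// per second once scaled by Time.deltaTime, so with the default turnSpeed of 5 and a target 2 m
// away the character yaws at 5 * 4 / 2 = 10 degrees per second; halving the distance doubles it.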
private void InitializeTargetObject()
{
if (TargetObject != null) return;
Logger.Warn("No target object set for head tracking. Setting default target as main camera",
Logger.LogCategory.Character);
if (Camera.main != null) TargetObject = Camera.main.transform;
}
/// <summary>
/// Updates the target weight for the look-at.
/// </summary>
private void UpdateTarget()
{
_desiredLookAtWeight = lookAway ? Random.Range(0.2f, 1.0f) : 1f;
}
/// <summary>
/// Performs the head tracking towards the target object.
/// </summary>
private void PerformHeadTracking()
{
if (_isActionRunning) return;
float distance = Vector3.Distance(transform.position, TargetObject.position);
DrawRayToTarget();
// Beyond half the tracking distance, fade the look-at weight towards zero; the extra
// SetCurrentLookAtWeight call here doubles the lerp step so the fade-out is quicker
if (!(distance < trackingDistance / 2))
{
_desiredLookAtWeight = 0;
if (_currentLookAtWeight > 0)
SetCurrentLookAtWeight();
}
SetCurrentLookAtWeight();
_headPivot.transform.LookAt(TargetObject); // orient the pivot towards the target object
// Limit the head rotation via the pivot's local-rotation y component
float headRotation = _headPivot.localRotation.y;
if (Mathf.Abs(headRotation) > 0.70f)
{
// clamp the quaternion's y component to ±0.70, roughly ±90 degrees of yaw
headRotation = Mathf.Sign(headRotation) * 0.70f;
Quaternion localRotation = _headPivot.localRotation;
localRotation.y = headRotation;
_headPivot.localRotation = localRotation;
}
// adjust body rotation weight based on how much the head is rotated
float targetBodyLookAtWeight = Mathf.Abs(_headPivot.localRotation.y) > 0.45f
? bodyLookAtWeight / 3f
: 0f;
// smooth transition between current and target body rotation weight
_appliedBodyLookAtWeight = Mathf.Lerp(_appliedBodyLookAtWeight, targetBodyLookAtWeight, Time.deltaTime);
// Apply rotation weights to the Animator
RotateCharacterTowardsTarget();
AdjustAnimatorLookAt();
}
/// <summary>
/// Method to set the current look at weight based on the desired look at weight.
/// </summary>
private void SetCurrentLookAtWeight()
{
float angleDifference = _headPivot.localRotation.y;
// Lerp the currentLookAtWeight towards the desiredLookAtWeight or towards 0 if above a certain threshold.
_currentLookAtWeight = Mathf.Abs(angleDifference) < 0.55f
? Mathf.Lerp(Mathf.Clamp(_currentLookAtWeight, 0, 1), Mathf.Clamp(_desiredLookAtWeight, 0, 1),
Time.deltaTime * POSITION_UPDATE_DELAY)
: Mathf.Lerp(Mathf.Clamp(_currentLookAtWeight, 0, 1), 0, Time.deltaTime * POSITION_UPDATE_DELAY);
}
/// <summary>
/// Method to apply rotation weights to the Animator
/// </summary>
private void AdjustAnimatorLookAt()
{
// Check if the Animator or TargetObject is missing
if (!_animator || TargetObject == null)
{
// Zero the look-at weight if the Animator still exists, then bail out early
// (calling into a null Animator here would throw, so guard the call)
if (_animator) _animator.SetLookAtWeight(0);
return;
}
// Set the look-at weights in the Animator.
// This is used to dictate how much the body, head or eyes should turn to "look at" the target.
// `Mathf.Clamp` is used to ensure the weight values lie between 0 and 1 (inclusive).
// The body weight is clamped between 0 to 0.5 since it's less advisable to rotate the body too much versus the head or eyes.
_animator.SetLookAtWeight(Mathf.Clamp(
_currentLookAtWeight, 0, 1),
Mathf.Clamp(_appliedBodyLookAtWeight, 0, .5f),
Mathf.Clamp(headLookAtWeight / 1.25f, 0, .8f),
Mathf.Clamp(eyesLookAtWeight, 0, 1));
// Set the look-at position for the Animator (where the body/head/eyes will turn toward)
_animator.SetLookAtPosition(TargetObject.position);
}
/// <summary>
/// DebugLog utility to visualize the tracking mechanism
/// </summary>
private void DrawRayToTarget()
{
Vector3 pos = transform.position;
// Draw a debug ray from our position to the normalized direction towards the target, scaled by half of the tracking distance threshold.
// The purpose is to visualize the direction and focus of the head tracking, and it's a useful debug tool in Unity's Scene view.
// "Normalized" ensures that the vector has a magnitude (length) of 1, keeping the scaling of the vector consistent.
// This ray appears red in the Scene view.
Debug.DrawRay(pos,
(TargetObject.position - pos).normalized * trackingDistance / 2, Color.red);
}
public void SetActionRunning(bool newValue)
{
_isActionRunning = newValue;
}
}
}
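Because TargetObject is a public property, the tracked target can be swapped at runtime without touching the component's internals. A minimal sketch, assuming the component sits on the same GameObject (LookAtSpeaker and its speaker field are hypothetical):

using Convai.Scripts.Utils;
using UnityEngine;

// Hypothetical helper: points the head tracking at an arbitrary transform at runtime.
public class LookAtSpeaker : MonoBehaviour
{
    [SerializeField] private Transform speaker; // assumed target, e.g. the player rig

    private void Start()
    {
        // If nothing is assigned, ConvaiHeadTracking falls back to Camera.main by itself.
        if (TryGetComponent(out ConvaiHeadTracking tracking) && speaker != null)
            tracking.TargetObject = speaker;
    }
}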

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 47bf09eafaaeed940ab9e5531a64790c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,336 @@
using System;
using UnityEngine;
#if ENABLE_INPUT_SYSTEM
using UnityEngine.InputSystem;
#endif
/// <summary>
/// The Input Manager class for Convai, allowing you to control inputs in your project through this class.
/// It supports both the New Input System and Old Input System.
/// </summary>
[DefaultExecutionOrder(-105)]
public class ConvaiInputManager : MonoBehaviour
{
#if ENABLE_LEGACY_INPUT_MANAGER
[Serializable]
public class FourDirectionalMovementKeys
{
public KeyCode Forward = KeyCode.W;
public KeyCode Backward = KeyCode.S;
public KeyCode Right = KeyCode.D;
public KeyCode Left = KeyCode.A;
}
#endif
#if ENABLE_INPUT_SYSTEM
/// <summary>
/// Input Action for player movement.
/// </summary>
[Header("Player Related")] public InputAction PlayerMovementKeyAction;
/// <summary>
/// Input Action for player jumping.
/// </summary>
public InputAction PlayerJumpKeyAction;
/// <summary>
/// Input Action for player running.
/// </summary>
public InputAction PlayerRunKeyAction;
/// <summary>
/// Input Action for locking the cursor.
/// </summary>
[Header("General")] public InputAction CursorLockKeyAction;
/// <summary>
/// Input Action for sending text.
/// </summary>
public InputAction TextSendKeyAction;
/// <summary>
/// Input Action for talk functionality.
/// </summary>
public InputAction TalkKeyAction;
/// <summary>
/// Action to open the Settings Panel.
/// </summary>
public InputAction SettingsKeyAction;
#elif ENABLE_LEGACY_INPUT_MANAGER
/// <summary>
/// Key used to manage cursor lock
/// </summary>
public KeyCode CursorLockKey = KeyCode.Escape;
/// <summary>
/// Key used to manage text send
/// </summary>
public KeyCode TextSendKey = KeyCode.Return;
/// <summary>
/// Key used to manage text send
/// </summary>
public KeyCode TextSendAltKey = KeyCode.KeypadEnter;
/// <summary>
/// Key used to manage record user audio
/// </summary>
public KeyCode TalkKey = KeyCode.T;
/// <summary>
/// Key used to manage setting panel toggle
/// </summary>
public KeyCode OpenSettingPanelKey = KeyCode.F10;
/// <summary>
/// Key used to manage running
/// </summary>
public KeyCode RunKey = KeyCode.LeftShift;
/// <summary>
/// Keys used to manage movement
/// </summary>
public FourDirectionalMovementKeys MovementKeys;
#endif
/// <summary>
/// Singleton instance providing easy access to the ConvaiInputManager from other scripts.
/// </summary>
public static ConvaiInputManager Instance { get; private set; }
/// <summary>
/// Awake is called when the script instance is being loaded.
/// </summary>
private void Awake()
{
// Ensure only one instance of ConvaiInputManager exists
if (Instance != null)
{
Debug.LogError("There's more than one ConvaiInputManager! " + transform + " - " + Instance);
Destroy(gameObject);
return;
}
Instance = this;
}
/// <summary>
/// Enable input actions when the object is enabled.
/// </summary>
private void OnEnable()
{
#if ENABLE_INPUT_SYSTEM
PlayerMovementKeyAction.Enable();
PlayerJumpKeyAction.Enable();
PlayerRunKeyAction.Enable();
CursorLockKeyAction.Enable();
TextSendKeyAction.Enable();
TalkKeyAction.Enable();
SettingsKeyAction.Enable();
#endif
}
/// <summary>
/// Checks if the left mouse button was pressed.
/// </summary>
public bool WasMouseLeftButtonPressed()
{
// Check if the left mouse button was pressed this frame
#if ENABLE_INPUT_SYSTEM && !UNITY_ANDROID && !UNITY_IOS
return Mouse.current.leftButton.wasPressedThisFrame;
#else
return Input.GetMouseButtonDown(0);
#endif
}
/// <summary>
/// Gets the current mouse position.
/// </summary>
public Vector2 GetMousePosition()
{
// Get the current mouse position
#if ENABLE_INPUT_SYSTEM
return Mouse.current.position.ReadValue();
#else
return Input.mousePosition;
#endif
}
/// <summary>
/// Gets the vertical movement of the mouse.
/// </summary>
public float GetMouseYAxis()
{
// Get the vertical movement of the mouse
#if ENABLE_INPUT_SYSTEM && !UNITY_ANDROID && !UNITY_IOS
return Mouse.current.delta.y.ReadValue();
#else
return Input.GetAxis("Mouse Y");
#endif
}
/// <summary>
/// Gets the horizontal movement of the mouse.
/// </summary>
public float GetMouseXAxis()
{
// Get the horizontal movement of the mouse
#if ENABLE_INPUT_SYSTEM && !UNITY_ANDROID && !UNITY_IOS
return Mouse.current.delta.x.ReadValue();
#else
return Input.GetAxis("Mouse X");
#endif
}
// General input methods
/// <summary>
/// Checks if the cursor lock key was pressed.
/// </summary>
public bool WasCursorLockKeyPressed()
{
// Check if the cursor lock key was pressed this frame
#if ENABLE_INPUT_SYSTEM
return CursorLockKeyAction.WasPressedThisFrame();
#else
return Input.GetKeyDown(CursorLockKey);
#endif
}
/// <summary>
/// Checks if the text send key was pressed.
/// </summary>
public bool WasTextSendKeyPressed()
{
// Check if the text send key was pressed this frame
#if ENABLE_INPUT_SYSTEM
return TextSendKeyAction.WasPressedThisFrame();
#else
return Input.GetKeyDown(TextSendKey) || Input.GetKeyDown(TextSendAltKey);
#endif
}
/// <summary>
/// Checks if the talk key was pressed.
/// </summary>
public bool WasTalkKeyPressed()
{
// Check if the talk key was pressed this frame
#if ENABLE_INPUT_SYSTEM
return TalkKeyAction.WasPressedThisFrame();
#else
return Input.GetKeyDown(TalkKey);
#endif
}
/// <summary>
/// Checks if the talk key is being held down.
/// </summary>
public bool IsTalkKeyHeld()
{
// Check if the talk key is being held down
#if ENABLE_INPUT_SYSTEM
return TalkKeyAction.IsPressed();
#else
return Input.GetKey(TalkKey);
#endif
}
#if ENABLE_INPUT_SYSTEM
/// <summary>
/// Retrieves the InputAction associated with the talk key.
/// </summary>
/// <returns>The InputAction for handling talk-related input.</returns>
public InputAction GetTalkKeyAction() => TalkKeyAction;
#endif
/// <summary>
/// Checks if the talk key was released.
/// </summary>
public bool WasTalkKeyReleased()
{
// Check if the talk key was released this frame
#if ENABLE_INPUT_SYSTEM
return TalkKeyAction.WasReleasedThisFrame();
#else
return Input.GetKeyUp(TalkKey);
#endif
}
/// <summary>
/// Checks if the Settings key was pressed.
/// </summary>
public bool WasSettingsKeyPressed()
{
// Check if the Settings key was pressed this frame
#if ENABLE_INPUT_SYSTEM
return SettingsKeyAction.WasPressedThisFrame();
#else
return Input.GetKeyDown(OpenSettingPanelKey);
#endif
}
// Player related input methods
/// <summary>
/// Checks if the jump key was pressed.
/// </summary>
public bool WasJumpKeyPressed()
{
// Check if the jump key was pressed this frame
#if ENABLE_INPUT_SYSTEM
return PlayerJumpKeyAction.WasPressedThisFrame();
#else
return Input.GetButton("Jump");
#endif
}
/// <summary>
/// Checks if the run key is being held down.
/// </summary>
public bool IsRunKeyHeld()
{
// Check if the run key is being held down
#if ENABLE_INPUT_SYSTEM
return PlayerRunKeyAction.IsPressed();
#else
return Input.GetKey(RunKey);
#endif
}
/// <summary>
/// Gets the player's movement input vector.
/// </summary>
public Vector2 GetPlayerMoveVector()
{
// Get the player's movement input vector
#if ENABLE_INPUT_SYSTEM
return PlayerMovementKeyAction.ReadValue<Vector2>();
#else
Vector2 inputMoveDir = new Vector2(0, 0);
// Manual input for player movement
if (Input.GetKey(MovementKeys.Forward))
{
inputMoveDir.y += 1f;
}
if (Input.GetKey(MovementKeys.Backward))
{
inputMoveDir.y -= 1f;
}
if (Input.GetKey(MovementKeys.Left))
{
inputMoveDir.x -= 1f;
}
if (Input.GetKey(MovementKeys.Right))
{
inputMoveDir.x += 1f;
}
return inputMoveDir;
#endif
}
}
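Because every accessor compiles to the matching backend behind one signature, gameplay code can stay backend-agnostic by polling this manager instead of UnityEngine.Input directly. A minimal sketch of that polling pattern (SimplePlayerMover and its speed field are hypothetical):

using UnityEngine;

// Hypothetical controller that reads all input through ConvaiInputManager.
public class SimplePlayerMover : MonoBehaviour
{
    [SerializeField] private float speed = 3f; // assumed walk speed in units per second

    private void Update()
    {
        ConvaiInputManager input = ConvaiInputManager.Instance;
        Vector2 move = input.GetPlayerMoveVector();        // WASD keys or the bound action
        float multiplier = input.IsRunKeyHeld() ? 2f : 1f; // sprinting doubles the speed
        transform.Translate(new Vector3(move.x, 0f, move.y) * (speed * multiplier * Time.deltaTime));

        if (input.WasTalkKeyPressed())
            Debug.Log("Talk key pressed this frame");
    }
}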

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7a69a6bc2bf58e64883c79cf732d1bfd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,482 @@
using System;
using System.Collections;
using System.Collections.Generic;
using Convai.Scripts.Utils;
using Convai.Scripts.Utils.LipSync;
using Grpc.Core;
using Service;
using TMPro;
using UnityEngine;
using UnityEngine.Events;
using Logger = Convai.Scripts.Utils.Logger;
using System.Threading.Tasks;
// ReSharper disable CompareOfFloatsByEqualityOperator
#if UNITY_ANDROID
using UnityEngine.Android;
#endif
namespace Convai.Scripts
{
/// <summary>
/// The ConvaiNPC class is a MonoBehaviour script that gives a GameObject the ability to interact with the Convai API.
/// </summary>
[RequireComponent(typeof(Animator), typeof(AudioSource))]
[AddComponentMenu("Convai/ConvaiNPC")]
[HelpURL(
"https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/overview-of-the-convainpc.cs-script")]
public class ConvaiNPC : MonoBehaviour
{
private const int AUDIO_SAMPLE_RATE = 44100;
private const string GRPC_API_ENDPOINT = "stream.convai.com";
private const int RECORDING_FREQUENCY = AUDIO_SAMPLE_RATE;
private const int RECORDING_LENGTH = 30;
private static readonly int Talk = Animator.StringToHash("Talk");
[Header("Character Information")]
[Tooltip("Enter the character name for this NPC.")]
public string characterName;
[Tooltip("Enter the character ID for this NPC.")]
public string characterID;
[Tooltip("The current session ID for the chat with this NPC.")]
[ReadOnly]
public string sessionID = "-1";
[Tooltip("Is this character active?")]
[ReadOnly]
public bool isCharacterActive;
[HideInInspector] public ConvaiActionsHandler actionsHandler;
[HideInInspector] public ConvaiLipSync convaiLipSync;
[Tooltip("Is this character talking?")]
[SerializeField]
[ReadOnly]
private bool isCharacterTalking;
[Header("Session Initialization")]
[Tooltip("Enable/disable initializing session ID by sending a text request to the server")]
public bool initializeSessionID = true;
[HideInInspector] public ConvaiPlayerInteractionManager playerInteractionManager;
[HideInInspector] public NarrativeDesignManager narrativeDesignManager;
[HideInInspector] public TriggerUnityEvent onTriggerSent;
private readonly Queue<GetResponseResponse> _getResponseResponses = new();
private bool _animationPlaying;
private Channel _channel;
private Animator _characterAnimator;
private ConvaiService.ConvaiServiceClient _client;
private ConvaiChatUIHandler _convaiChatUIHandler;
private ConvaiCrosshairHandler _convaiCrosshairHandler;
private ConvaiGroupNPCController _convaiGroupNPCController;
private TMP_InputField _currentInputField;
private bool _groupNPCComponentNotFound;
private ConvaiGRPCAPI _grpcAPI;
private bool _isActionActive;
private bool _isLipSyncActive;
private bool _stopAudioPlayingLoop;
private bool _stopHandlingInput;
public ActionConfig ActionConfig;
private bool IsInConversationWithAnotherNPC
{
get
{
if (_groupNPCComponentNotFound) return false;
if (_convaiGroupNPCController == null)
{
if (TryGetComponent(out ConvaiGroupNPCController component))
_convaiGroupNPCController = component;
else
_groupNPCComponentNotFound = true;
}
return _convaiGroupNPCController != null && _convaiGroupNPCController.IsInConversationWithAnotherNPC;
}
}
public bool IsCharacterTalking
{
get => isCharacterTalking;
private set => isCharacterTalking = value;
}
private FaceModel FaceModel => convaiLipSync == null ? FaceModel.OvrModelName : convaiLipSync.faceModel;
public string GetEndPointURL => GRPC_API_ENDPOINT;
// Properties with getters and setters
[field: NonSerialized] public bool IncludeActionsHandler { get; set; }
[field: NonSerialized] public bool LipSync { get; set; }
[field: NonSerialized] public bool HeadEyeTracking { get; set; }
[field: NonSerialized] public bool EyeBlinking { get; set; }
[field: NonSerialized] public bool NarrativeDesignManager { get; set; }
[field: NonSerialized] public bool ConvaiGroupNPCController { get; set; }
public ConvaiNPCAudioManager AudioManager { get; private set; }
private void Awake()
{
Logger.Info("Initializing ConvaiNPC : " + characterName, Logger.LogCategory.Character);
InitializeComponents();
Logger.Info("ConvaiNPC component initialized", Logger.LogCategory.Character);
}
private async void Start()
{
// Assign the ConvaiGRPCAPI component in the scene
_grpcAPI = ConvaiGRPCAPI.Instance;
// Start the coroutine that plays audio clips in order
StartCoroutine(AudioManager.PlayAudioInOrder());
InvokeRepeating(nameof(ProcessResponse), 0f, 1 / 100f);
// Check if the platform is Android
#if UNITY_ANDROID
// Check if the user has not authorized microphone permission
if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
// Request microphone permission from the user
Permission.RequestUserPermission(Permission.Microphone);
#endif
// DO NOT EDIT
// gRPC setup configuration
#region GRPC_SETUP
SslCredentials credentials = new(); // Create SSL credentials for secure communication
_channel = new Channel(GRPC_API_ENDPOINT, credentials); // Initialize a gRPC channel with the specified endpoint and credentials
_client = new ConvaiService.ConvaiServiceClient(_channel); // Initialize the gRPC client for the ConvaiService using the channel
#endregion
if (initializeSessionID)
{
sessionID = await ConvaiGRPCAPI.InitializeSessionIDAsync(characterName, _client, characterID, sessionID);
}
_convaiChatUIHandler = ConvaiChatUIHandler.Instance;
}
private void Update()
{
// Debug shortcuts for swapping this NPC's character ID at runtime
if (Input.GetKeyDown(KeyCode.U))
{
characterID = "fb0d9902-4fde-11ef-bfa5-42010a7be011";
}
if (Input.GetKeyDown(KeyCode.I))
{
characterID = "3e40947a-4e47-11ef-832b-42010a7be011";
}
playerInteractionManager.UpdateUserInput();
}
private void OnEnable()
{
AudioManager.OnCharacterTalkingChanged += HandleIsCharacterTalkingAnimation;
AudioManager.OnAudioTranscriptAvailable += HandleAudioTranscriptAvailable;
AudioManager.OnCharacterTalkingChanged += SetCharacterTalking;
// Subscribe the purge handler here so the matching unsubscription in OnDestroy has an effect
AudioManager.PurgeExcessLipSyncFrames += PurgeLipSyncFrames;
ConvaiNPCManager.Instance.OnActiveNPCChanged += HandleActiveNPCChanged;
if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
}
private void OnDestroy()
{
if (AudioManager != null)
{
AudioManager.OnCharacterTalkingChanged -= HandleIsCharacterTalkingAnimation;
AudioManager.OnAudioTranscriptAvailable -= HandleAudioTranscriptAvailable;
AudioManager.OnCharacterTalkingChanged -= SetCharacterTalking;
AudioManager.PurgeExcessLipSyncFrames -= PurgeLipSyncFrames;
}
ConvaiNPCManager.Instance.OnActiveNPCChanged -= HandleActiveNPCChanged;
if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
}
/// <summary>
/// Unity callback that is invoked when the application is quitting.
/// Stops the loop that plays audio in order.
/// </summary>
private void OnApplicationQuit()
{
AudioManager.StopAudioLoop();
}
private void OnValidate()
{
_convaiChatUIHandler = ConvaiChatUIHandler.Instance;
if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
}
public async void TriggerEvent(string triggerName, string triggerMessage = "")
{
TriggerConfig trigger = new()
{
TriggerName = triggerName,
TriggerMessage = triggerMessage
};
// Send the trigger to the server using GRPC
await ConvaiGRPCAPI.Instance.SendTriggerData(_client, characterID, trigger);
// Invoke the UnityEvent
onTriggerSent.Invoke(triggerMessage, triggerName);
}
private event Action<bool> OnCharacterTalking;
private void UpdateWaitUntilLipSync(bool value)
{
AudioManager.SetWaitForCharacterLipSync(value);
}
private void HandleActiveNPCChanged(ConvaiNPC newActiveNPC)
{
// If this NPC is no longer the active NPC, interrupt its speech
if (this != newActiveNPC && !IsInConversationWithAnotherNPC && ConvaiInputManager.Instance.WasTalkKeyPressed()) InterruptCharacterSpeech();
}
private void InitializeComponents()
{
_convaiChatUIHandler = FindObjectOfType<ConvaiChatUIHandler>();
_convaiCrosshairHandler = FindObjectOfType<ConvaiCrosshairHandler>();
_characterAnimator = GetComponent<Animator>();
AudioManager = gameObject.AddComponent<ConvaiNPCAudioManager>();
narrativeDesignManager = GetComponent<NarrativeDesignManager>();
InitializePlayerInteractionManager();
InitializeLipSync();
StartCoroutine(InitializeActionsHandler());
}
private IEnumerator InitializeActionsHandler()
{
// Give a ConvaiActionsHandler added during startup a moment to finish initializing
yield return new WaitForSeconds(1);
actionsHandler = GetComponent<ConvaiActionsHandler>();
if (actionsHandler != null)
{
_isActionActive = true;
ActionConfig = actionsHandler.ActionConfig;
}
}
private void InitializePlayerInteractionManager()
{
playerInteractionManager = gameObject.AddComponent<ConvaiPlayerInteractionManager>();
playerInteractionManager.Initialize(this, _convaiCrosshairHandler, _convaiChatUIHandler);
}
private void InitializeLipSync()
{
convaiLipSync = GetComponent<ConvaiLipSync>();
if (convaiLipSync != null)
{
_isLipSyncActive = true;
convaiLipSync.OnCharacterLipSyncing += UpdateWaitUntilLipSync;
}
}
private void HandleAudioTranscriptAvailable(string transcript)
{
if (isCharacterActive) _convaiChatUIHandler.SendCharacterText(characterName, transcript);
}
/// <summary>
/// Handles the character's talking animation based on whether the character is currently talking.
/// </summary>
private void HandleIsCharacterTalkingAnimation(bool isTalking)
{
if (isTalking)
{
if (!_animationPlaying)
{
_animationPlaying = true;
_characterAnimator.SetBool(Talk, true);
}
}
else
{
_animationPlaying = false;
_characterAnimator.SetBool(Talk, false);
}
}
/// <summary>
/// Sends message data to the server asynchronously.
/// </summary>
/// <param name="text">The message to send.</param>
public async void SendTextDataAsync(string text)
{
try
{
await ConvaiGRPCAPI.Instance.SendTextData(_client, text, characterID,
_isActionActive, _isLipSyncActive, ActionConfig, FaceModel);
}
catch (Exception ex)
{
Logger.Error(ex, Logger.LogCategory.Character);
// Handle the exception, e.g., show a message to the user.
}
}
/// <summary>
/// Initializes the session in an asynchronous manner and handles the receiving of results from the server.
/// Initiates the audio recording process using the gRPC API.
/// </summary>
public async void StartListening()
{
if (!MicrophoneManager.Instance.HasAnyMicrophoneDevices())
{
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NoMicrophoneDetected);
return;
}
await _grpcAPI.StartRecordAudio(_client, _isActionActive, _isLipSyncActive, RECORDING_FREQUENCY,
RECORDING_LENGTH, characterID, ActionConfig, FaceModel);
}
/// <summary>
/// Stops the ongoing audio recording process.
/// </summary>
public void StopListening()
{
// Stop the audio recording process using the ConvaiGRPCAPI StopRecordAudio method
_grpcAPI.StopRecordAudio();
}
/// <summary>
/// Add response to the GetResponseResponse Queue
/// </summary>
/// <param name="response"></param>
public void EnqueueResponse(GetResponseResponse response)
{
if (response == null || response.AudioResponse == null) return;
//Logger.DebugLog($"Adding Response for Processing: {response.AudioResponse.TextData}", Logger.LogCategory.LipSync);
_getResponseResponses.Enqueue(response);
}
public void ClearResponseQueue()
{
_getResponseResponses.Clear();
}
private void PurgeLipSyncFrames()
{
if (convaiLipSync == null) return;
convaiLipSync.PurgeExcessFrames();
}
/// <summary>
/// Processes a response fetched from a character.
/// </summary>
/// <remarks>
/// 1. Processes audio/message/face data from the response and adds it to _responseAudios.
/// 2. Identifies actions from the response and parses them for execution.
/// </remarks>
private void ProcessResponse()
{
// Check if the character is active and should process the response
if (isCharacterActive || IsInConversationWithAnotherNPC)
if (_getResponseResponses.Count > 0)
{
GetResponseResponse getResponseResponse = _getResponseResponses.Dequeue();
if (getResponseResponse?.AudioResponse != null)
{
// Check if the audio payload is more than just a WAV header (44 bytes plus padding)
if (getResponseResponse.AudioResponse.AudioData.ToByteArray().Length > 46)
{
// Initialize empty string for text
string textDataString = getResponseResponse.AudioResponse.TextData;
byte[] byteAudio = getResponseResponse.AudioResponse.AudioData.ToByteArray();
AudioClip clip = AudioManager.ProcessByteAudioDataToAudioClip(byteAudio,
getResponseResponse.AudioResponse.AudioConfig.SampleRateHertz.ToString());
// Add the response audio along with associated data to the list
AudioManager.AddResponseAudio(new ConvaiNPCAudioManager.ResponseAudio
{
AudioClip = clip,
AudioTranscript = textDataString,
IsFinal = false
});
}
else if (getResponseResponse.AudioResponse.EndOfResponse)
{
Logger.DebugLog("We have received end of response", Logger.LogCategory.LipSync);
// Queue a final marker so the audio manager knows this response stream has ended
AudioManager.AddResponseAudio(new ConvaiNPCAudioManager.ResponseAudio
{
AudioClip = null,
AudioTranscript = null,
IsFinal = true
});
}
}
}
}
public int GetAudioResponseCount()
{
return AudioManager.GetAudioResponseCount();
}
public void StopAllAudioPlayback()
{
AudioManager.StopAllAudioPlayback();
AudioManager.ClearResponseAudioQueue();
}
public void ResetCharacterAnimation()
{
if (_characterAnimator != null)
_characterAnimator.SetBool(Talk, false);
if (convaiLipSync != null)
convaiLipSync.ConvaiLipSyncApplicationBase.ClearQueue();
}
public void SetCharacterTalking(bool isTalking)
{
if (IsCharacterTalking != isTalking)
{
Logger.Info($"Character {characterName} is talking: {isTalking}", Logger.LogCategory.Character);
IsCharacterTalking = isTalking;
OnCharacterTalking?.Invoke(IsCharacterTalking);
}
}
public void StopLipSync()
{
if (convaiLipSync != null) convaiLipSync.StopLipSync();
}
public void InterruptCharacterSpeech()
{
_grpcAPI.InterruptCharacterSpeech(this);
}
public ConvaiService.ConvaiServiceClient GetClient()
{
return _client;
}
public void UpdateSessionID(string newSessionID)
{
sessionID = newSessionID;
}
[Serializable]
public class TriggerUnityEvent : UnityEvent<string, string>
{
}
}
}
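The public surface above (SendTextDataAsync, TriggerEvent, StartListening/StopListening) is what UI and gameplay scripts are expected to call. A minimal sketch of a text entry point, assuming a scene with a ConvaiNPCManager singleton (GreetActiveNPC is a hypothetical UI hook and the greeting string is illustrative):

using Convai.Scripts;
using Convai.Scripts.Utils;
using UnityEngine;

// Hypothetical UI hook: sends one line of text to whichever NPC is currently active.
public class GreetActiveNPC : MonoBehaviour
{
    public void OnGreetButtonClicked()
    {
        ConvaiNPC npc = ConvaiNPCManager.Instance.activeConvaiNPC;
        if (npc == null) return;               // nobody in range, nothing to send
        npc.SendTextDataAsync("Hello there!"); // fire-and-forget; errors are logged internally
    }
}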

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4b38e4bc919e1f040ba78aea7472893e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,189 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Logger = Convai.Scripts.Utils.Logger;
namespace Convai.Scripts
{
public class ConvaiNPCAudioManager : MonoBehaviour
{
private readonly Queue<ResponseAudio> _responseAudios = new();
private AudioSource _audioSource;
private ConvaiNPC _convaiNPC;
private bool _lastTalkingState;
private bool _stopAudioPlayingLoop;
private bool _waitForCharacterLipSync;
private void Awake()
{
_audioSource = GetComponent<AudioSource>();
_convaiNPC = GetComponent<ConvaiNPC>();
_lastTalkingState = false;
}
public event Action<string> OnAudioTranscriptAvailable;
public event Action<bool> OnCharacterTalkingChanged;
public event Action PurgeExcessLipSyncFrames;
public void StopAllAudioPlayback()
{
if (_audioSource != null && _audioSource.isPlaying) _audioSource.Stop();
}
public void ClearResponseAudioQueue()
{
_responseAudios.Clear();
}
private void SetCharacterTalking(bool isTalking)
{
if (_lastTalkingState != isTalking)
{
OnCharacterTalkingChanged?.Invoke(isTalking);
_lastTalkingState = isTalking;
}
}
private void PurgeLipSyncFrames()
{
PurgeExcessLipSyncFrames?.Invoke();
}
public void AddResponseAudio(ResponseAudio responseAudio)
{
_responseAudios.Enqueue(responseAudio);
}
public int GetAudioResponseCount()
{
return _responseAudios.Count;
}
public bool SetWaitForCharacterLipSync(bool value)
{
_waitForCharacterLipSync = value;
return value;
}
public IEnumerator PlayAudioInOrder()
{
while (!_stopAudioPlayingLoop)
if (_responseAudios.Count > 0)
{
ResponseAudio currentResponseAudio = _responseAudios.Dequeue();
if (!currentResponseAudio.IsFinal)
{
_audioSource.clip = currentResponseAudio.AudioClip;
while (_waitForCharacterLipSync)
yield return new WaitForSeconds(0.01f);
_audioSource.Play();
//Logger.DebugLog($"Playing: {currentResponseAudio.AudioTranscript}", Logger.LogCategory.LipSync);
SetCharacterTalking(true);
OnAudioTranscriptAvailable?.Invoke(currentResponseAudio.AudioTranscript.Trim());
yield return new WaitForSeconds(currentResponseAudio.AudioClip.length);
_audioSource.Stop();
_audioSource.clip = null;
PurgeLipSyncFrames();
if (_responseAudios.Count == 0 && _convaiNPC.convaiLipSync != null)
SetWaitForCharacterLipSync(true);
}
else
{
Logger.DebugLog($"Final Playing: {currentResponseAudio.AudioTranscript}", Logger.LogCategory.LipSync);
SetCharacterTalking(false);
}
}
else
{
yield return new WaitForSeconds(1f);
SetCharacterTalking(false);
}
}
/// <summary>
/// Converts a byte array containing audio data into an AudioClip.
/// </summary>
/// <param name="byteAudio">Byte array containing the audio data</param>
/// <param name="stringSampleRate">String containing the sample rate of the audio</param>
/// <returns>AudioClip containing the decoded audio data</returns>
public AudioClip ProcessByteAudioDataToAudioClip(byte[] byteAudio, string stringSampleRate)
{
try
{
if (byteAudio.Length <= 44)
throw new ArgumentException("Not enough data in byte audio to trim the header.", nameof(byteAudio));
// Trim the 44-byte WAV header from the byte array to get the actual audio data
byte[] trimmedByteAudio = new byte[byteAudio.Length - 44];
Buffer.BlockCopy(byteAudio, 44, trimmedByteAudio, 0, trimmedByteAudio.Length);
// Convert the trimmed byte audio data to a float array of audio samples
float[] samples = Convert16BitByteArrayToFloatAudioClipData(trimmedByteAudio);
if (samples.Length <= 0) throw new Exception("No samples created after conversion from byte array.");
const int channels = 1; // Mono audio
int sampleRate = int.Parse(stringSampleRate); // Convert the sample rate string to an integer
// Create an AudioClip using the converted audio samples and other parameters
AudioClip clip = AudioClip.Create("Audio Response", samples.Length, channels, sampleRate, false);
// Set the audio data for the AudioClip
clip.SetData(samples, 0);
return clip;
}
catch (Exception ex)
{
// Log the failure instead of swallowing it silently, then signal the caller with null
Logger.Error("Failed to decode audio response: " + ex.Message, Logger.LogCategory.Character);
return null;
}
}
/// <summary>
/// Converts a byte array representing 16-bit audio samples to a float array.
/// </summary>
/// <param name="source">Byte array containing 16-bit audio data</param>
/// <returns>Float array containing audio samples in the range [-1, 1]</returns>
private static float[] Convert16BitByteArrayToFloatAudioClipData(byte[] source)
{
const int x = sizeof(short); // Size of a short in bytes
int convertedSize = source.Length / x; // Number of short samples
float[] data = new float[convertedSize]; // Float array to hold the converted data
int byteIndex = 0; // Index for the byte array
int dataIndex = 0; // Index for the float array
// Convert each pair of bytes to a short and then to a float
while (byteIndex < source.Length)
{
byte firstByte = source[byteIndex];
byte secondByte = source[byteIndex + 1];
byteIndex += 2;
// Combine the two bytes to form a short (little endian)
short s = (short)((secondByte << 8) | firstByte);
// Convert the short value to a float in the range [-1, 1]
data[dataIndex] = s / 32768.0F; // Dividing by 32768.0 to normalize the range
dataIndex++;
}
return data;
}
public void StopAudioLoop()
{
_stopAudioPlayingLoop = true;
}
public class ResponseAudio
{
public AudioClip AudioClip;
public string AudioTranscript;
public bool IsFinal;
}
}
}
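The decoder above normalizes by 32768, so the little-endian byte pair { 0x00, 0x40 } (16384) maps to exactly 0.5f. A standalone sketch of the same arithmetic for verification (Pcm16Example is hypothetical and independent of Unity):

using System;

// Hypothetical console check mirroring Convert16BitByteArrayToFloatAudioClipData.
public static class Pcm16Example
{
    public static void Main()
    {
        byte[] source = { 0x00, 0x40, 0xFF, 0xBF }; // two little-endian samples: 16384 and -16385
        for (int i = 0; i < source.Length; i += 2)
        {
            short s = (short)((source[i + 1] << 8) | source[i]); // high byte << 8 | low byte
            Console.WriteLine(s / 32768.0f);                     // prints 0.5, then ≈ -0.5000305
        }
    }
}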

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 07c6d33f72d441209362753c7f5759bd
timeCreated: 1705425029

View File

@ -0,0 +1,215 @@
using System;
using System.Collections.Generic;
using UnityEngine;
namespace Convai.Scripts.Utils
{
[DefaultExecutionOrder(-101)]
public class ConvaiNPCManager : MonoBehaviour
{
private static readonly RaycastHit[] RaycastHits = new RaycastHit[1];
[Tooltip("Length of the ray used for detecting NPCs.")] [SerializeField]
private float rayLength = 2.0f;
[Tooltip("Angle from the ray's direction to keep the NPC active, even if not directly hit by the ray.")] [SerializeField]
private float visionConeAngle = 45f;
[Tooltip("Reference to the currently active NPC.")] [ReadOnly]
public ConvaiNPC activeConvaiNPC;
[Tooltip("Reference to the NPC that is currently near the player.")] [ReadOnly]
public ConvaiNPC nearbyNPC;
// Cache used to store NPC references and avoid redundant GetComponent calls.
private readonly Dictionary<GameObject, ConvaiNPC> _convaiNPCCache = new();
// Reference to the NPC that was last hit by the raycast.
private ConvaiNPC _lastHitNpc;
// Reference to the main camera used for ray casting
[SerializeField]
private Camera _mainCamera;
// Singleton instance of the NPC manager.
public static ConvaiNPCManager Instance { get; private set; }
private void Awake()
{
// Singleton pattern to ensure only one instance exists
if (Instance == null)
Instance = this;
else
Destroy(gameObject);
_mainCamera = Camera.main;
}
private void LateUpdate()
{
Ray ray = new(_mainCamera.transform.position, _mainCamera.transform.forward);
bool foundConvaiNPC = false;
if (Physics.RaycastNonAlloc(ray, RaycastHits, rayLength) > 0)
{
RaycastHit hit = RaycastHits[0];
nearbyNPC = GetConvaiNPC(hit.transform.gameObject);
if (nearbyNPC != null)
{
foundConvaiNPC = true;
if (_lastHitNpc != nearbyNPC && !CheckForNPCToNPCConversation(nearbyNPC))
{
Logger.DebugLog($"Player is near {nearbyNPC.gameObject.name}", Logger.LogCategory.Character);
UpdateActiveNPC(nearbyNPC);
}
}
}
if (!foundConvaiNPC && _lastHitNpc != null)
{
Vector3 toLastHitNPC = _lastHitNpc.transform.position - ray.origin;
float angleToLastHitNPC = Vector3.Angle(ray.direction, toLastHitNPC.normalized);
float distanceToLastHitNPC = toLastHitNPC.magnitude;
if (angleToLastHitNPC > visionConeAngle || distanceToLastHitNPC > rayLength * 1.2f)
{
Logger.DebugLog($"Player left {_lastHitNpc.gameObject.name}", Logger.LogCategory.Character);
UpdateActiveNPC(null);
}
}
}
private void OnDrawGizmos()
{
if (_mainCamera == null)
_mainCamera = Camera.main;
if (_mainCamera == null)
return;
Transform cameraTransform = _mainCamera.transform;
Vector3 rayOrigin = cameraTransform.position;
Vector3 rayDirection = cameraTransform.forward;
// Drawing the main ray
Gizmos.color = Color.blue;
Gizmos.DrawRay(rayOrigin, rayDirection.normalized * rayLength);
if (_lastHitNpc != null) DrawVisionConeArc(rayOrigin, rayDirection, cameraTransform.up);
}
private void DrawVisionConeArc(Vector3 rayOrigin, Vector3 rayDirection, Vector3 up)
{
const int arcResolution = 50; // number of segments to use for arc
float angleStep = 2 * visionConeAngle / arcResolution; // angle between each segment
Vector3 previousPoint = Quaternion.AngleAxis(-visionConeAngle, up) * rayDirection * rayLength;
for (int i = 1; i <= arcResolution; i++)
{
Vector3 nextPoint = Quaternion.AngleAxis(-visionConeAngle + angleStep * i, up) * rayDirection * rayLength;
Gizmos.DrawLine(rayOrigin + previousPoint, rayOrigin + nextPoint);
previousPoint = nextPoint;
}
Quaternion leftRotation = Quaternion.AngleAxis(-visionConeAngle, up);
Quaternion rightRotation = Quaternion.AngleAxis(visionConeAngle, up);
Vector3 leftDirection = leftRotation * rayDirection;
Vector3 rightDirection = rightRotation * rayDirection;
Gizmos.color = Color.yellow;
Gizmos.DrawLine(rayOrigin, rayOrigin + leftDirection.normalized * rayLength);
Gizmos.DrawLine(rayOrigin, rayOrigin + rightDirection.normalized * rayLength);
}
/// <summary>
/// Checks if the specified NPC is in conversation with another NPC.
/// </summary>
/// <param name="npc">The NPC to check.</param>
/// <returns>True if the NPC is in conversation with another NPC; otherwise, false.</returns>
public bool CheckForNPCToNPCConversation(ConvaiNPC npc)
{
return npc.TryGetComponent(out ConvaiGroupNPCController convaiGroupNPC) && convaiGroupNPC.IsInConversationWithAnotherNPC;
}
private void UpdateActiveNPC(ConvaiNPC newActiveNPC)
{
// Check if the new active NPC is different from the current active NPC.
if (activeConvaiNPC != newActiveNPC)
{
// Deactivate the currently active NPC, if any.
if (activeConvaiNPC != null) activeConvaiNPC.isCharacterActive = false;
// Update the reference to the new active NPC.
activeConvaiNPC = newActiveNPC;
_lastHitNpc = newActiveNPC; // Ensure the _lastHitNpc reference is updated accordingly.
// Activate the new NPC, if any.
if (newActiveNPC != null)
{
newActiveNPC.isCharacterActive = true;
Debug.Log($"Active NPC changed to {newActiveNPC.gameObject.name}");
}
// Invoke the OnActiveNPCChanged event, notifying other parts of the system of the change.
OnActiveNPCChanged?.Invoke(newActiveNPC);
}
}
/// <summary>
/// Sets the active NPC to the specified NPC.
/// </summary>
/// <param name="newActiveNPC">The NPC to set as active.</param>
public void SetActiveConvaiNPC(ConvaiNPC newActiveNPC)
{
// Delegates to the same activation logic used by the raycast-driven update,
// so external callers and the vision-cone detection stay in sync.
UpdateActiveNPC(newActiveNPC);
}
/// <summary>
/// Event that's triggered when the active NPC changes.
/// </summary>
public event Action<ConvaiNPC> OnActiveNPCChanged;
private ConvaiNPC GetConvaiNPC(GameObject obj)
{
if (!_convaiNPCCache.TryGetValue(obj, out ConvaiNPC npc))
{
npc = obj.GetComponent<ConvaiNPC>();
if (npc != null)
_convaiNPCCache[obj] = npc;
}
return npc;
}
/// <summary>
/// Gets the currently active ConvaiNPC.
/// </summary>
/// <returns>The currently active ConvaiNPC.</returns>
public ConvaiNPC GetActiveConvaiNPC()
{
return activeConvaiNPC;
}
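// Usage sketch (illustrative, not part of the original file): another component can
// react to focus changes by subscribing to OnActiveNPCChanged, pairing the
// subscription with an unsubscription to avoid dangling handlers:
//
//     private void OnEnable() => ConvaiNPCManager.Instance.OnActiveNPCChanged += HandleActiveNPCChanged;
//     private void OnDisable() => ConvaiNPCManager.Instance.OnActiveNPCChanged -= HandleActiveNPCChanged;
//     private void HandleActiveNPCChanged(ConvaiNPC npc) =>
//         Debug.Log(npc != null ? $"Now talking to {npc.gameObject.name}" : "No NPC in focus");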
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: c5777a8a41e942f5a05df965241ef598
timeCreated: 1696103142

View File

@ -0,0 +1,154 @@
using System;
using System.Collections;
using System.Linq;
using Convai.Scripts.Utils;
using TMPro;
using UnityEngine;
namespace Convai.Scripts
{
public class ConvaiPlayerInteractionManager : MonoBehaviour
{
private ConvaiChatUIHandler _convaiChatUIHandler;
private ConvaiCrosshairHandler _convaiCrosshairHandler;
private ConvaiNPC _convaiNPC;
private TMP_InputField _currentInputField;
private ConvaiInputManager _inputManager;
private bool _stopHandlingInput;
private void Start()
{
StartCoroutine(WatchForInputSubmission());
}
private void OnDestroy()
{
UnsubscribeFromInputFieldEvents();
StopHandlingInput();
}
public void Initialize(ConvaiNPC convaiNPC, ConvaiCrosshairHandler convaiCrosshairHandler, ConvaiChatUIHandler convaiChatUIHandler)
{
_convaiNPC = convaiNPC ? convaiNPC : throw new ArgumentNullException(nameof(convaiNPC));
_convaiCrosshairHandler = convaiCrosshairHandler ? convaiCrosshairHandler : throw new ArgumentNullException(nameof(convaiCrosshairHandler));
_convaiChatUIHandler = convaiChatUIHandler ? convaiChatUIHandler : throw new ArgumentNullException(nameof(convaiChatUIHandler));
_inputManager = ConvaiInputManager.Instance ? ConvaiInputManager.Instance : throw new InvalidOperationException("ConvaiInputManager instance not found.");
}
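// Wiring sketch (hypothetical, not part of the original file): a ConvaiNPC would
// typically create and initialize this manager once its dependencies exist, e.g.:
//
//     var interactionManager = gameObject.AddComponent<ConvaiPlayerInteractionManager>();
//     interactionManager.Initialize(this, crosshairHandler, chatUIHandler);
//
// The thrown exceptions above make missing dependencies fail fast at setup time.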
public void UpdateUserInput()
{
if (!UIUtilities.IsAnyInputFieldFocused()) HandleNPCInteraction();
if (!_convaiNPC.isCharacterActive) return;
HandleTextInput();
HandleVoiceInput();
}
private IEnumerator WatchForInputSubmission()
{
while (!_stopHandlingInput)
{
TMP_InputField inputFieldInScene = FindActiveInputField();
UpdateCurrentInputField(inputFieldInScene);
yield return null;
}
}
private void StopHandlingInput()
{
_stopHandlingInput = true;
}
private void UpdateCurrentInputField(TMP_InputField inputFieldInScene)
{
if (inputFieldInScene != null && _currentInputField != inputFieldInScene)
{
UnsubscribeFromInputFieldEvents();
_currentInputField = inputFieldInScene;
SubscribeToInputFieldEvents();
}
}
private void HandleInputSubmission(string input)
{
if (!_convaiNPC.isCharacterActive) return;
_convaiNPC.SendTextDataAsync(input);
_convaiChatUIHandler.SendPlayerText(input);
ClearInputField();
}
public TMP_InputField FindActiveInputField()
{
return _convaiChatUIHandler.GetCurrentUI().GetCanvasGroup().gameObject.GetComponentsInChildren<TMP_InputField>(true)
.FirstOrDefault(inputField => inputField.interactable);
}
private void ClearInputField()
{
if (_currentInputField != null)
{
_currentInputField.text = string.Empty;
_currentInputField.DeactivateInputField();
}
}
private void HandleTextInput()
{
if (_currentInputField != null && _currentInputField.isFocused)
{
if (_inputManager.WasTextSendKeyPressed())
HandleInputSubmission(_currentInputField.text);
else if (_inputManager.WasCursorLockKeyPressed())
ClearInputField();
}
}
private void HandleVoiceInput()
{
if (_inputManager.WasTalkKeyPressed() && !UIUtilities.IsAnyInputFieldFocused())
{
_convaiNPC.InterruptCharacterSpeech();
UpdateActionConfig();
_convaiNPC.StartListening();
}
else if (_inputManager.WasTalkKeyReleased() && !UIUtilities.IsAnyInputFieldFocused())
{
if (_convaiNPC.isCharacterActive && (_currentInputField == null || !_currentInputField.isFocused)) _convaiNPC.StopListening();
}
}
private void HandleNPCInteraction()
{
bool isNpcInConversation;
if (TryGetComponent(out ConvaiGroupNPCController convaiGroupNPC))
isNpcInConversation = convaiGroupNPC.IsInConversationWithAnotherNPC && ConvaiNPCManager.Instance.nearbyNPC == _convaiNPC;
else
isNpcInConversation = false;
if (isNpcInConversation && _inputManager.WasTalkKeyPressed())
{
NPC2NPCConversationManager.Instance.EndConversation(_convaiNPC.GetComponent<ConvaiGroupNPCController>());
_convaiNPC.InterruptCharacterSpeech();
_convaiNPC.StartListening();
}
}
public void UpdateActionConfig()
{
if (_convaiNPC.ActionConfig != null && _convaiCrosshairHandler != null)
_convaiNPC.ActionConfig.CurrentAttentionObject = _convaiCrosshairHandler.FindPlayerReferenceObject();
}
private void SubscribeToInputFieldEvents()
{
if (_currentInputField != null)
_currentInputField.onSubmit.AddListener(HandleInputSubmission);
}
private void UnsubscribeFromInputFieldEvents()
{
if (_currentInputField != null)
_currentInputField.onSubmit.RemoveListener(HandleInputSubmission);
}
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 2fc37bad9e2b4a5ab1f28e78cd40203a
timeCreated: 1705427710

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a9460cc9d8cacc34791f7a22e8b2eceb
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2df3c3aabcfefe74cb44efb454f919da
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f49deed203eacd243907160ac61a5a0b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,234 @@
// <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grpc/service.proto
// </auto-generated>
// Original file comments:
// service.proto
#pragma warning disable 0414, 1591
#region Designer generated code
using grpc = global::Grpc.Core;
namespace Service {
public static partial class ConvaiService
{
static readonly string __ServiceName = "service.ConvaiService";
static readonly grpc::Marshaller<global::Service.HelloRequest> __Marshaller_service_HelloRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.HelloRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.HelloResponse> __Marshaller_service_HelloResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.HelloResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.STTRequest> __Marshaller_service_STTRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.STTRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.STTResponse> __Marshaller_service_STTResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.STTResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseRequest> __Marshaller_service_GetResponseRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseResponse> __Marshaller_service_GetResponseResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseRequestSingle> __Marshaller_service_GetResponseRequestSingle = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseRequestSingle.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.FeedbackRequest> __Marshaller_service_FeedbackRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.FeedbackRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.FeedbackResponse> __Marshaller_service_FeedbackResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.FeedbackResponse.Parser.ParseFrom);
static readonly grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse> __Method_Hello = new grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse>(
grpc::MethodType.Unary,
__ServiceName,
"Hello",
__Marshaller_service_HelloRequest,
__Marshaller_service_HelloResponse);
static readonly grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse> __Method_HelloStream = new grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"HelloStream",
__Marshaller_service_HelloRequest,
__Marshaller_service_HelloResponse);
static readonly grpc::Method<global::Service.STTRequest, global::Service.STTResponse> __Method_SpeechToText = new grpc::Method<global::Service.STTRequest, global::Service.STTResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"SpeechToText",
__Marshaller_service_STTRequest,
__Marshaller_service_STTResponse);
static readonly grpc::Method<global::Service.GetResponseRequest, global::Service.GetResponseResponse> __Method_GetResponse = new grpc::Method<global::Service.GetResponseRequest, global::Service.GetResponseResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"GetResponse",
__Marshaller_service_GetResponseRequest,
__Marshaller_service_GetResponseResponse);
static readonly grpc::Method<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse> __Method_GetResponseSingle = new grpc::Method<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse>(
grpc::MethodType.ServerStreaming,
__ServiceName,
"GetResponseSingle",
__Marshaller_service_GetResponseRequestSingle,
__Marshaller_service_GetResponseResponse);
static readonly grpc::Method<global::Service.FeedbackRequest, global::Service.FeedbackResponse> __Method_SubmitFeedback = new grpc::Method<global::Service.FeedbackRequest, global::Service.FeedbackResponse>(
grpc::MethodType.Unary,
__ServiceName,
"SubmitFeedback",
__Marshaller_service_FeedbackRequest,
__Marshaller_service_FeedbackResponse);
/// <summary>Service descriptor</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::Service.ServiceReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of ConvaiService</summary>
[grpc::BindServiceMethod(typeof(ConvaiService), "BindService")]
public abstract partial class ConvaiServiceBase
{
public virtual global::System.Threading.Tasks.Task<global::Service.HelloResponse> Hello(global::Service.HelloRequest request, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task HelloStream(grpc::IAsyncStreamReader<global::Service.HelloRequest> requestStream, grpc::IServerStreamWriter<global::Service.HelloResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task SpeechToText(grpc::IAsyncStreamReader<global::Service.STTRequest> requestStream, grpc::IServerStreamWriter<global::Service.STTResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task GetResponse(grpc::IAsyncStreamReader<global::Service.GetResponseRequest> requestStream, grpc::IServerStreamWriter<global::Service.GetResponseResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::IServerStreamWriter<global::Service.GetResponseResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task<global::Service.FeedbackResponse> SubmitFeedback(global::Service.FeedbackRequest request, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
}
/// <summary>Client for ConvaiService</summary>
public partial class ConvaiServiceClient : grpc::ClientBase<ConvaiServiceClient>
{
/// <summary>Creates a new client for ConvaiService</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public ConvaiServiceClient(grpc::ChannelBase channel) : base(channel)
{
}
/// <summary>Creates a new client for ConvaiService that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public ConvaiServiceClient(grpc::CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
protected ConvaiServiceClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients.</summary>
/// <param name="configuration">The client configuration.</param>
protected ConvaiServiceClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
public virtual global::Service.HelloResponse Hello(global::Service.HelloRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return Hello(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual global::Service.HelloResponse Hello(global::Service.HelloRequest request, grpc::CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_Hello, null, options, request);
}
public virtual grpc::AsyncUnaryCall<global::Service.HelloResponse> HelloAsync(global::Service.HelloRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return HelloAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncUnaryCall<global::Service.HelloResponse> HelloAsync(global::Service.HelloRequest request, grpc::CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_Hello, null, options, request);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.HelloRequest, global::Service.HelloResponse> HelloStream(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return HelloStream(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.HelloRequest, global::Service.HelloResponse> HelloStream(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_HelloStream, null, options);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.STTRequest, global::Service.STTResponse> SpeechToText(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SpeechToText(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.STTRequest, global::Service.STTResponse> SpeechToText(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_SpeechToText, null, options);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.GetResponseRequest, global::Service.GetResponseResponse> GetResponse(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return GetResponse(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.GetResponseRequest, global::Service.GetResponseResponse> GetResponse(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_GetResponse, null, options);
}
public virtual grpc::AsyncServerStreamingCall<global::Service.GetResponseResponse> GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return GetResponseSingle(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncServerStreamingCall<global::Service.GetResponseResponse> GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::CallOptions options)
{
return CallInvoker.AsyncServerStreamingCall(__Method_GetResponseSingle, null, options, request);
}
public virtual global::Service.FeedbackResponse SubmitFeedback(global::Service.FeedbackRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SubmitFeedback(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual global::Service.FeedbackResponse SubmitFeedback(global::Service.FeedbackRequest request, grpc::CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_SubmitFeedback, null, options, request);
}
public virtual grpc::AsyncUnaryCall<global::Service.FeedbackResponse> SubmitFeedbackAsync(global::Service.FeedbackRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SubmitFeedbackAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncUnaryCall<global::Service.FeedbackResponse> SubmitFeedbackAsync(global::Service.FeedbackRequest request, grpc::CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_SubmitFeedback, null, options, request);
}
/// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
protected override ConvaiServiceClient NewInstance(ClientBaseConfiguration configuration)
{
return new ConvaiServiceClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
public static grpc::ServerServiceDefinition BindService(ConvaiServiceBase serviceImpl)
{
return grpc::ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_Hello, serviceImpl.Hello)
.AddMethod(__Method_HelloStream, serviceImpl.HelloStream)
.AddMethod(__Method_SpeechToText, serviceImpl.SpeechToText)
.AddMethod(__Method_GetResponse, serviceImpl.GetResponse)
.AddMethod(__Method_GetResponseSingle, serviceImpl.GetResponseSingle)
.AddMethod(__Method_SubmitFeedback, serviceImpl.SubmitFeedback).Build();
}
/// <summary>Register service method with a service binder with or without implementation. Useful when customizing the service binding logic.
/// Note: this method is part of an experimental API that can change or be removed without any prior notice.</summary>
/// <param name="serviceBinder">Service methods will be bound by calling <c>AddMethod</c> on this object.</param>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
public static void BindService(grpc::ServiceBinderBase serviceBinder, ConvaiServiceBase serviceImpl)
{
serviceBinder.AddMethod(__Method_Hello, serviceImpl == null ? null : new grpc::UnaryServerMethod<global::Service.HelloRequest, global::Service.HelloResponse>(serviceImpl.Hello));
serviceBinder.AddMethod(__Method_HelloStream, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.HelloRequest, global::Service.HelloResponse>(serviceImpl.HelloStream));
serviceBinder.AddMethod(__Method_SpeechToText, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.STTRequest, global::Service.STTResponse>(serviceImpl.SpeechToText));
serviceBinder.AddMethod(__Method_GetResponse, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.GetResponseRequest, global::Service.GetResponseResponse>(serviceImpl.GetResponse));
serviceBinder.AddMethod(__Method_GetResponseSingle, serviceImpl == null ? null : new grpc::ServerStreamingServerMethod<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse>(serviceImpl.GetResponseSingle));
serviceBinder.AddMethod(__Method_SubmitFeedback, serviceImpl == null ? null : new grpc::UnaryServerMethod<global::Service.FeedbackRequest, global::Service.FeedbackResponse>(serviceImpl.SubmitFeedback));
}
}
}
#endregion
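// Usage sketch (illustrative, not generated code): opening a channel and driving the
// duplex GetResponse stream. The endpoint and the HandleResponse helper are assumptions.
//
//     var channel = new grpc::Channel("stream.convai.com", grpc::ChannelCredentials.SecureSsl);
//     var client = new Service.ConvaiService.ConvaiServiceClient(channel);
//     using var call = client.GetResponse();
//     await call.RequestStream.WriteAsync(new Service.GetResponseRequest { /* config, then audio chunks */ });
//     await call.RequestStream.CompleteAsync();
//     while (await call.ResponseStream.MoveNext(System.Threading.CancellationToken.None))
//         HandleResponse(call.ResponseStream.Current); // hypothetical handler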

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 04b2abb2eec68fa4d8d9f3f1c40b8ad9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 883d1528e5d0ee54ab5519d64712aeca
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
using UnityEngine;
public static class GameObjectExtension
{
public static T GetOrAddComponent<T>(this GameObject gameObject) where T : Component
{
return gameObject.TryGetComponent(out T t) ? t : gameObject.AddComponent<T>();
}
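// Example (illustrative): guarantees a component exists without ever duplicating it.
// npcObject is a hypothetical GameObject reference.
//
//     AudioSource source = npcObject.GetOrAddComponent<AudioSource>();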
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 60b5cd27ba404cd42a0608590d0d6b96
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,11 @@
using UnityEngine;
using UnityEngine.UI;
public static class ImageExtensions
{
public static Image WithColorValue(this Image image, float? r = null, float? g = null, float? b = null, float? a = null)
{
image.color = new Color(r ?? image.color.r, g ?? image.color.g, b ?? image.color.b, a ?? image.color.a);
return image;
}
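// Example (illustrative): tweak a single channel while leaving the rest untouched,
// e.g. fading only the alpha of a hypothetical overlay image:
//
//     overlayImage.WithColorValue(a: 0.25f);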
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bde7d53e94e232744aa2b649f44847f5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,12 @@
using UnityEngine;
namespace Convai.Scripts.Utils
{
public static class RectExtension
{
public static Rect With(this Rect rect, float? x = null, float? y = null, float? height = null, float? width = null)
{
return new Rect(x ?? rect.x, y ?? rect.y, width ?? rect.width, height ?? rect.height);
}
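// Example (illustrative): copy a rect while overriding selected fields,
// e.g. doubling the width of a hypothetical source rect without touching its position:
//
//     Rect wider = sourceRect.With(width: sourceRect.width * 2f);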
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e7bf1bb407e9f614ba1cdbc8ff01bdb0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,9 @@
using UnityEngine;
public static class SkinnedMeshRendererExtension
{
public static void SetBlendShapeWeightInterpolate(this SkinnedMeshRenderer renderer, int index, float value, float weight)
{
renderer.SetBlendShapeWeight(index, Mathf.Lerp(renderer.GetBlendShapeWeight(index), value, weight));
}
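// Example (illustrative): smooth a viseme blendshape toward a target value each frame.
// The index, target value, and smoothing factor are assumptions for a hypothetical head renderer:
//
//     headRenderer.SetBlendShapeWeightInterpolate(index: 12, value: 80f, weight: Time.deltaTime * 10f);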
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e3db3cb9745d4844fa122272c2f7b11d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: addfb9a10dbf1064d9ea03fa2a2cf452
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f7ee91cfe70e2af439a2ed9b7557855d
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,840 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Service;
using UnityEngine;
using UnityEngine.AI;
using UnityEngine.Serialization;
using Valve.VR.InteractionSystem;
namespace Convai.Scripts.Utils
{
// STEP 1: Add the enum for your custom action here.
public enum ActionChoice
{
None,
Jump,
Crouch,
MoveTo,
PickUp,
Drop,
Bring,
OpenWeb,
Throw
}
/// <summary>
/// DISCLAIMER: The action API is experimental and can misbehave. In the meantime, feel free to try it out and play
/// around with it.
/// </summary>
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Convai Actions Handler")]
[HelpURL(
"https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview/convaiactionshandler.cs")]
public class ConvaiActionsHandler : MonoBehaviour
{
public GameObject player;
public GameObject start;
[SerializeField] public ActionMethod[] actionMethods;
public List<string> actionResponseList = new();
private readonly List<ConvaiAction> _actionList = new();
public readonly ActionConfig ActionConfig = new();
private List<string> _actions = new();
private ConvaiNPC _currentNPC;
private ConvaiInteractablesData _interactablesData;
// Awake is called when the script instance is being loaded
private void Awake()
{
// Find the global action settings object in the scene
_interactablesData = FindObjectOfType<ConvaiInteractablesData>();
// Check if the global action settings object is missing
if (_interactablesData == null)
// Log an error message to indicate missing Convai Action Settings
Logger.Error("Convai Action Settings missing. Please create a game object that handles actions.",
Logger.LogCategory.Character);
// Check if this GameObject has a ConvaiNPC component attached
if (TryGetComponent(out ConvaiNPC npc))
// If it does, set the current NPC to this GameObject
_currentNPC = npc;
// Iterate through each action method and add its name to the action configuration
foreach (ActionMethod actionMethod in actionMethods) ActionConfig.Actions.Add(actionMethod.action);
if (_interactablesData != null)
{
// Iterate through each character in global action settings and add them to the action configuration
foreach (ConvaiInteractablesData.Character character in _interactablesData.Characters)
{
ActionConfig.Types.Character rpcCharacter = new()
{
Name = character.Name,
Bio = character.Bio
};
ActionConfig.Characters.Add(rpcCharacter);
}
// Iterate through each object in global action settings and add them to the action configuration
foreach (ConvaiInteractablesData.Object eachObject in _interactablesData.Objects)
{
ActionConfig.Types.Object rpcObject = new()
{
Name = eachObject.Name,
Description = eachObject.Description
};
ActionConfig.Objects.Add(rpcObject);
}
}
}
private void Reset()
{
actionMethods = new ActionMethod[]
{
new() { action = "Move To", actionChoice = ActionChoice.MoveTo },
new() { action = "Pick Up", actionChoice = ActionChoice.PickUp },
new() { action = "Dance", animationName = "Dance", actionChoice = ActionChoice.None },
new() { action = "Drop", actionChoice = ActionChoice.Drop },
new() { action = "Jump", actionChoice = ActionChoice.Jump },
new() { action = "Throw", animationName = "Throwing", actionChoice = ActionChoice.Throw },
new() { action = "Bring", actionChoice = ActionChoice.Bring }
};
}
// Start is called before the first frame update
private void Start()
{
// Set up the action configuration
#region Actions Setup
// Set the classification of the action configuration to "multistep"
ActionConfig.Classification = "multistep";
// Log the configured action information
Logger.DebugLog(ActionConfig, Logger.LogCategory.Actions);
#endregion
// Start playing the action list using a coroutine
StartCoroutine(PlayActionList());
}
private void Update()
{
if (actionResponseList.Count > 0)
{
ParseActions(actionResponseList[0]);
actionResponseList.RemoveAt(0);
}
}
public void ParseActions(string actionsString)
{
// Trim the input string to remove leading and trailing spaces
actionsString = actionsString.Trim();
Logger.DebugLog($"Parsing actions from: {actionsString}", Logger.LogCategory.Actions);
// Split the trimmed actions string into a list of individual actions
_actions = new List<string>(actionsString.Split(", "));
// Iterate through each action in the list, splitting it into its individual words
foreach (List<string> actionWords in _actions.Select(t => new List<string>(t.Split(" "))))
{
Logger.Info(
$"Processing action: {string.Join(" ", actionWords)}",
Logger.LogCategory.Actions); // Info: Checking each action being processed
for (int j = 0; j < actionWords.Count; j++)
{
// Separate the words into two parts: verb and object
string[] tempString1 = new string[j + 1];
string[] tempString2 = new string[actionWords.Count - j - 1];
Array.Copy(actionWords.ToArray(), tempString1, j + 1);
Array.Copy(actionWords.ToArray(), j + 1, tempString2,
0, actionWords.Count - j - 1);
// Check if any verb word ends with "s" and remove it
for (int k = 0; k < tempString1.Length; k++)
if (tempString1[k].EndsWith("s"))
tempString1[k] = tempString1[k].Remove(tempString1[k].Length - 1);
// Iterate through each defined Convai action
foreach (ActionMethod convaiAction in actionMethods)
// Check if the parsed verb matches any defined action
if (string.Equals(convaiAction.action, string.Join(" ", tempString1),
StringComparison.CurrentCultureIgnoreCase))
{
GameObject tempGameObject = null;
// Iterate through each object in global action settings to find a match
foreach (ConvaiInteractablesData.Object @object in _interactablesData.Objects)
if (string.Equals(@object.Name, string.Join(" ", tempString2),
StringComparison.CurrentCultureIgnoreCase))
{
Logger.DebugLog($"Active Target: {string.Join(" ", tempString2).ToLower()}",
Logger.LogCategory.Actions);
tempGameObject = @object.gameObject;
}
// Iterate through each character in global action settings to find a match
foreach (ConvaiInteractablesData.Character character in _interactablesData.Characters)
if (string.Equals(character.Name, string.Join(" ", tempString2),
StringComparison.CurrentCultureIgnoreCase))
{
Logger.DebugLog($"Active Target: {string.Join(" ", tempString2).ToLower()}",
Logger.LogCategory.Actions);
tempGameObject = character.gameObject;
}
if (tempGameObject != null)
Logger.DebugLog(
$"Found matching target: {tempGameObject.name} for action: {string.Join(" ", tempString1).ToLower()}",
Logger.LogCategory.Actions); // DebugLog: For successful matching
else
Logger.Warn(
$"No matching target found for action: {string.Join(" ", tempString1).ToLower()}",
Logger.LogCategory.Actions); // Warning: When expected matches aren't found
// Add the parsed action to the action list
_actionList.Add(new ConvaiAction(convaiAction.actionChoice, tempGameObject,
convaiAction.animationName));
break; // Break the loop as the action is found
}
}
}
}
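// Worked example (illustrative): for the response "Picks Up Red Cube, Move To Player",
// the string is split on ", " into ["Picks Up Red Cube", "Move To Player"]; trailing "s"
// is stripped from candidate verb words, so "Picks Up" matches the configured "Pick Up"
// ActionMethod, while "Red Cube" and "Player" are resolved against the interactables data.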
/// <summary>
/// Event that is triggered when an action starts.
/// </summary>
/// <remarks>
/// This event can be subscribed to in order to perform custom logic when an action starts.
/// The event provides the name of the action and the GameObject that the action is targeting.
/// </remarks>
public event Action<string, GameObject> ActionStarted;
/// <summary>
/// Event that is triggered when an action ends.
/// </summary>
/// <remarks>
/// This event can be subscribed to in order to perform custom logic when an action ends.
/// The event provides the name of the action and the GameObject that the action was targeting.
/// </remarks>
public event Action<string, GameObject> ActionEnded;
/// <summary>
/// This coroutine handles playing the actions in the action list.
/// </summary>
/// <returns></returns>
private IEnumerator PlayActionList()
{
while (true)
// Check if there are actions in the action list
if (_actionList.Count > 0)
{
// Call the DoAction function for the first action in the list and wait until it's done
yield return DoAction(_actionList[0]);
// Remove the completed action from the list
_actionList.RemoveAt(0);
}
else
{
// If there are no actions in the list, yield to wait for the next frame
yield return null;
}
}
private IEnumerator DoAction(ConvaiAction action)
{
// STEP 2: Add the function call for your action here, corresponding to your enum value.
// Remember to yield on the call if it is an IEnumerator (coroutine) function.
// Use a switch statement to handle different action choices based on the ActionChoice enum
switch (action.Verb)
{
case ActionChoice.MoveTo:
// Call the MoveTo function and yield until it's completed
yield return MoveTo(action.Target);
break;
case ActionChoice.PickUp:
// Call the PickUp function and yield until it's completed
yield return PickUp(action.Target);
break;
case ActionChoice.Drop:
// Call the Drop function
yield return Drop(action.Target);
break;
case ActionChoice.Jump:
// Call the Jump function
Jump();
break;
case ActionChoice.Crouch:
// Call the Crouch function and yield until it's completed
yield return Crouch();
break;
case ActionChoice.None:
// Call the AnimationActions function and yield until it's completed
yield return AnimationActions(action.Animation);
break;
case ActionChoice.OpenWeb:
OpenWebsite();
break;
case ActionChoice.Throw:
yield return Throw(action.Target);
break;
case ActionChoice.Bring:
yield return Bring(action.Target);
break;
}
// Yield once to ensure the coroutine advances to the next frame
yield return null;
}
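// Extension sketch (hypothetical, following STEP 1-3 in this file): adding a "Wave" action.
//   STEP 1: extend the enum            -> public enum ActionChoice { ..., Wave }
//   STEP 2: handle it in DoAction      -> case ActionChoice.Wave: yield return AnimationActions("Waving"); break;
//   STEP 3: expose it in the inspector -> add an ActionMethod with action = "Wave" and actionChoice = ActionChoice.Wave.
// The "Waving" animator state name is an assumption; it must exist in the character's Animator Controller.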
private void OpenWebsite()
{
// Specify the URL you want to open
string url = "https://www.convai.com";
// Use Application.OpenURL to open the web page
Application.OpenURL(url);
}
/// <summary>
/// This method is a coroutine that handles playing an animation for Convai NPC.
/// The method takes in the name of the animation to be played as a string parameter.
/// </summary>
/// <param name="animationName"> The name of the animation to be played. </param>
/// <returns> A coroutine that plays the animation. </returns>
private IEnumerator AnimationActions(string animationName)
{
// Logging the action of initiating the animation with the provided animation name.
Logger.DebugLog("Doing animation: " + animationName, Logger.LogCategory.Actions);
// Attempting to get the Animator component attached to the current NPC object.
// The Animator component is responsible for controlling animations on the GameObject.
Animator animator = _currentNPC.GetComponent<Animator>();
// Converting the provided animation name to its corresponding hash code.
// This is a more efficient way to refer to animations and Animator states.
int animationHash = Animator.StringToHash(animationName);
// Check if the Animator component has a state with the provided hash code.
// This is a safety check to prevent runtime errors if the animation is not found.
if (!animator.HasState(0, animationHash))
{
// Logging a message to indicate that the animation was not found.
Logger.DebugLog("Could not find an animator state named: " + animationName, Logger.LogCategory.Actions);
// Exiting the coroutine early since the animation is not available.
yield break;
}
// Playing the animation with a cross-fade transition.
// The second parameter '0.1f' specifies the duration of the cross-fade.
animator.CrossFadeInFixedTime(animationHash, 0.1f);
// Waiting for a short duration (just over the cross-fade time) to allow the animation transition to start.
// This ensures that subsequent code runs after the animation has started playing.
yield return new WaitForSeconds(0.11f);
// Getting information about the current animation clip that is playing.
AnimatorClipInfo[] clipInfo = animator.GetCurrentAnimatorClipInfo(0);
// Checking if there is no animation clip information available.
if (clipInfo == null || clipInfo.Length == 0)
{
// Logging a message to indicate that there are no animation clips associated with the state.
Logger.DebugLog("Animator state named: " + animationName + " has no associated animation clips",
Logger.LogCategory.Actions);
// Exiting the coroutine as there is no animation to play.
yield break;
}
// Defining variables to store the length and name of the animation clip.
float length = 0;
string animationClipName = "";
// Iterating through the array of animation clips to find the one that is currently playing.
foreach (AnimatorClipInfo clipInf in clipInfo)
{
// Logging the name of the animation clip for debugging purposes.
Logger.DebugLog("Clip name: " + clipInf.clip.name, Logger.LogCategory.Actions);
// Storing the current animation clip in a local variable for easier access.
AnimationClip clip = clipInf.clip;
// Checking if the animation clip is valid.
if (clip != null)
{
// Storing the length and name of the animation clip.
length = clip.length;
animationClipName = clip.name;
// Exiting the loop as we've found the information we need.
break;
}
}
// Checking if a valid animation clip was found.
if (length > 0.0f)
{
// Logging a message indicating that the animation is now playing.
Logger.DebugLog(
"Playing the animation " + animationClipName + " from the Animator State " + animationName +
" for " + length + " seconds", Logger.LogCategory.Actions);
// Waiting for the duration of the animation to allow it to play out.
yield return new WaitForSeconds(length);
}
else
{
// Logging a message to indicate that no valid animation clips were found or their length was zero.
Logger.DebugLog(
"Animator state named: " + animationName +
" has no valid animation clips or they have a length of 0", Logger.LogCategory.Actions);
// Exiting the coroutine early.
yield break;
}
// Transitioning back to the idle animation.
// It is assumed that an "Idle" animation exists and is set up in your Animator Controller.
animator.CrossFadeInFixedTime(Animator.StringToHash("Idle"), 0.1f);
// Yielding to wait for one frame to ensure that the coroutine progresses to the next frame.
// This is often done at the end of a coroutine to prevent issues with Unity's execution order.
yield return null;
}
/// <summary>
/// Registers the provided methods to the ActionStarted and ActionEnded events.
/// This allows external code to subscribe to these events and react when they are triggered.
/// </summary>
/// <param name="onActionStarted">
/// The method to be called when an action starts. It should accept a string (the action
/// name) and a GameObject (the target of the action).
/// </param>
/// <param name="onActionEnded">
/// The method to be called when an action ends. It should accept a string (the action name)
/// and a GameObject (the target of the action).
/// </param>
public void RegisterForActionEvents(Action<string, GameObject> onActionStarted,
Action<string, GameObject> onActionEnded)
{
ActionStarted += onActionStarted;
ActionEnded += onActionEnded;
}
/// <summary>
/// Unregisters the provided methods from the ActionStarted and ActionEnded events.
/// This allows external code to unsubscribe from these events when they are no longer interested in them.
/// </summary>
/// <param name="onActionStarted">
/// The method to be removed from the ActionStarted event. It should be the same method that
/// was previously registered.
/// </param>
/// <param name="onActionEnded">
/// The method to be removed from the ActionEnded event. It should be the same method that was
/// previously registered.
/// </param>
public void UnregisterForActionEvents(Action<string, GameObject> onActionStarted,
Action<string, GameObject> onActionEnded)
{
ActionStarted -= onActionStarted;
ActionEnded -= onActionEnded;
}
[Serializable]
public class ActionMethod
{
[FormerlySerializedAs("Action")]
[SerializeField]
public string action;
// Omitted as unnecessary:
// [SerializeField] public ActionType actionType;
[SerializeField] public string animationName;
[SerializeField] public ActionChoice actionChoice;
}
private class ConvaiAction
{
public ConvaiAction(ActionChoice verb, GameObject target, string animation)
{
Verb = verb;
Target = target;
Animation = animation;
}
#region 04. Public variables
public readonly string Animation;
public readonly GameObject Target;
public readonly ActionChoice Verb;
#endregion
}
// STEP 3: Add the function for your action here.
#region Action Implementation Methods
private IEnumerator Throw(GameObject target)
{
StartCoroutine(MoveTo(target));
yield return new WaitForSeconds(2f);
_currentNPC.transform.LookAt(player.transform.position);
_currentNPC.GetComponent<Animator>().CrossFade(Animator.StringToHash(name: "Throwing"), normalizedTransitionDuration: 0.05f);
yield return new WaitForSeconds(1.5f);
target.transform.parent = null;
target.SetActive(true);
target.transform.position += new Vector3(x: 0.5f, y: 1.0f, z: 0.5f);
target.transform.rotation = Quaternion.identity;
Vector3 throwDirection = (player.transform.position - target.transform.position).normalized;
target.GetComponent<Rigidbody>().AddForce(throwDirection * 10f, ForceMode.VelocityChange);
yield return new WaitForSeconds(1.5f);
_currentNPC.GetComponent<Animator>().CrossFade(Animator.StringToHash(name: "Idle"), normalizedTransitionDuration: 0.05f);
yield return new WaitForSeconds(2.0f);
}
private IEnumerator Bring(GameObject target)
{
// Notify that the action has started
ActionStarted?.Invoke("Bring", target);
// Move to the target and wait for it to finish
yield return StartCoroutine(MoveTo(target));
Debug.Log("xxx");
// Wait for 2 seconds after reaching the target
// Pick up the target
yield return StartCoroutine(PickUp(target));
// Wait for 2 seconds after reaching the target
//yield return new WaitForSeconds(1f);
// Move back to the start and wait for it to finish
yield return StartCoroutine(MoveTo(start));
// Wait for 4 seconds after reaching the start
// Drop the target
yield return StartCoroutine(Drop(target));
// Notify that the action has ended
ActionEnded?.Invoke("Bring", target);
}
private IEnumerator Crouch()
{
ActionStarted?.Invoke("Crouch", _currentNPC.gameObject);
Logger.DebugLog("Crouching!", Logger.LogCategory.Actions);
Animator animator = _currentNPC.GetComponent<Animator>();
animator.CrossFadeInFixedTime(Animator.StringToHash("Crouch"), 0.1f);
// Wait slightly longer than the cross-fade duration so the Animator has transitioned to the new state.
yield return new WaitForSeconds(0.11f);
AnimatorClipInfo[] clipInfo = animator.GetCurrentAnimatorClipInfo(0);
if (clipInfo == null || clipInfo.Length == 0)
{
Logger.DebugLog("No animation clips found for crouch state!", Logger.LogCategory.Actions);
yield break;
}
float length = clipInfo[0].clip.length;
_currentNPC.GetComponents<CapsuleCollider>()[0].height = 1.2f;
_currentNPC.GetComponents<CapsuleCollider>()[0].center = new Vector3(0, 0.6f, 0);
if (_currentNPC.GetComponents<CapsuleCollider>().Length > 1)
{
_currentNPC.GetComponents<CapsuleCollider>()[1].height = 1.2f;
_currentNPC.GetComponents<CapsuleCollider>()[1].center = new Vector3(0, 0.6f, 0);
}
yield return new WaitForSeconds(length);
animator.CrossFadeInFixedTime(Animator.StringToHash("Idle"), 0.1f);
yield return null;
ActionEnded?.Invoke("Crouch", _currentNPC.gameObject);
}
/// <summary>
/// Coroutine to move the NPC towards a specified target using NavMeshAgent.
/// </summary>
/// <param name="target">The target GameObject to move towards.</param>
private IEnumerator MoveTo(GameObject target)
{
// Invoke the ActionStarted event with the "MoveTo" action and the target GameObject.
ActionStarted?.Invoke("MoveTo", target);
// If the target is null or inactive, log it and fall back to the start position.
if (target == null || !target.activeInHierarchy)
{
Logger.DebugLog("MoveTo target is null or inactive; falling back to the start position.", Logger.LogCategory.Actions);
target = start;
}
// Log that the NPC is starting the movement towards the target.
Logger.DebugLog($"Moving to Target: {target.name}", Logger.LogCategory.Actions);
// Start the "Walking" animation.
Animator animator = _currentNPC.GetComponent<Animator>();
// Crossfade to the "Walking" animation state with a slight transition delay for smoother visual effect.
animator.CrossFade(Animator.StringToHash("Walking"), 0.01f);
// Disable root motion to prevent conflicts between AI navigation and the automatic application of root motion.
// This helps avoid unexpected behaviors and ensures smoother control during AI navigation.
animator.applyRootMotion = false;
// Get the NavMeshAgent component from the NPC.
NavMeshAgent navMeshAgent = _currentNPC.GetComponent<NavMeshAgent>();
navMeshAgent.updateRotation = false;
// Calculate the target destination, considering the target's forward direction or default offset.
Vector3 targetDestination = target.transform.position;
// Check if the target has a Renderer component.
if (target.TryGetComponent(out Renderer renderer))
{
// If yes, calculate the offset based on the size of the renderer along the z-axis.
float zOffset = renderer.bounds.size.z;
targetDestination += zOffset * target.transform.forward;
}
else
{
// If no Renderer component is found, use a default offset of half the target's forward direction.
// This is useful when the target object does not have a visual representation (Renderer).
Vector3 standardOffset = 0.5f * target.transform.forward;
targetDestination += standardOffset;
}
// Set the destination for the NavMeshAgent and yield for one frame.
navMeshAgent.SetDestination(targetDestination);
yield return null;
// Set the rotation speed for the character's rotation towards the movement direction.
float rotationSpeed = 5;
// Loop until the character is within the stopping distance to the target.
while (navMeshAgent.remainingDistance > navMeshAgent.stoppingDistance)
{
// Ensure the target is still active during the movement.
if (!target.activeInHierarchy)
{
// Log and break if the target has been deactivated during the movement.
Logger.DebugLog("Target deactivated during movement.", Logger.LogCategory.Actions);
yield break;
}
// If the NPC is not moving yet, wait a frame and re-check; computing a look
// rotation from a zero velocity vector would produce a Unity warning.
if (navMeshAgent.velocity.sqrMagnitude < Mathf.Epsilon)
{
yield return null;
continue;
}
// Calculate the rotation towards the movement direction.
Quaternion rotation = Quaternion.LookRotation(navMeshAgent.velocity.normalized);
rotation.x = 0;
rotation.z = 0;
// Smoothly rotate the NPC towards the calculated rotation.
transform.rotation = Quaternion.Slerp(transform.rotation, rotation, rotationSpeed * Time.deltaTime);
// Yield until the next frame.
yield return null;
}
// Transition to the "Idle" animation once the NPC has reached the target.
animator.CrossFade(Animator.StringToHash("Idle"), 0.1f);
// If this is the only action in the queue, rotate the NPC to face the camera.
if (_actions.Count == 1)
{
Vector3 direction = (Camera.main.transform.position - transform.position).normalized;
Quaternion targetRotation = Quaternion.LookRotation(direction);
float elapsedTime = 0f;
float rotationTime = 2f;
// Smoothly rotate the NPC towards the camera direction.
while (elapsedTime < rotationTime)
{
targetRotation.x = 0;
targetRotation.z = 0;
transform.rotation =
Quaternion.Slerp(transform.rotation, targetRotation, elapsedTime / rotationTime);
elapsedTime += Time.deltaTime;
yield return null;
}
}
// Re-enabling root motion, which was disabled during AI navigation movement to avoid conflicts.
animator.applyRootMotion = true;
// Invoke the ActionEnded event with the "MoveTo" action and the target GameObject.
ActionEnded?.Invoke("MoveTo", target);
}
/// <summary>
/// Coroutine to pick up a target GameObject, adjusting the NPC's rotation and playing animations.
/// </summary>
/// <param name="target">The target GameObject to pick up.</param>
private IEnumerator PickUp(GameObject target)
{
// Invoke the ActionStarted event with the "PickUp" action and the target GameObject.
ActionStarted?.Invoke("PickUp", target);
// Check if the target GameObject is null. If it is, log an error and exit the coroutine.
if (target == null)
{
Logger.DebugLog("Target is null! Exiting PickUp coroutine.", Logger.LogCategory.Actions);
yield break;
}
// Check if the target GameObject is active. If not, log an error and exit the coroutine.
if (!target.activeInHierarchy)
{
Logger.DebugLog($"Target: {target.name} is inactive! Exiting PickUp coroutine.",
Logger.LogCategory.Actions);
yield break;
}
StartCoroutine(MoveTo(target));
yield return new WaitForSeconds(1);
// Calculate the direction from the NPC to the target, ignoring the vertical (y) component.
Vector3 direction = (target.transform.position - transform.position).normalized;
direction.y = 0;
// Calculate the target rotation to face the target direction.
Quaternion targetRotation = Quaternion.LookRotation(direction);
float elapsedTime = 0f;
float rotationTime = 0.5f;
// Smoothly rotate the NPC towards the target direction over a specified time.
while (elapsedTime < rotationTime)
{
targetRotation.x = 0;
targetRotation.z = 0;
transform.rotation = Quaternion.Slerp(transform.rotation, targetRotation, elapsedTime / rotationTime);
elapsedTime += Time.deltaTime;
yield return null;
}
// Log the action of picking up the target along with its name.
Logger.DebugLog($"Picking up Target: {target.name}", Logger.LogCategory.Actions);
// Retrieve the Animator component from the current NPC.
Animator animator = _currentNPC.GetComponent<Animator>();
// Start the "Picking Up" animation with a cross-fade transition.
animator.CrossFade(Animator.StringToHash("Picking Up"), 0.1f);
// Wait for one second to ensure that the Animator has had time to transition to the "Picking Up" animation state.
yield return new WaitForSeconds(1);
// Define the time it takes for the hand to reach the object in the "Picking Up" animation.
// This is a specific point in time during the animation that we are interested in.
float timeToReachObject = 0.5f;
// Wait for the time it takes for the hand to reach the object.
yield return new WaitForSeconds(timeToReachObject);
// Check again if the target is still active before attempting to pick it up.
if (!target.activeInHierarchy)
{
Logger.DebugLog(
$"Target: {target.name} became inactive during the pick up animation! Exiting PickUp coroutine.",
Logger.LogCategory.Actions);
yield break;
}
// Once the hand has reached the object, set the target's parent to the NPC's transform,
// effectively "picking up" the object, and then deactivate the object.
target.transform.parent = gameObject.transform;
target.SetActive(false);
// Transition back to the "Idle" animation.
animator.CrossFade(Animator.StringToHash("Idle"), 0.1f);
// Invoke the ActionEnded event with the "PickUp" action and the target GameObject.
ActionEnded?.Invoke("PickUp", target);
}
private IEnumerator Drop(GameObject target)
{
Debug.Log("Entering Drop Coroutine");
// Notify that the drop action has started
ActionStarted?.Invoke("Drop", target);
if (target == null)
{
Debug.Log("Target is null");
yield break; // Exit the coroutine if target is null
}
// Log the drop action
Logger.DebugLog($"Dropping Target: {target.name}", Logger.LogCategory.Actions);
// Detach the target and enable it
target.transform.parent = null;
target.SetActive(true);
// Wait for 0.5 seconds before proceeding (if needed)
yield return new WaitForSeconds(0.5f);
// Notify that the drop action has ended
ActionEnded?.Invoke("Drop", target);
}
private void Jump()
{
ActionStarted?.Invoke("Jump", _currentNPC.gameObject);
// Apply an upward impulse to the NPC's rigidbody.
float jumpForce = 5f;
GetComponent<Rigidbody>().AddForce(new Vector3(0f, jumpForce, 0f), ForceMode.Impulse);
// Note: this cross-fades to the "Dance" animator state; swap in a dedicated jump state if the rig has one.
_currentNPC.GetComponent<Animator>().CrossFade(Animator.StringToHash("Dance"), 1);
ActionEnded?.Invoke("Jump", _currentNPC.gameObject);
}
// STEP 3: Add the function for your action here.
#endregion
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 744cc0ebc534aa44b8b0871ca72570b9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,40 @@
using System;
using UnityEngine;
namespace Convai.Scripts.Utils
{
/// <summary>
/// This script defines global actions and settings for Convai.
/// </summary>
[AddComponentMenu("Convai/Convai Interactables Data")]
[HelpURL("https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview")]
public class ConvaiInteractablesData : MonoBehaviour
{
[Tooltip("Array of Characters in the environment")]
[SerializeField] public Character[] Characters;
[Tooltip("Array of Objects in the environment")]
[SerializeField] public Object[] Objects;
public Transform DynamicMoveTargetIndicator;
/// <summary>
/// Represents a character in the environment.
/// </summary>
[Serializable]
public class Character
{
[SerializeField] public string Name;
[SerializeField] public string Bio;
[SerializeField] public GameObject gameObject;
}
[Serializable]
public class Object
{
[SerializeField] public string Name;
[SerializeField] public string Description;
[SerializeField] public GameObject gameObject;
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b3b04a8e56d4a394585355d57021ba7a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 79e17fbbffadd7f47b7b345e0be9a750
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,203 @@
using System;
using System.Text.RegularExpressions;
using Convai.Scripts.Utils.LipSync.Types;
using Service;
using UnityEngine;
namespace Convai.Scripts.Utils.LipSync
{
public class ConvaiLipSync : MonoBehaviour
{
public enum LipSyncBlendshapeType
{
None, // Default Value
OVR, // Oculus
ReallusionPlus, // Reallusion Extended
ARKit, // AR Kit - Translated from Oculus
}
[Tooltip(
"The type of facial blend-shapes in the character. Select OVR for Oculus and ReallusionPlus for Reallusion Extended visemes.")]
public LipSyncBlendshapeType BlendshapeType = LipSyncBlendshapeType.OVR;
[Tooltip("Skinned Mesh Renderer Component for the head of the character.")]
public SkinnedMeshRenderer HeadSkinnedMeshRenderer;
[Tooltip("Skinned Mesh Renderer Component for the teeth of the character, if available. Leave empty if not.")]
public SkinnedMeshRenderer TeethSkinnedMeshRenderer;
[Tooltip("Skinned Mesh Renderer Component for the tongue of the character, if available. Leave empty if not.")]
public SkinnedMeshRenderer TongueSkinnedMeshRenderer;
[Tooltip("Game object with the bone of the jaw for the character, if available. Leave empty if not.")]
public GameObject jawBone;
[Tooltip("Game object with the bone of the tongue for the character, if available. Leave empty if not.")]
        public GameObject tongueBone; // the tongue has no real bone; this is a proxy transform used for rotation
[HideInInspector]
public FaceModel faceModel = FaceModel.OvrModelName;
[Tooltip("The index of the first blendshape that will be manipulated.")]
public int firstIndex;
[Tooltip("This will multiply the weights of the incoming frames to the lipsync")]
[field: SerializeField] public float WeightMultiplier { get; private set; } = 1f;
private ConvaiNPC _convaiNPC;
public event Action<bool> OnCharacterLipSyncing;
private ConvaiLipSyncApplicationBase convaiLipSyncApplicationBase;
        public ConvaiLipSyncApplicationBase ConvaiLipSyncApplicationBase
        {
            get => convaiLipSyncApplicationBase;
            private set => convaiLipSyncApplicationBase = value;
        }
private void Awake()
{
switch (BlendshapeType)
{
case LipSyncBlendshapeType.None:
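                    // No lip-sync implementation selected; lip sync stays disabled.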
break;
case LipSyncBlendshapeType.OVR:
ConvaiLipSyncApplicationBase = gameObject.GetOrAddComponent<ConvaiOVRLipsync>();
break;
case LipSyncBlendshapeType.ReallusionPlus:
ConvaiLipSyncApplicationBase = gameObject.GetOrAddComponent<ConvaiReallusionLipSync>();
break;
case LipSyncBlendshapeType.ARKit:
ConvaiLipSyncApplicationBase = gameObject.GetOrAddComponent<ConvaiARKitLipSync>();
break;
}
}
        /// <summary>
        /// Automatically assigns any unassigned skinned mesh renderers (head, teeth, tongue)
        /// using regex-based lookups, then initializes the configured lip-sync implementation.
        /// </summary>
private void Start()
{
// regex search for SkinnedMeshRenderers: head, teeth, tongue
if (HeadSkinnedMeshRenderer == null)
HeadSkinnedMeshRenderer = GetHeadSkinnedMeshRendererWithRegex(transform);
if (TeethSkinnedMeshRenderer == null)
TeethSkinnedMeshRenderer = GetTeethSkinnedMeshRendererWithRegex(transform);
if (TongueSkinnedMeshRenderer == null)
TongueSkinnedMeshRenderer = GetTongueSkinnedMeshRendererWithRegex(transform);
_convaiNPC = GetComponent<ConvaiNPC>();
ConvaiLipSyncApplicationBase.Initialize(this, _convaiNPC);
SetCharacterLipSyncing(true);
}
        /// <summary>
        /// Fires an event to update the character lip-syncing state.
        /// </summary>
        /// <param name="value">Whether the character is currently lip-syncing.</param>
private void SetCharacterLipSyncing(bool value)
{
OnCharacterLipSyncing?.Invoke(value);
}
private void OnApplicationQuit()
{
StopLipSync();
}
/// <summary>
/// This function finds the Head skinned mesh renderer components, if present,
/// in the children of the parentTransform using regex.
/// </summary>
/// <param name="parentTransform">The parent transform whose children are searched.</param>
/// <returns>The SkinnedMeshRenderer component of the Head, if found; otherwise, null.</returns>
private SkinnedMeshRenderer GetHeadSkinnedMeshRendererWithRegex(Transform parentTransform)
{
// Initialize a variable to store the found SkinnedMeshRenderer.
SkinnedMeshRenderer findFaceSkinnedMeshRenderer = null;
// Define a regular expression pattern for matching child object names.
Regex regexPattern = new("(.*_Head|CC_Base_Body)");
// Iterate through each child of the parentTransform.
foreach (Transform child in parentTransform)
// Check if the child's name matches the regex pattern.
if (regexPattern.IsMatch(child.name))
{
// If a match is found, get the SkinnedMeshRenderer component of the child.
findFaceSkinnedMeshRenderer = child.GetComponent<SkinnedMeshRenderer>();
// If a SkinnedMeshRenderer is found, break out of the loop.
if (findFaceSkinnedMeshRenderer != null) break;
}
// Return the found SkinnedMeshRenderer (or null if none is found).
return findFaceSkinnedMeshRenderer;
}
/// <summary>
/// This function finds the Teeth skinned mesh renderer components, if present,
/// in the children of the parentTransform using regex.
/// </summary>
/// <param name="parentTransform">The parent transform whose children are searched.</param>
/// <returns>The SkinnedMeshRenderer component of the Teeth, if found; otherwise, null.</returns>
private SkinnedMeshRenderer GetTeethSkinnedMeshRendererWithRegex(Transform parentTransform)
{
// Initialize a variable to store the found SkinnedMeshRenderer for teeth.
SkinnedMeshRenderer findTeethSkinnedMeshRenderer = null;
// Define a regular expression pattern for matching child object names.
Regex regexPattern = new("(.*_Teeth|CC_Base_Body)");
// Iterate through each child of the parentTransform.
foreach (Transform child in parentTransform)
// Check if the child's name matches the regex pattern.
if (regexPattern.IsMatch(child.name))
{
// If a match is found, get the SkinnedMeshRenderer component of the child.
findTeethSkinnedMeshRenderer = child.GetComponent<SkinnedMeshRenderer>();
// If a SkinnedMeshRenderer is found, break out of the loop.
if (findTeethSkinnedMeshRenderer != null) break;
}
// Return the found SkinnedMeshRenderer for teeth (or null if none is found).
return findTeethSkinnedMeshRenderer;
}
/// <summary>
/// This function finds the Tongue skinned mesh renderer components, if present,
/// in the children of the parentTransform using regex.
/// </summary>
/// <param name="parentTransform">The parent transform whose children are searched.</param>
/// <returns>The SkinnedMeshRenderer component of the Tongue, if found; otherwise, null.</returns>
private SkinnedMeshRenderer GetTongueSkinnedMeshRendererWithRegex(Transform parentTransform)
{
// Initialize a variable to store the found SkinnedMeshRenderer for the tongue.
SkinnedMeshRenderer findTongueSkinnedMeshRenderer = null;
// Define a regular expression pattern for matching child object names.
Regex regexPattern = new("(.*_Tongue|CC_Base_Body)");
// Iterate through each child of the parentTransform.
foreach (Transform child in parentTransform)
// Check if the child's name matches the regex pattern.
if (regexPattern.IsMatch(child.name))
{
// If a match is found, get the SkinnedMeshRenderer component of the child.
findTongueSkinnedMeshRenderer = child.GetComponent<SkinnedMeshRenderer>();
// If a SkinnedMeshRenderer is found, break out of the loop.
if (findTongueSkinnedMeshRenderer != null) break;
}
// Return the found SkinnedMeshRenderer for the tongue (or null if none is found).
return findTongueSkinnedMeshRenderer;
}
/// <summary>
        /// Purges the latest chunk of lip-sync frames.
/// </summary>
public void PurgeExcessFrames()
{
ConvaiLipSyncApplicationBase?.PurgeExcessBlendShapeFrames();
}
/// <summary>
        /// Stops lip sync by clearing the frame queue.
/// </summary>
public void StopLipSync()
{
ConvaiLipSyncApplicationBase?.ClearQueue();
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 88bce56f6985ef84f8835a0152628fa1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,159 @@
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using Service;
using UnityEngine;
namespace Convai.Scripts.Utils.LipSync
{
/// <summary>
    /// Serves as the base class for any lip-sync method that Convai develops or uses.
/// </summary>
public abstract class ConvaiLipSyncApplicationBase : MonoBehaviour
{
/// <summary>
        /// Maps each blend-shape name to the index of the blend weight it affects.
/// </summary>
protected Dictionary<string, int> _headMapping;
/// <summary>
/// Reference to the Head Skin Mesh Renderer used for lipsync
/// </summary>
protected SkinnedMeshRenderer _headSkinMeshRenderer;
/// <summary>
/// Reference to the Teeth Skin Mesh Renderer used for lipsync
/// </summary>
protected SkinnedMeshRenderer _teethSkinMeshRenderer;
/// <summary>
/// Reference to the Jaw bone gameobject used for lipsync
/// </summary>
private GameObject _jawBone;
/// <summary>
/// Reference to the Tongue bone gameobject used for lipsync
/// </summary>
private GameObject _tongueBone;
/// <summary>
/// Reference to the NPC on which lipsync will be applied
/// </summary>
protected ConvaiNPC _convaiNPC;
protected float _weightMultiplier { get; private set; }
#region Null States of References
protected bool HasHeadSkinnedMeshRenderer { get; private set; }
protected bool HasTeethSkinnedMeshRenderer { get; private set; }
protected bool HasJawBone { get; private set; }
protected bool HasTongueBone { get; private set; }
#endregion
/// <summary>
        /// Initializes and sets up everything necessary for lip sync to work.
/// </summary>
/// <param name="convaiLipSync"></param>
/// <param name="convaiNPC"></param>
public virtual void Initialize(ConvaiLipSync convaiLipSync, ConvaiNPC convaiNPC)
{
_headSkinMeshRenderer = convaiLipSync.HeadSkinnedMeshRenderer;
HasHeadSkinnedMeshRenderer = _headSkinMeshRenderer != null;
_teethSkinMeshRenderer = convaiLipSync.TeethSkinnedMeshRenderer;
HasTeethSkinnedMeshRenderer = _teethSkinMeshRenderer != null;
_jawBone = convaiLipSync.jawBone;
HasJawBone = _jawBone != null;
_tongueBone = convaiLipSync.tongueBone;
HasTongueBone = _tongueBone != null;
_convaiNPC = convaiNPC;
_weightMultiplier = convaiLipSync != null ? convaiLipSync.WeightMultiplier : 1;
if (HasHeadSkinnedMeshRenderer)
_headMapping = SetupMapping(GetHeadRegexMapping, _headSkinMeshRenderer);
}
/// <summary>
/// Creates the mapping of blendshape and index it affects during lipsync
/// </summary>
protected Dictionary<string, int> SetupMapping(Func<Dictionary<string, string>> finder, SkinnedMeshRenderer skinnedMeshRenderer)
{
Dictionary<string, int> mapping = new Dictionary<string, int>();
Dictionary<string, string> regexMapping = finder();
foreach (KeyValuePair<string, string> pair in regexMapping)
{
for (int i = 0; i < skinnedMeshRenderer.sharedMesh.blendShapeCount; i++)
{
string blendShapeName = skinnedMeshRenderer.sharedMesh.GetBlendShapeName(i);
Regex regex = new(pair.Value);
if (regex.IsMatch(blendShapeName))
{
mapping.TryAdd(pair.Key, i);
}
}
}
return mapping;
}
/// <summary>
        /// Returns a dictionary mapping each blend-shape name to the regex used to find its index.
        /// TODO: Override this to match your rig's blend-shape naming.
/// </summary>
/// <returns></returns>
protected virtual Dictionary<string, string> GetHeadRegexMapping()
{
return new Dictionary<string, string>();
}
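        // An illustrative override sketch (blend-shape names vary per rig; the entry
        // below is an example, not a required mapping):
        //   protected override Dictionary<string, string> GetHeadRegexMapping()
        //   {
        //       return new Dictionary<string, string>
        //       {
        //           { "AA", "(?:[A-Z]\\d{1,2}_)?[Jj]aw[\\s_]*[Oo]pen" },
        //       };
        //   }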
/// <summary>
/// Updates the tongue bone rotation to the new rotation
/// </summary>
/// <param name="newRotation"></param>
protected void UpdateTongueBoneRotation(Vector3 newRotation)
{
if (!HasTongueBone) return;
_tongueBone.transform.localEulerAngles = newRotation;
}
/// <summary>
/// Updates the jaw bone rotation to the new rotation
/// </summary>
/// <param name="newRotation"></param>
protected void UpdateJawBoneRotation(Vector3 newRotation)
{
if (!HasJawBone) return;
_jawBone.transform.localEulerAngles = newRotation;
}
/// <summary>
/// Updates the current blendshape or visemes frame
/// </summary>
protected abstract void UpdateBlendShape();
/// <summary>
/// This removes the excess frames in the queue
/// </summary>
public abstract void PurgeExcessBlendShapeFrames();
        /// <summary>
        /// Checks whether the given queue is small enough to be safely purged.
        /// </summary>
        protected bool CanPurge<T>(Queue<T> queue)
        {
            // TODO: consider exposing this purge threshold instead of hardcoding it.
            return queue.Count < 10;
        }
        /// <summary>
        /// Resets the whole queue of frames.
        /// </summary>
public abstract void ClearQueue();
/// <summary>
/// Adds blendshape frames in the queue
/// </summary>
/// <param name="blendshapeFrames"></param>
public virtual void EnqueueQueue(Queue<BlendshapeFrame> blendshapeFrames) { }
/// <summary>
/// Adds Visemes frames in the list
/// </summary>
/// <param name="visemesFrames"></param>
public virtual void EnqueueQueue(Queue<VisemesData> visemesFrames) { }
/// <summary>
/// Adds a blendshape frame in the last queue
/// </summary>
/// <param name="blendshapeFrame"></param>
public virtual void EnqueueFrame(BlendshapeFrame blendshapeFrame) { }
/// <summary>
/// Adds a viseme frame to the last element of the list
/// </summary>
/// <param name="viseme"></param>
public virtual void EnqueueFrame(VisemesData viseme) { }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bb231034f5b2dee4494498fe9117bda1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,96 @@
using System.Collections.Generic;
using Service;
using UnityEngine;
namespace Convai.Scripts.Utils.LipSync
{
public class LipSyncBlendFrameData
{
public enum FrameType
{
Visemes,
Blendshape
}
private readonly int _totalFrames;
private readonly Queue<BlendshapeFrame> _blendShapeFrames = new Queue<BlendshapeFrame>();
private readonly Queue<VisemesData> _visemesFrames = new Queue<VisemesData>();
private readonly GetResponseResponse _getResponseResponse;
private readonly FrameType _frameType;
private int _framesCaptured;
private bool _partiallyProcessed;
public LipSyncBlendFrameData(int totalFrames, GetResponseResponse response, FrameType frameType)
{
_totalFrames = totalFrames;
_framesCaptured = 0;
_getResponseResponse = response;
_frameType = frameType;
//Logger.DebugLog($"Total Frames: {_totalFrames} | {response.AudioResponse.TextData}", Logger.LogCategory.LipSync);
}
public void Enqueue(BlendshapeFrame blendShapeFrame)
{
_blendShapeFrames.Enqueue(blendShapeFrame);
_framesCaptured++;
}
        public void Enqueue(VisemesData visemesData)
        {
            _visemesFrames.Enqueue(visemesData);
            // Count viseme frames as well; otherwise CanProcess()/CanPartiallyProcess()
            // never trigger for viseme-based lip sync.
            _framesCaptured++;
        }
public void Process(ConvaiNPC npc)
{
if (!_partiallyProcessed)
npc.EnqueueResponse(_getResponseResponse);
switch (_frameType)
{
case FrameType.Visemes:
npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueQueue(new Queue<VisemesData>(_visemesFrames));
break;
case FrameType.Blendshape:
npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueQueue(new Queue<BlendshapeFrame>(_blendShapeFrames));
break;
}
npc.AudioManager.SetWaitForCharacterLipSync(false);
}
public void ProcessPartially(ConvaiNPC npc)
{
if (!_partiallyProcessed)
{
_partiallyProcessed = true;
npc.EnqueueResponse(_getResponseResponse);
npc.AudioManager.SetWaitForCharacterLipSync(false);
}
switch (_frameType)
{
case FrameType.Visemes:
while (_visemesFrames.Count != 0)
{
npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueFrame(_visemesFrames.Dequeue());
}
break;
case FrameType.Blendshape:
while (_blendShapeFrames.Count != 0)
{
npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueFrame(_blendShapeFrames.Dequeue());
}
break;
}
}
public bool CanPartiallyProcess()
{
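            // Heuristic: allow playback to start once roughly 70% of the expected
            // frames (capped at 21) have been captured, trading a little lip-sync
            // accuracy for lower perceived latency.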
return _framesCaptured > Mathf.Min(21, _totalFrames * 0.7f);
}
public bool CanProcess()
{
return _framesCaptured == _totalFrames;
}
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 31db1a9457d64f3d936ff7f5aabfb193
timeCreated: 1708491067

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6158a9323c720f5408c5b7caa77405cc
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,112 @@
using System.Collections.Generic;
using System.Reflection;
using Service;
namespace Convai.Scripts.Utils.LipSync.Types
{
public class ConvaiARKitLipSync : ConvaiVisemesLipSync
{
private Dictionary<string, int> _teethMapping;
public override void Initialize(ConvaiLipSync convaiLipSync, ConvaiNPC convaiNPC)
{
base.Initialize(convaiLipSync, convaiNPC);
if (HasTeethSkinnedMeshRenderer)
_teethMapping = SetupMapping(GetTeethRegexMapping, _teethSkinMeshRenderer);
}
private Dictionary<string, string> GetTeethRegexMapping()
{
string prefix = "(?:[A-Z]\\d{1,2}_)?";
string spacer = "[\\s_]*";
string open = "[Oo]pen";
return new Dictionary<string, string>()
{
{"KK", $"{prefix}[Jj]aw{spacer}{open}"},
{"AA", $"{prefix}[Jj]aw{spacer}[Ff]orward"}
};
}
protected override Dictionary<string, string> GetHeadRegexMapping()
{
string mouth = "[Mm]outh";
string spacer = "[\\s_]*";
string left = "[Ll]eft";
string right = "[Rr]ight";
string lower = "[Ll]ower";
string upper = "[Uu]pper";
string open = "[Oo]pen";
string funnel = "[Ff]unnel";
string pucker = "[Pp]ucker";
string prefix = "(?:[A-Z]\\d{1,2}_)?";
return new Dictionary<string, string>()
{
{"PP", $"{prefix}{mouth}{spacer}{pucker}"},
{"FF", $"{prefix}{mouth}{spacer}{funnel}"},
{"THL", $"{prefix}{mouth}{spacer}{lower}{spacer}[Dd]own{spacer}{left}"},
{"THR", $"{prefix}{mouth}{spacer}{lower}{spacer}[Dd]own{spacer}{right}"},
{"DDL", $"{prefix}{mouth}{spacer}[Pp]ress{spacer}{left}"},
{"DDR", $"{prefix}{mouth}{spacer}[Pp]ress{spacer}{right}"},
{"KK", $"{prefix}[Jj]aw{spacer}{open}"},
{"CHL",$"{prefix}{mouth}{spacer}[Ss]tretch{spacer}{left}"},
{"CHR",$"{prefix}{mouth}{spacer}[Ss]tretch{spacer}{right}"},
{"SSL", $"{prefix}{mouth}{spacer}[Ss]mile{spacer}{left}"},
{"SSR", $"{prefix}{mouth}{spacer}[Ss]mile{spacer}{right}"},
{"NNL", $"{prefix}[Nn]ose{spacer}[Ss]neer{spacer}{left}"},
{"NNR", $"{prefix}[Nn]ose{spacer}[Ss]neer{spacer}{right}"},
{"RRU",$"{prefix}{mouth}{spacer}[Rr]oll{spacer}{upper}"},
{"RRL", $"{prefix}{mouth}{spacer}[Rr]oll{spacer}{lower}"},
{"AA", $"{prefix}[Jj]aw{spacer}{open}"},
{"EL", $"{prefix}{mouth}{spacer}{upper}{spacer}[Uu]p{spacer}{left}"},
{"ER", $"{prefix}{mouth}{spacer}{upper}{spacer}[Uu]p{spacer}{right}"},
{"IHL", $"{prefix}{mouth}{spacer}[Ff]rown{spacer}{left}"},
{"IHR",$"{prefix}{mouth}{spacer}[Ff]rown{spacer}{right}"},
{"OU", $"{prefix}{mouth}{spacer}{pucker}"},
{"OH", $"{prefix}{mouth}{spacer}{funnel}"},
};
}
private void Update()
{
// Check if the dequeued frame is not null.
if (_currentViseme == null) return;
// Check if the frame represents silence (-2 is a placeholder for silence).
if (_currentViseme.Sil == -2) return;
float weight;
List<int> knownHeadIndexs = new List<int>();
List<int> knownTeethIndexs = new List<int>();
foreach (PropertyInfo propertyInfo in typeof(Viseme).GetProperties())
{
if (propertyInfo.PropertyType != typeof(float)) continue;
string fieldName = propertyInfo.Name.ToUpper();
float value = (float)propertyInfo.GetValue(_currentViseme);
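                // Per-viseme attenuation: empirically tuned scale factors that normalize
                // how strongly each viseme drives its matched blend shapes.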
weight = fieldName switch
{
"KK" => 1.0f / 1.5f,
"DD" => 1.0f / 0.7f,
"CH" => 1.0f / 2.7f,
"SS" => 1.0f / 1.5f,
"NN" => 1.0f / 2.0f,
"RR" => 1.0f / 0.9f,
"AA" => 1.0f / 2.0f,
"II" => 1.0f / 1.2f,
"OH" => 1.2f,
_ => 1.0f
};
foreach (string s in _possibleCombinations)
{
float weightThisFrame = value * weight * _weightMultiplier;
string modifiedFieldName = fieldName + s;
if (HasHeadSkinnedMeshRenderer)
{
FindAndUpdateBlendWeight(_headSkinMeshRenderer, modifiedFieldName, weightThisFrame, knownHeadIndexs, _headMapping);
}
if (HasTeethSkinnedMeshRenderer)
{
FindAndUpdateBlendWeight(_teethSkinMeshRenderer, modifiedFieldName, weightThisFrame, knownTeethIndexs, _teethMapping);
}
}
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a78929d391d407d46ab3288e15a0700d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,202 @@
using System.Collections.Generic;
using System.Reflection;
using Service;
using UnityEngine;
/*
! This class is a work in progress and can produce unexpected results. Convai does not advise using it in production; use it with extreme caution.
*/
namespace Convai.Scripts.Utils.LipSync.Types
{
public class ConvaiBlendShapeLipSync : ConvaiLipSyncApplicationBase
{
private const float A2XFRAMERATE = 1f / 30f;
private Queue<Queue<BlendshapeFrame>> _blendShapesQueue = new();
private ARKitBlendShapes _currentBlendshape;
protected override Dictionary<string, string> GetHeadRegexMapping()
{
#region Regex Finders
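            // The optional prefix matches rig-specific numbering such as "A25_"
            // (a capital letter followed by one or two digits and an underscore).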
string prefix = "(?:[A-Z]\\d{1,2}_)?";
string spacer = "[\\s_]*";
string mouth = "[Mm]outh";
string nose = "[Nn]ose";
string left = "[Ll]eft";
string right = "[Rr]ight";
string up = "[Uu]p";
string down = "[Dd]own";
string lower = "[Ll]ower";
string upper = "[Uu]pper";
string open = "[Oo]pen";
string funnel = "[Ff]unnel";
string pucker = "[Pp]ucker";
string sneer = "[Ss]neer";
string cheek = "[Cc]heek";
string squint = "[Ss]quint";
string brow = "[Bb]row";
string outer = "[Oo]uter";
string inner = "[Ii]nner";
string eye = "[Ee]ye";
string blink = "[Bb]link";
string look = "[Ll]ook";
string In = "[Ii]n";
string Out = "[Oo]ut";
string wide = "[Ww]ide";
string forward = "[Ff]orward";
string jaw = "[Jj]aw";
string close = "[Cc]lose";
string smile = "[Ss]mile";
string frown = "[Ff]rown";
string dimple = "[Dd]imple";
string stretch = "[Ss]tretch";
string roll = "[Rr]oll";
string shrug = "[Ss]hrug";
string press = "[Pp]ress";
#endregion
return new Dictionary<string, string>(){
{"TougueOut", $"{prefix}[Tt]ougue{spacer}[Oo]ut"},
{"NoseSneerRight", $"{prefix}{nose}{spacer}{sneer}{spacer}{right}"},
{"NoseSneerLeft", $"{prefix}{nose}{spacer}{sneer}{spacer}{left}"},
{"CheekSquintRight", $"{prefix}{cheek}{spacer}{squint}{spacer}{right}"},
{"CheekSquintLeft", $"{prefix}{cheek}{spacer}{squint}{spacer}{left}"},
{"CheekPuff", $"{prefix}{cheek}{spacer}[Pp]uff"},
{"BrowDownLeft", $"{prefix}{brow}{spacer}{down}{spacer}{left}"},
{"BrowDownRight", $"{prefix}{brow}{spacer}{down}{spacer}{right}"},
{"BrowInnerUp", $"{prefix}{brow}{spacer}{inner}{spacer}{up}"},
{"BrowOuterUpLeft", $"{prefix}{brow}{spacer}{outer}{spacer}{up}{spacer}{left}"},
{"BrowOuterUpRight", $"{prefix}{brow}{spacer}{outer}{spacer}{up}{spacer}{right}"},
{"EyeBlinkLeft", $"{prefix}{eye}{spacer}{blink}{spacer}{left}"},
{"EyeLookDownLeft",$"{prefix}{eye}{spacer}{look}{spacer}{In}{left}"},
{"EyeLookInLeft", $"{prefix}{eye}{spacer}{look}{spacer}{In}{spacer}{left}"},
{"EyeLookOutLeft", $"{prefix}{eye}{spacer}{look}{spacer}{Out}{spacer}{left}"},
{"EyeLookUpLeft", $"{prefix}{eye}{spacer}{look}{spacer}{up}{spacer}{left}"},
{"EyeSquintLeft", $"{prefix}{eye}{spacer}{squint}{spacer}{left}"},
{"EyeWideLeft", $"{prefix}{eye}{spacer}{wide}{spacer}{left}"},
{"EyeBlinkRight", $"{prefix}{eye}{spacer}{blink}{spacer}{right}"},
{"EyeLookDownRight",$"{prefix}{eye}{spacer}{look}{spacer}{In}{right}"},
{"EyeLookInRight", $"{prefix}{eye}{spacer}{look}{spacer}{In}{spacer}{right}"},
{"EyeLookOutRight", $"{prefix}{eye}{spacer}{look}{spacer}{Out}{spacer}{right}"},
{"EyeLookUpRight", $"{prefix}{eye}{spacer}{look}{spacer}{up}{spacer}{right}"},
{"EyeSquintRight", $"{prefix}{eye}{spacer}{squint}{spacer}{right}"},
{"EyeWideRight", $"{prefix}{eye}{spacer}{wide}{spacer}{right}"},
{"JawForward", $"{prefix}{jaw}{spacer}{forward}"},
{"JawLeft", $"{prefix}{jaw}{spacer}{left}"},
{"JawRight", $"{prefix}{jaw}{spacer}{right}"},
{"JawOpen", $"{prefix}{jaw}{spacer}{open}"},
{"MouthClose", $"{prefix}{mouth}{spacer}{close}"},
{"MouthFunnel", $"{prefix}{mouth}{spacer}{funnel}"},
{"MouthPucker", $"{prefix}{mouth}{spacer}{pucker}"},
{"Mouthleft", $"{prefix}{mouth}{spacer}{left}"},
{"MouthRight", $"{prefix}{mouth}{spacer}{right}"},
{"MouthSmileLeft", $"{prefix}{mouth}{spacer}{smile}{spacer}{left}"},
{"MouthSmileRight", $"{prefix}{mouth}{spacer}{smile}{spacer}{right}"},
{"MouthFrownLeft", $"{prefix}{mouth}{spacer}{frown}{spacer}{left}"},
{"MouthFrownRight", $"{prefix}{mouth}{spacer}{frown}{spacer}{right}"},
{"MouthDimpleLeft", $"{prefix}{mouth}{spacer}{dimple}{spacer}{left}"},
{"MouthDimpleRight", $"{prefix}{mouth}{spacer}{dimple}{spacer}{right}"},
{"MouthStretchLeft", $"{prefix}{mouth}{spacer}{stretch}{spacer}{left}"},
{"MouthStretchRight", $"{prefix}{mouth}{spacer}{stretch}{spacer}{right}"},
{"MouthRollLower", $"{prefix}{mouth}{spacer}{roll}{spacer}{lower}"},
{"MouthRollUpper", $"{prefix}{mouth}{spacer}{roll}{spacer}{upper}"},
{"MouthShrugLower", $"{prefix}{mouth}{spacer}{shrug}{spacer}{lower}"},
{"MouthShrugUpper", $"{prefix}{mouth}{spacer}{shrug}{spacer}{upper}"},
{"MouthPressLeft", $"{prefix}{mouth}{spacer}{press}{spacer}{left}"},
{"MouthPressRight", $"{prefix}{mouth}{spacer}{press}{spacer}{right}"},
{"MouthLowerDownLeft", $"{prefix}{mouth}{spacer}{lower}{spacer}{down}{spacer}{left}"},
{"MouthLowerDownRight", $"{prefix}{mouth}{spacer}{lower}{spacer}{down}{spacer}{right}"},
{"MouthUpperUpLeft", $"{prefix}{mouth}{spacer}{upper}{spacer}{up}{spacer}{left}"},
{"MouthUpperUpRight", $"{prefix}{mouth}{spacer}{upper}{spacer}{up}{spacer}{right}"},
};
}
public override void Initialize(ConvaiLipSync convaiLipSync, ConvaiNPC convaiNPC)
{
base.Initialize(convaiLipSync, convaiNPC);
InvokeRepeating(nameof(UpdateBlendShape), 0, A2XFRAMERATE);
}
protected override void UpdateBlendShape()
{
if (_blendShapesQueue == null || _blendShapesQueue.Count <= 0)
{
_currentBlendshape = new ARKitBlendShapes();
return;
}
if (_blendShapesQueue.Peek().Count == 0)
{
_blendShapesQueue.Dequeue();
return;
}
if (!_convaiNPC.IsCharacterTalking) return;
_currentBlendshape = _blendShapesQueue.Peek().Dequeue().Blendshapes;
}
private void Update()
{
if (_currentBlendshape == null) return;
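            // Drive the jaw bone from JawOpen (rotating from -90° at rest toward -120°
            // fully open) and the tongue bone from TongueOut; blend-shape weights follow.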
UpdateJawBoneRotation(new Vector3(0.0f, 0.0f, -90.0f - _currentBlendshape.JawOpen * 30f));
UpdateTongueBoneRotation(new Vector3(0.0f, 0.0f, -5.0f * _currentBlendshape.TongueOut));
if (!HasHeadSkinnedMeshRenderer) return;
foreach (PropertyInfo propertyInfo in typeof(ARKitBlendShapes).GetProperties())
{
if (propertyInfo.PropertyType != typeof(float)) continue;
string fieldName = propertyInfo.Name;
float value = (float)propertyInfo.GetValue(_currentBlendshape);
if (_headMapping.TryGetValue(fieldName, out int index))
{
_headSkinMeshRenderer.SetBlendShapeWeightInterpolate(
index,
value * _weightMultiplier,
Time.deltaTime
);
}
}
}
public override void PurgeExcessBlendShapeFrames()
{
if (_blendShapesQueue.Count <= 0) return;
if (!CanPurge<BlendshapeFrame>(_blendShapesQueue.Peek())) return;
Logger.Info($"Purging {_blendShapesQueue.Peek().Count} frames", Logger.LogCategory.LipSync);
_blendShapesQueue.Dequeue();
}
public override void ClearQueue()
{
_blendShapesQueue = new Queue<Queue<BlendshapeFrame>>();
_currentBlendshape = new ARKitBlendShapes();
}
public override void EnqueueQueue(Queue<BlendshapeFrame> blendshapeFrames)
{
_blendShapesQueue.Enqueue(blendshapeFrames);
}
public override void EnqueueFrame(BlendshapeFrame blendshapeFrame)
{
if (_blendShapesQueue.Count == 0)
{
EnqueueQueue(new Queue<BlendshapeFrame>());
}
_blendShapesQueue.Peek().Enqueue(blendshapeFrame);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ca3ca8129b12656449558f306c86f70d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,162 @@
using System.Collections.Generic;
using System.Reflection;
using Service;
using UnityEngine;
namespace Convai.Scripts.Utils.LipSync.Types
{
public class ConvaiOVRLipsync : ConvaiVisemesLipSync
{
private int _firstIndex;
public override void Initialize(ConvaiLipSync convaiLipSync, ConvaiNPC convaiNPC)
{
base.Initialize(convaiLipSync, convaiNPC);
_firstIndex = convaiLipSync.firstIndex;
}
protected override Dictionary<string, string> GetHeadRegexMapping()
{
const string mouth = "[Mm]outh";
const string spacer = "[\\s_]*";
const string left = "[Ll]eft";
const string right = "[Rr]ight";
const string lower = "[Ll]ower";
const string upper = "[Uu]pper";
const string open = "[Oo]pen";
const string funnel = "[Ff]unnel";
const string pucker = "[Pp]ucker";
const string prefix = "(?:[A-Z]\\d{1,2}_)?";
return new Dictionary<string, string>
{
{"PP", $"{prefix}{mouth}{spacer}{pucker}"},
{"FF", $"{prefix}{mouth}{spacer}{funnel}"},
{"THL", $"{prefix}{mouth}{spacer}{lower}{spacer}[Dd]own{spacer}{left}"},
{"THR", $"{prefix}{mouth}{spacer}{lower}{spacer}[Dd]own{spacer}{right}"},
{"DDL", $"{prefix}{mouth}{spacer}[Pp]ress{spacer}{left}"},
{"DDR", $"{prefix}{mouth}{spacer}[Pp]ress{spacer}{right}"},
{"KK", $"{prefix}[Jj]aw{spacer}{open}"},
{"CHL", $"{prefix}{mouth}{spacer}[Ss]tretch{spacer}{left}"},
{"CHR", $"{prefix}{mouth}{spacer}[Ss]tretch{spacer}{right}"},
{"SSL", $"{prefix}{mouth}{spacer}[Ss]mile{spacer}{left}"},
{"SSR", $"{prefix}{mouth}{spacer}[Ss]mile{spacer}{right}"},
{"NNL", $"{prefix}[Nn]ose{spacer}[Ss]neer{spacer}{left}"},
{"NNR", $"{prefix}[Nn]ose{spacer}[Ss]neer{spacer}{right}"},
{"RRU", $"{prefix}{mouth}{spacer}[Rr]oll{spacer}{upper}"},
{"RRL", $"{prefix}{mouth}{spacer}[Rr]oll{spacer}{lower}"},
{"AA", $"{prefix}[Jj]aw{spacer}[Oo]pen"},
{"EL", $"{prefix}{mouth}{spacer}{upper}{spacer}[Uu]p{spacer}{left}"},
{"ER", $"{prefix}{mouth}{spacer}{upper}{spacer}[Uu]p{spacer}{right}"},
{"IHL", $"{prefix}{mouth}{spacer}[Ff]rown{spacer}{left}"},
{"IHR", $"{prefix}{mouth}{spacer}[Ff]rown{spacer}{right}"},
{"OU", $"{prefix}{mouth}{spacer}{pucker}"},
{"OH", $"{prefix}{mouth}{spacer}{funnel}"},
};
}
private void Update()
{
if (_currentViseme == null || _currentViseme.Sil == -2) return;
float weight;
List<int> knownIndexes = new List<int>();
UpdateJawBoneRotation(new Vector3(0.0f, 0.0f, -90.0f));
UpdateTongueBoneRotation(new Vector3(0.0f, 0.0f, -5.0f));
if (HasHeadSkinnedMeshRenderer)
{
foreach (PropertyInfo propertyInfo in typeof(Viseme).GetProperties())
{
if (propertyInfo.PropertyType != typeof(float)) continue;
string fieldName = propertyInfo.Name.ToUpper();
float value = (float)propertyInfo.GetValue(_currentViseme);
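                    // Per-viseme attenuation: empirically tuned scale factors that normalize
                    // how strongly each viseme drives its matched blend shapes.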
weight = fieldName switch
{
"KK" => 1.0f / 1.5f,
"DD" => 1.0f / 0.7f,
"CH" => 1.0f / 2.7f,
"SS" => 1.0f / 1.5f,
"NN" => 1.0f / 2.0f,
"RR" => 1.0f / 0.9f,
"AA" => 1.0f / 2.0f,
"II" => 1.0f / 1.2f,
"OH" => 1.2f,
_ => 1.0f
};
foreach (string s in _possibleCombinations)
{
float weightThisFrame = value * weight * _weightMultiplier;
string modifiedFieldName = fieldName + s;
FindAndUpdateBlendWeight(_headSkinMeshRenderer, modifiedFieldName, weightThisFrame, knownIndexes, _headMapping);
}
}
}
UpdateJawBoneRotation(new Vector3(0.0f, 0.0f, CalculateJawRotation()));
UpdateTongueBoneRotation(new Vector3(0.0f, 0.0f, CalculateTongueRotation()));
            // Guard against a missing teeth renderer before indexing its blend shapes.
            if (!HasTeethSkinnedMeshRenderer || _teethSkinMeshRenderer.sharedMesh.blendShapeCount < _firstIndex + 15) return;
for (int i = 0; i < 15; i++)
{
float visemeValue = GetVisemeValueByIndex(i);
_teethSkinMeshRenderer.SetBlendShapeWeightInterpolate(_firstIndex + i, visemeValue * _weightMultiplier, Time.deltaTime);
}
}
private float CalculateJawRotation()
{
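            // Weighted average of the jaw-opening visemes, normalized by the weight sum,
            // then mapped onto a -90° (closed) to -120° (fully open) arc.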
float totalWeight = 0.2f + 0.1f + 0.5f + 0.2f + 0.2f + 1.0f + 0.2f + 0.3f + 0.8f + 0.3f;
float rotation = (0.2f * _currentViseme.Th
+ 0.1f * _currentViseme.Dd
+ 0.5f * _currentViseme.Kk
+ 0.2f * _currentViseme.Nn
+ 0.2f * _currentViseme.Rr
+ 1.0f * _currentViseme.Aa
+ 0.2f * _currentViseme.E
+ 0.3f * _currentViseme.Ih
+ 0.8f * _currentViseme.Oh
+ 0.3f * _currentViseme.Ou) / totalWeight;
return -90.0f - rotation * 30f;
}
private float CalculateTongueRotation()
{
float totalWeight = 0.1f + 0.2f + 0.15f;
float rotation = (0.1f * _currentViseme.Th
+ 0.2f * _currentViseme.Nn
+ 0.15f * _currentViseme.Rr) / totalWeight;
return rotation * 80f - 5f;
}
private float GetVisemeValueByIndex(int index)
{
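            // Index order follows the standard 15-viseme OVR layout:
            // sil, PP, FF, TH, DD, kk, CH, SS, nn, RR, aa, E, ih, oh, ou.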
return index switch
{
0 => _currentViseme.Sil,
1 => _currentViseme.Pp,
2 => _currentViseme.Ff,
3 => _currentViseme.Th,
4 => _currentViseme.Dd,
5 => _currentViseme.Kk,
6 => _currentViseme.Ch,
7 => _currentViseme.Ss,
8 => _currentViseme.Nn,
9 => _currentViseme.Rr,
10 => _currentViseme.Aa,
11 => _currentViseme.E,
12 => _currentViseme.Ih,
13 => _currentViseme.Oh,
14 => _currentViseme.Ou,
_ => 0.0f
};
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 769391e6890ecb0459ada7f3c4fb1400
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files have changed in this diff.