initial upload

This commit is contained in:
tom.hempel
2025-09-30 17:58:33 +02:00
commit 69b0c79692
4818 changed files with 229318 additions and 0 deletions

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a5e535a2ca4f1fe488b8e9bc5958d726
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: d2668e173130e8d448b53fd765d6ff11
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,63 @@
using System;
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.UI;
using UnityEngine;
#if ENABLE_INPUT_SYSTEM
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine.InputSystem;
#endif
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Controls player input to trigger a notification if there is no active NPC available for conversation.
/// </summary>
public class ActiveNPCChecker : MonoBehaviour
{
#if ENABLE_INPUT_SYSTEM
    /// <summary>
    ///     Subscribes to the talk key input action when the script starts.
    /// </summary>
    private void Start()
    {
        ConvaiInputManager.Instance.GetTalkKeyAction().started += ConvaiInputManager_TalkKeyActionStarted;
    }

    /// <summary>
    ///     Unsubscribes from the talk key input action when the script is destroyed.
    /// </summary>
    private void OnDestroy()
    {
        // During application or scene teardown the input manager singleton may already
        // have been destroyed; guard so unsubscription does not throw a NullReferenceException.
        if (ConvaiInputManager.Instance != null)
            ConvaiInputManager.Instance.GetTalkKeyAction().started -= ConvaiInputManager_TalkKeyActionStarted;
    }

    /// <summary>
    ///     Handles the talk key action and triggers a notification if no active NPC is available.
    /// </summary>
    /// <param name="input">The input context of the talk key action.</param>
    private void ConvaiInputManager_TalkKeyActionStarted(InputAction.CallbackContext input)
    {
        try
        {
            // The press is unusable when: it was not actually pressed this frame, an input
            // field has focus, there is no active NPC, or the active NPC is busy in an
            // NPC-to-NPC conversation.
            if (!input.action.WasPressedThisFrame() || UIUtilities.IsAnyInputFieldFocused() || ConvaiNPCManager.Instance.activeConvaiNPC == null ||
                ConvaiNPCManager.Instance.CheckForNPCToNPCConversation(ConvaiNPCManager.Instance.activeConvaiNPC))
                // Only notify when the player is genuinely out of range: neither an active nor a nearby NPC.
                if (ConvaiNPCManager.Instance.activeConvaiNPC == null && ConvaiNPCManager.Instance.nearbyNPC == null)
                    NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NotCloseEnoughForConversation);
        }
        catch (NullReferenceException)
        {
            // One of the manager singletons was not present in the scene; treat as "no NPC".
            ConvaiLogger.DebugLog("No active NPC available for conversation", ConvaiLogger.LogCategory.UI);
        }
    }
#elif ENABLE_LEGACY_INPUT_MANAGER
    // Legacy input path: poll the talk key each frame and notify when no NPC is active.
    private void Update()
    {
        if (ConvaiInputManager.Instance.WasTalkKeyPressed())
        {
            if (ConvaiNPCManager.Instance.activeConvaiNPC == null)
                NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NotCloseEnoughForConversation);
        }
    }
#endif
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bd477455ea76a6c46b64614d87aa55b9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,94 @@
using System.Collections;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
public class MicrophoneInputChecker : MonoBehaviour
{
    // Duration (seconds) of recorded audio that is analysed for input.
    private const float INPUT_CHECK_DURATION = 3f;

    // Gain applied to the averaged sample level before comparing against THRESHOLD.
    private const float SENSITIVITY = 10f;

    // Average level below which the microphone is considered faulty / silent.
    private const float THRESHOLD = 0.1f;

    // Used to skip the check when the talk button was released prematurely. May be null
    // if no TalkButtonDurationChecker exists in the scene.
    private TalkButtonDurationChecker _talkButtonDurationChecker;

    private void Awake()
    {
        // Find and assign the TalkButtonDurationChecker instance in the scene.
        _talkButtonDurationChecker = FindObjectOfType<TalkButtonDurationChecker>();
    }

    /// <summary>
    ///     Check if the microphone is working by analyzing the provided AudioClip.
    /// </summary>
    /// <param name="audioClip">The audio clip to analyze.</param>
    public void IsMicrophoneWorking(AudioClip audioClip)
    {
        // Stop any existing coroutines to ensure a clean start.
        StopAllCoroutines();
        StartCoroutine(CheckMicrophoneDevice(audioClip));
    }

    // Coroutine that waits INPUT_CHECK_DURATION, then inspects the clip's average level.
    private IEnumerator CheckMicrophoneDevice(AudioClip audioClip)
    {
        if (audioClip == null)
        {
            ConvaiLogger.Error("AudioClip is null!", ConvaiLogger.LogCategory.Character);
            yield break;
        }

        // Wait for the specified duration before analyzing microphone input.
        yield return new WaitForSeconds(INPUT_CHECK_DURATION);

        // If the talk button was released prematurely, abort the microphone check.
        // (Null-guarded: the checker is optional in the scene.)
        if (_talkButtonDurationChecker != null && _talkButtonDurationChecker.isTalkKeyReleasedEarly) yield break;

        // Number of floats covering the checked duration, clamped to the clip's actual
        // length so GetData never reads past the end of a shorter clip.
        int sampleCount = Mathf.Min((int)(INPUT_CHECK_DURATION * audioClip.frequency * audioClip.channels),
            audioClip.samples * audioClip.channels);
        if (sampleCount <= 0) yield break;

        float[] samples = new float[sampleCount];
        if (audioClip.GetData(samples, 0) == false)
        {
            ConvaiLogger.Error("Failed to get audio data!", ConvaiLogger.LogCategory.Character);
            yield break;
        }

        // Accumulate the absolute signal level.
        // BUG FIX: the original multiplied each sample by SENSITIVITY here AND multiplied
        // the averaged level by SENSITIVITY again below, applying the gain twice; the
        // gain is now applied exactly once, after averaging, as the original comment intended.
        float level = 0;
        for (int i = 0; i < sampleCount; i++) level += Mathf.Abs(samples[i]);

        // Normalize the calculated level by the number of samples, then apply the sensitivity gain.
        level = level / sampleCount * SENSITIVITY;

        // Check if the microphone level is below the threshold, indicating a potential issue.
        if (level < THRESHOLD)
        {
            ConvaiLogger.Warn("Microphone Issue Detected!", ConvaiLogger.LogCategory.Character);
            NotificationSystemHandler.Instance.NotificationRequest(NotificationType.MicrophoneIssue);
        }
        else
        {
            ConvaiLogger.Info("Microphone is working fine.", ConvaiLogger.LogCategory.Character);
        }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 596b00c62fa88c645938df61b488e084
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,35 @@
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Checks network reachability once on startup, notifying the player when offline.
/// </summary>
public class NetworkReachabilityChecker : MonoBehaviour
{
    private void Start()
    {
        // Map the current reachability state to a human-readable label for the log.
        string networkStatusDebugText = Application.internetReachability switch
        {
            NetworkReachability.NotReachable => "Not Reachable",
            NetworkReachability.ReachableViaCarrierDataNetwork => "Reachable via Carrier Data Network",
            NetworkReachability.ReachableViaLocalAreaNetwork => "Reachable via Local Area Network",
            _ => ""
        };

        // No connectivity at all: surface the issue to the player via the notification system.
        if (Application.internetReachability == NetworkReachability.NotReachable)
            NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NetworkReachabilityIssue);

        // Log the network reachability status for debugging.
        ConvaiLogger.Info("Network Reachability: " + networkStatusDebugText, ConvaiLogger.LogCategory.Character);
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0001b07d59270994ba1cacc80c615eb4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,35 @@
using Convai.Scripts.Runtime.UI;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Handles the activation status of the notification system based on the Settings Panel toggle.
/// </summary>
public class NotificationSystemActiveStatusHandler : ActiveStatusHandler
{
    /// <summary>
    ///     Restores the saved notification-system status and syncs the toggle UI to it.
    /// </summary>
    protected override void UISaveLoadSystem_OnLoad()
    {
        bool savedStatus = UISaveLoadSystem.Instance.NotificationSystemActiveStatus;
        OnStatusChange(savedStatus);
        _activeStatusToggle.isOn = savedStatus;
    }

    /// <summary>
    ///     Persists the toggle's current value as the notification-system status.
    /// </summary>
    protected override void UISaveLoadSystem_OnSave() =>
        UISaveLoadSystem.Instance.NotificationSystemActiveStatus = _activeStatusToggle.isOn;

    /// <summary>
    ///     Set the activation status of the notification system.
    /// </summary>
    /// <param name="value"> The new activation status. </param>
    public override void OnStatusChange(bool value) =>
        NotificationSystemHandler.Instance.SetNotificationSystemActiveStatus(value);
}
}
// Handles the activation status of the notification system based on Settings Panel Toggle.

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2c5289bfc72186f40b90ff7b9d45894a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,87 @@
using System;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Handles the notification system's behavior and interactions.
/// </summary>
[DefaultExecutionOrder(-100)]
public class NotificationSystemHandler : MonoBehaviour
{
    /// <summary>
    ///     Group containing the predefined notification configurations.
    ///     This asset can be modified in the Unity Editor to define different types of notifications.
    /// </summary>
    [SerializeField] private SONotificationGroup _notificationGroup;

    /// <summary>
    ///     Flag indicating whether the notification system is currently active.
    /// </summary>
    private bool _isNotificationSystemActive = true;

    /// <summary>
    ///     Event triggered when a notification is requested.
    /// </summary>
    public Action<SONotification> OnNotificationRequested;

    /// <summary>
    ///     Singleton instance of the NotificationSystemHandler.
    /// </summary>
    public static NotificationSystemHandler Instance { get; private set; }

    /// <summary>
    ///     Ensure there is only one instance of NotificationSystemHandler.
    /// </summary>
    private void Awake()
    {
        if (Instance != null)
        {
            ConvaiLogger.DebugLog("<color=red> There's More Than One NotificationSystemHandler </color> " + transform + " - " + Instance, ConvaiLogger.LogCategory.UI);
            Destroy(gameObject);
            return;
        }
        Instance = this;
    }

    /// <summary>
    ///     Requests a notification of the specified type.
    /// </summary>
    /// <param name="notificationType">The type of notification to request.</param>
    public void NotificationRequest(NotificationType notificationType)
    {
        // Check if the notification system is currently active.
        if (!_isNotificationSystemActive) return;

        // Guard against a missing / unassigned notification group so a misconfigured
        // scene logs an error instead of throwing a NullReferenceException.
        if (_notificationGroup == null || _notificationGroup.SONotifications == null)
        {
            ConvaiLogger.Error("There is no Notification defined for the selected Notification Type!", ConvaiLogger.LogCategory.UI);
            return;
        }

        // Search for the requested notification type in the predefined array.
        SONotification requestedSONotification = Array.Find(_notificationGroup.SONotifications,
            notification => notification.NotificationType == notificationType);

        // If the requested notification is not found, log an error.
        if (requestedSONotification == null)
        {
            ConvaiLogger.Error("There is no Notification defined for the selected Notification Type!", ConvaiLogger.LogCategory.UI);
            return;
        }

        // Invoke the OnNotificationRequested event with the requested notification.
        OnNotificationRequested?.Invoke(requestedSONotification);
    }

    /// <summary>
    ///     Sets the activation status of the notification system.
    /// </summary>
    /// <param name="value">The new activation status.</param>
    public void SetNotificationSystemActiveStatus(bool value)
    {
        _isNotificationSystemActive = value;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d911410153c6d594098cac3c3bfa456d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,44 @@
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Enumeration defining various types of in-app notifications.
///     Each enum value represents a specific scenario or issue that can trigger a notification.
/// </summary>
public enum NotificationType
{
    /// <summary>
    ///     Indicates a notification related to microphone problems.
    /// </summary>
    MicrophoneIssue,

    /// <summary>
    ///     Indicates a notification related to network reachability issues.
    /// </summary>
    NetworkReachabilityIssue,

    /// <summary>
    ///     Indicates a notification when the user is not in proximity to initiate a conversation.
    /// </summary>
    NotCloseEnoughForConversation,

    /// <summary>
    ///     Indicates a notification when a user releases the talk button prematurely during a conversation.
    /// </summary>
    TalkButtonReleasedEarly,

    /// <summary>
    ///     Indicates that no microphone device was detected in the system.
    /// </summary>
    NoMicrophoneDetected,

    /// <summary>
    ///     Indicates that no API key was found.
    /// </summary>
    APIKeyNotFound,

    /// <summary>
    ///     Indicates that the usage limit for the current plan has been exceeded.
    /// </summary>
    UsageLimitExceeded
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 18458e12a4b7457da0eb049ea8d56d4c
timeCreated: 1698156821

View File

@ -0,0 +1,34 @@
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     ScriptableObject describing a single notification: its type, icon, title, and message.
///     Instances are created via the "Convai/Notification System/Notification" asset menu.
/// </summary>
[CreateAssetMenu(menuName = "Convai/Notification System/Notification", fileName = "New Notification")]
public class SONotification : ScriptableObject
{
    /// <summary>
    ///     The type of the notification.
    /// </summary>
    [Tooltip("The type of the notification.")]
    public NotificationType NotificationType;

    /// <summary>
    ///     The icon to be displayed with the notification.
    /// </summary>
    [Tooltip("The icon to be displayed with the notification.")]
    public Sprite Icon;

    /// <summary>
    ///     The notification title.
    /// </summary>
    [Tooltip("The notification title.")] public string NotificationTitle;

    /// <summary>
    ///     The text content of the notification.
    /// </summary>
    [TextArea(10, 10)] [Tooltip("The text content of the notification.")]
    public string NotificationMessage;
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6b33bf54ff467c742a84ac58d34105ec
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,18 @@
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Represents a group of notifications as a ScriptableObject.
///     This allows for easy configuration and management of different notifications in the Unity Editor.
///     Instances are created via the "Convai/Notification System/Notification Group" asset menu.
/// </summary>
[CreateAssetMenu(menuName = "Convai/Notification System/Notification Group", fileName = "New Notification Group")]
public class SONotificationGroup : ScriptableObject
{
    /// <summary>
    ///     Array of SONotification objects.
    ///     Each object represents a unique notification that can be triggered in the application.
    /// </summary>
    public SONotification[] SONotifications;
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73c98f07d31af334ba49c31a867600b2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0323cdb3f17fa914cae382e617430dd2
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 87748cd0f7abedf4e8dd7cf60e5fb99a
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fe9295a7cd110d545b49f77fcc49c489
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5e8394cce5330644594a848783844973
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7a58608b2e0aa77418e15e4b4ef0a1fa
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: ee7d034a751672c449ab90856e05919c
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 374f6f70a1f7d9546926f20184467b32
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 1dbb77ab53e0d714a9f00cba95a25a46
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8c855f56ec2cd0f4d9c2458cc1c3db31
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,118 @@
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.UI;
using TMPro;
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Monitors the duration of the talk button press and notifies the Notification System if released prematurely.
/// </summary>
public class TalkButtonDurationChecker : MonoBehaviour
{
    /// <summary>
    ///     Minimum duration required for a valid talk action.
    /// </summary>
    private const float MIN_TALK_DURATION = 0.5f;

    /// <summary>
    ///     Flag indicating whether the talk button was released prematurely.
    /// </summary>
    [HideInInspector] public bool isTalkKeyReleasedEarly;

    // Input field of the active NPC's chat UI; typing there must not count as a talk action.
    private TMP_InputField _activeInputField;

    /// <summary>
    ///     Timer to track the duration of the talk button press.
    /// </summary>
    private float _timer;

    // May legitimately be null if the scene has no UIAppearanceSettings object.
    private UIAppearanceSettings _uiAppearanceSettings;

    private void Awake()
    {
        _uiAppearanceSettings = FindObjectOfType<UIAppearanceSettings>();
    }

    /// <summary>
    ///     Update is called once per frame.
    ///     Accumulates the hold time of the talk key unless an input field has focus.
    /// </summary>
    private void Update()
    {
        if (ConvaiInputManager.Instance.IsTalkKeyHeld && !UIUtilities.IsAnyInputFieldFocused()) _timer += Time.deltaTime;
    }

    private void OnEnable()
    {
        ConvaiNPCManager.Instance.OnActiveNPCChanged += ConvaiNPCManager_OnActiveNPCChanged;
        // Guard: FindObjectOfType in Awake may not have found an instance.
        if (_uiAppearanceSettings != null)
            _uiAppearanceSettings.OnAppearanceChanged += UIAppearanceSettings_OnAppearanceChanged;
        ConvaiInputManager.Instance.talkKeyInteract += HandleTalkButtonRelease;
    }

    private void OnDisable()
    {
        // Singletons may already be gone during application / scene teardown.
        if (ConvaiNPCManager.Instance != null)
            ConvaiNPCManager.Instance.OnActiveNPCChanged -= ConvaiNPCManager_OnActiveNPCChanged;
        if (_uiAppearanceSettings != null)
            _uiAppearanceSettings.OnAppearanceChanged -= UIAppearanceSettings_OnAppearanceChanged;
        // BUG FIX: the original subscribed to talkKeyInteract in OnEnable but never
        // unsubscribed here, so each enable/disable cycle stacked a duplicate handler
        // (duplicate notifications and a dangling-reference leak).
        if (ConvaiInputManager.Instance != null)
            ConvaiInputManager.Instance.talkKeyInteract -= HandleTalkButtonRelease;
    }

    /// <summary>
    ///     Invoked when the talk key interaction changes; on release, validates the press duration.
    /// </summary>
    /// <param name="releaseState">True while the key is held; false on release.</param>
    private void HandleTalkButtonRelease(bool releaseState)
    {
        // Only react to an actual release, and ignore releases while typing.
        if (releaseState || UIUtilities.IsAnyInputFieldFocused()) return;
        if (_activeInputField != null && _activeInputField.isFocused)
        {
            _timer = 0;
            return;
        }
        CheckTalkButtonRelease();
        // Reset the timer for the next talk action.
        _timer = 0;
    }

    // Tracks the chat input field of whichever NPC becomes active.
    private void ConvaiNPCManager_OnActiveNPCChanged(ConvaiNPC convaiNpc)
    {
        if (convaiNpc == null)
        {
            _activeInputField = null;
            return;
        }
        _activeInputField = convaiNpc.playerInteractionManager.FindActiveInputField();
    }

    // Re-resolves the active input field when the UI appearance (layout) changes.
    private void UIAppearanceSettings_OnAppearanceChanged()
    {
        ConvaiNPC convaiNpc = ConvaiNPCManager.Instance.activeConvaiNPC;
        if (convaiNpc == null)
        {
            _activeInputField = null;
            return;
        }
        _activeInputField = convaiNpc.playerInteractionManager.FindActiveInputField();
    }

    /// <summary>
    ///     Checks if the talk button was released prematurely and triggers a notification if so.
    /// </summary>
    private void CheckTalkButtonRelease()
    {
        // Initialize the flag to false.
        isTalkKeyReleasedEarly = false;
        // Trigger a notification if the talk button is released before reaching the minimum required duration.
        if (_timer < MIN_TALK_DURATION)
        {
            // Only meaningful while a conversation target exists.
            if (ConvaiNPCManager.Instance.activeConvaiNPC == null) return;
            isTalkKeyReleasedEarly = true;
            NotificationSystemHandler.Instance.NotificationRequest(NotificationType.TalkButtonReleasedEarly);
        }
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ce5db0c0354de754f99bd35c9f7fb96a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,67 @@
using System;
using TMPro;
using UnityEngine;
using UnityEngine.UI;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
///     Represents a UI notification element that can be activated or deactivated.
/// </summary>
public class UINotification : MonoBehaviour
{
    /// <summary>
    ///     The RectTransform of the notification UI element.
    /// </summary>
    public RectTransform NotificationRectTransform;

    /// <summary>
    ///     The image component for displaying the notification icon.
    /// </summary>
    [SerializeField] private Image _notificationIcon;

    /// <summary>
    ///     The TextMeshProUGUI component for displaying the notification title.
    /// </summary>
    [SerializeField] private TextMeshProUGUI _notificationTitleText;

    /// <summary>
    ///     The TextMeshProUGUI component for displaying the notification text.
    /// </summary>
    [SerializeField] private TextMeshProUGUI _notificationMessageText;

    /// <summary>
    ///     Starts hidden; the controller activates this widget when a notification is shown.
    /// </summary>
    private void Awake() => SetActive(false);

    /// <summary>
    ///     Initializes the UI notification with the provided Notification data.
    /// </summary>
    /// <param name="soNotification">The notification data to initialize the UI notification with.</param>
    public void Initialize(SONotification soNotification)
    {
        if (soNotification == null) throw new ArgumentNullException(nameof(soNotification), "SONotification is null.");

        // Copy the notification's visual data onto the widget, then show it.
        _notificationTitleText.text = soNotification.NotificationTitle;
        _notificationMessageText.text = soNotification.NotificationMessage;
        _notificationIcon.sprite = soNotification.Icon;
        SetActive(true);
    }

    /// <summary>
    ///     Sets the active state of the notification UI element.
    /// </summary>
    /// <param name="value">The new active state.</param>
    public void SetActive(bool value) => gameObject.SetActive(value);
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 894cc2b4c2298fb4a98bd3d9f2e8d6ba
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,317 @@
using System.Collections;
using System.Collections.Generic;
using Convai.Scripts.Runtime.UI;
using UnityEngine;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
/// This class is responsible for controlling the UI notifications in the game.
/// It handles the creation, activation, deactivation, and animation of notifications.
/// </summary>
public class UINotificationController : MonoBehaviour
{
/// <summary>
///     Maximum number of notifications that can be displayed at the same time (pool size).
/// </summary>
private const int MAX_NUMBER_OF_NOTIFICATION_AT_SAME_TIME = 3;

/// <summary>
///     Prefab instantiated to create the pooled UI notification widgets.
/// </summary>
[Header("References")] [SerializeField]
private UINotification _uiNotificationPrefab;

/// <summary>
///     Vertical spacing (in anchored-position units) between stacked notifications.
/// </summary>
[Header("Configurations")] [SerializeField]
private int _spacing = 100;

/// <summary>
///     Position for the topmost active notification.
/// </summary>
[Tooltip("Starting position for the first notification; Y value adjusts sequentially for each subsequent notification.")] [SerializeField]
private Vector2 _activeNotificationPos;

/// <summary>
///     Off-screen position where deactivated notifications are parked.
/// </summary>
[SerializeField] private Vector2 _deactivatedNotificationPos;

// How long a notification stays fully visible before sliding out.
[Header("UI Notification Animation Values")] [SerializeField]
private float _activeDuration = 4f;

// Duration of a single slide (slip) animation.
[SerializeField] private float _slipDuration = 0.3f;

// Extra wait appended after each slide animation.
[SerializeField] private float _delay = 0.3f;

// Easing curve applied to the slide animation.
[SerializeField] private AnimationCurve _slipAnimationCurve;

// Canvas fade-in / fade-out durations, in seconds.
private readonly float _fadeInDuration = 0.35f;
private readonly float _fadeOutDuration = 0.2f;

/// <summary>
///     List to keep track of the order in which pending notifications were requested.
/// </summary>
private readonly List<SONotification> _pendingNotificationsOrder = new();

// Pool queues (currently shown vs. parked off-screen) and fading components.
private Queue<UINotification> _activeUINotifications;
private CanvasGroup _canvasGroup;
private Queue<UINotification> _deactivatedUINotifications;
private FadeCanvas _fadeCanvas;

/// <summary>
///     Flag indicating whether a UI notification movement animation is currently in progress.
///     Used to prevent overlapping animation coroutines for UI notifications.
/// </summary>
private bool _isNotificationAnimationInProgress;
/// <summary>
///     Caches the required sibling components and builds the pooled UI notifications.
/// </summary>
private void Awake()
{
    // Components used to fade the whole notification canvas in and out.
    _fadeCanvas = GetComponent<FadeCanvas>();
    _canvasGroup = GetComponent<CanvasGroup>();

    // Pre-instantiate the pooled notification widgets.
    InitializeUINotifications();
}
/// <summary>
///     This function is called when the object becomes enabled and active.
///     Subscribes to the handler's OnNotificationRequested event so requests reach this controller.
/// </summary>
private void OnEnable()
{
    NotificationSystemHandler.Instance.OnNotificationRequested += NotificationSystemHandler_OnNotificationRequested;
}
/// <summary>
///     This function is called when the behaviour becomes disabled or inactive.
///     Unsubscribes from OnNotificationRequested to avoid dangling handlers.
/// </summary>
private void OnDisable()
{
    NotificationSystemHandler.Instance.OnNotificationRequested -= NotificationSystemHandler_OnNotificationRequested;
}
/// <summary>
///     Handles a new notification request by queueing it and, when no animation is
///     currently running, activating a pooled UI notification for it immediately.
/// </summary>
/// <param name="SONotification">The requested SONotification to be processed.</param>
private void NotificationSystemHandler_OnNotificationRequested(SONotification SONotification)
{
    // Remember the request; pending entries are drained as notifications finish.
    _pendingNotificationsOrder.Add(SONotification);

    // While an animation is running, the request stays queued until the current one ends.
    if (_isNotificationAnimationInProgress) return;

    // Activate a pooled notification now; if none is free, the request stays pending.
    if (TryInitializeNewNotification(SONotification, out UINotification uiNotification))
        StartNotificationUICoroutine(uiNotification);
}
/// <summary>
///     Creates the pools used for active and deactivated UI notifications and
///     pre-instantiates the maximum number of notification widgets.
/// </summary>
private void InitializeUINotifications()
{
    _activeUINotifications = new Queue<UINotification>();
    _deactivatedUINotifications = new Queue<UINotification>();

    // Instantiate the full pool up front; each widget starts parked at the hidden position.
    for (int index = 0; index < MAX_NUMBER_OF_NOTIFICATION_AT_SAME_TIME; index++)
    {
        UINotification pooledNotification = Instantiate(_uiNotificationPrefab, transform);
        pooledNotification.NotificationRectTransform.anchoredPosition = _deactivatedNotificationPos;
        _deactivatedUINotifications.Enqueue(pooledNotification);
    }
}
/// <summary>
///     Attempts to take a pooled UI notification and initialize it with the given SONotification.
/// </summary>
/// <param name="SONotification">The SONotification to be used for initializing the UI notification.</param>
/// <param name="uiNotification">The initialized UINotification if successful, otherwise null.</param>
/// <returns>True if initialization is successful, false otherwise.</returns>
private bool TryInitializeNewNotification(SONotification SONotification, out UINotification uiNotification)
{
    uiNotification = GetAvailableUINotification();

    // A free widget exists: configure and hand it back.
    if (uiNotification != null)
    {
        uiNotification.Initialize(SONotification);
        return true;
    }

    // All pooled widgets are currently showing notifications.
    return false;
}
/// <summary>
///     Starts the fade animation for the whole canvas, enqueues the notification as active,
///     and kicks off its individual lifecycle coroutine.
/// </summary>
/// <param name="uiNotification">The UINotification to be animated and added to the active queue.</param>
private void StartNotificationUICoroutine(UINotification uiNotification)
{
    // Extra padding so the canvas fade-out does not clip the slide-out animation.
    const float extraDelayForNotificationEndTransition = 0.5f;

    // Total visible time: fade-in + hold + slide in and out (+ per-move delay) + padding.
    float totalAnimationDuration = _fadeInDuration + _activeDuration + 2 * _slipDuration + _delay + extraDelayForNotificationEndTransition;

    // Fade the notification canvas in, keep it visible for the full duration, then fade out.
    _fadeCanvas.StartFadeInFadeOutWithGap(_canvasGroup, _fadeInDuration, _fadeOutDuration, totalAnimationDuration);

    _activeUINotifications.Enqueue(uiNotification);
    StartCoroutine(StartNotificationUI(uiNotification));
}
/// <summary>
///     Coroutine for managing the lifecycle of a UI notification, including its activation,
///     display duration, and deactivation.
/// </summary>
/// <param name="uiNotification">The UINotification to be managed.</param>
private IEnumerator StartNotificationUI(UINotification uiNotification)
{
    // This notification is now being shown, so drop the oldest pending entry
    // (requests are consumed in FIFO order from the front of the list).
    int firstIndex = 0;
    _pendingNotificationsOrder.RemoveAt(firstIndex);

    // Slide into the active slot, hold for the display duration, re-stack, then slide out.
    yield return MoveUINotificationToActivePosition(uiNotification);
    yield return new WaitForSeconds(_activeDuration);
    UpdateUINotificationPositions();
    yield return MoveUINotificationToHiddenPosition(uiNotification);

    // Return the widget to the pool and re-stack the remaining active notifications.
    DeactivateAndEnqueueUINotification(uiNotification);

    // If more requests arrived while this one was showing, start the next immediately.
    if (AreTherePendingNotifications()) TryInitializeAndStartNewNotification();

    // Update UI notification positions after the lifecycle is complete.
    UpdateUINotificationPositions();
}
/// <summary>
///     Slides the UI notification into its slot below any already-active notifications.
/// </summary>
private IEnumerator MoveUINotificationToActivePosition(UINotification uiNotification)
{
    // Each active notification occupies a slot _spacing units below the previous one.
    float slotY = _activeNotificationPos.y - _spacing * (_activeUINotifications.Count - 1);
    yield return StartCoroutine(MoveUINotificationToTargetPos(uiNotification, new Vector2(_activeNotificationPos.x, slotY)));
}
/// <summary>
///     Slides the UI notification back to the off-screen (deactivated) position.
/// </summary>
private IEnumerator MoveUINotificationToHiddenPosition(UINotification uiNotification)
{
    yield return StartCoroutine(MoveUINotificationToTargetPos(uiNotification, _deactivatedNotificationPos));
}
/// <summary>
///     Deactivates the UI notification, updates positions, and enqueues it for later use.
/// </summary>
private void DeactivateAndEnqueueUINotification(UINotification uiNotification)
{
    uiNotification.SetActive(false);
    // NOTE(review): Dequeue() removes the *front* of the active queue, which is assumed
    // to be this uiNotification (FIFO lifecycle) — confirm if lifecycles can overlap.
    _activeUINotifications.Dequeue();
    _deactivatedUINotifications.Enqueue(uiNotification);
    UpdateUINotificationPositions();
}
/// <summary>
/// Attempts to initialize the first pending notification; when initialization
/// succeeds, starts its display lifecycle.
/// </summary>
private void TryInitializeAndStartNewNotification()
{
    if (!TryInitializeNewNotification(_pendingNotificationsOrder[0], out UINotification nextNotification))
        return;
    StartNotificationUICoroutine(nextNotification);
}
/// <summary>
/// Smoothly moves the UI notification to the target position over a specified duration.
/// </summary>
/// <param name="uiNotification">The UINotification to be moved.</param>
/// <param name="targetPos">The target position to move the UINotification to.</param>
/// <remarks>
/// The loop runs for _slipDuration + _delay, but the interpolation fraction is
/// clamped at _slipDuration — so the notification travels for _slipDuration and
/// then holds at the target for the remaining _delay.
/// NOTE(review): _isNotificationAnimationInProgress is one bool shared by every
/// concurrently running move coroutine (UpdateUINotificationPositions can start
/// several at once); the first coroutine to finish clears the flag while the
/// others are still animating — confirm consumers of the flag tolerate this.
/// </remarks>
private IEnumerator MoveUINotificationToTargetPos(UINotification uiNotification, Vector2 targetPos)
{
    // Set flag to indicate that a notification animation is in progress
    _isNotificationAnimationInProgress = true;
    float elapsedTime = 0f;
    Vector2 startPos = uiNotification.NotificationRectTransform.anchoredPosition;
    // Move the UI notification smoothly to the target position over the specified duration
    while (elapsedTime <= _slipDuration + _delay)
    {
        elapsedTime += Time.deltaTime;
        float percent = Mathf.Clamp01(elapsedTime / _slipDuration);
        float curvePercent = _slipAnimationCurve.Evaluate(percent);
        uiNotification.NotificationRectTransform.anchoredPosition = Vector2.Lerp(startPos, targetPos, curvePercent);
        yield return null;
    }
    // Reset the flag once the animation is complete
    _isNotificationAnimationInProgress = false;
}
/// <summary>
/// Re-targets every active UI notification so they stack downwards from the
/// base active position, one _spacing step apart, in queue order.
/// </summary>
private void UpdateUINotificationPositions()
{
    int slot = 0;
    foreach (UINotification notification in _activeUINotifications)
    {
        Vector2 slotPosition = new(
            _activeNotificationPos.x,
            _activeNotificationPos.y - _spacing * slot);
        StartCoroutine(MoveUINotificationToTargetPos(notification, slotPosition));
        slot++;
    }
}
/// <summary>
/// Pulls a reusable UI notification out of the deactivated pool.
/// </summary>
/// <returns>A pooled notification, or null when the pool is empty.</returns>
private UINotification GetAvailableUINotification()
{
    return _deactivatedUINotifications.Count > 0
        ? _deactivatedUINotifications.Dequeue()
        : null;
}
/// <summary>
/// Reports whether any notifications are still waiting to be shown.
/// </summary>
/// <returns>True when the pending order list is non-empty.</returns>
private bool AreTherePendingNotifications()
{
    return _pendingNotificationsOrder.Count > 0;
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ad1fae26184a1504bbf417585440fe12
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: fe6af3c5595f83b4aaf1e1c05ef9b819
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,25 @@
using UnityEngine;
#if ENABLE_INPUT_SYSTEM
using UnityEngine.InputSystem.UI;
#elif ENABLE_LEGACY_INPUT_MANAGER
using UnityEngine.EventSystems;
#endif
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
/// Ensures exactly one UI input module exists in the scene for whichever
/// input backend the project is compiled with (new Input System or legacy).
/// </summary>
public class ConvaiDynamicInputSystem : MonoBehaviour
{
    private void Awake()
    {
#if ENABLE_INPUT_SYSTEM
        // New Input System: add the UI module only when none exists yet.
        if (FindObjectOfType<InputSystemUIInputModule>() == null)
        {
            gameObject.AddComponent<InputSystemUIInputModule>();
        }
#elif ENABLE_LEGACY_INPUT_MANAGER
        // Legacy input manager: same idea with the standalone module.
        if (FindObjectOfType<StandaloneInputModule>() == null) gameObject.AddComponent<StandaloneInputModule>();
#endif
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7611c6ad1a67fed44afca249d0bcd288
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,119 @@
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.UI;
using UnityEngine;
using UnityEngine.EventSystems;
namespace Convai.Scripts.Runtime.Addons
{
/// <summary>
/// Handles first-person player movement: walking, running, jumping, gravity,
/// and mouse-look for both the player body (yaw) and its camera (pitch).
/// </summary>
[RequireComponent(typeof(CharacterController))]
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Player Movement")]
[HelpURL("https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/scripts-overview")]
public class ConvaiPlayerMovement : MonoBehaviour
{
    [Header("Movement Parameters")] [SerializeField] [Tooltip("The speed at which the player walks.")] [Range(1, 10)]
    private float walkingSpeed = 3f;
    [SerializeField] [Tooltip("The speed at which the player runs.")] [Range(1, 10)]
    private float runningSpeed = 8f;
    [SerializeField] [Tooltip("The speed at which the player jumps.")] [Range(1, 10)]
    private float jumpSpeed = 4f;
    [Header("Gravity & Grounding")] [SerializeField] [Tooltip("The gravity applied to the player.")] [Range(1, 10)]
    private float gravity = 9.8f;
    [Header("Camera Parameters")] [SerializeField] [Tooltip("The main camera the player uses.")]
    private Camera playerCamera;
    [SerializeField] [Tooltip("Speed at which the player can look around.")] [Range(0, 1)]
    private float lookSpeedMultiplier = 0.5f;
    [SerializeField] [Tooltip("Limit of upwards and downwards look angles.")] [Range(1, 90)]
    private float lookXLimit = 45.0f;
    private CharacterController _characterController;
    private Vector3 _moveDirection = Vector3.zero;
    // Accumulated camera pitch in degrees; clamped to +/- lookXLimit.
    private float _rotationX;

    // Singleton instance; later duplicates destroy themselves in Awake.
    public static ConvaiPlayerMovement Instance { get; private set; }

    private void Awake()
    {
        // Singleton pattern to ensure only one instance exists
        if (Instance == null)
            Instance = this;
        else
            Destroy(gameObject);
    }

    private void Start()
    {
        _characterController = GetComponent<CharacterController>();
    }

    private void Update()
    {
        // Check for running state and move the player
        MovePlayer();
        // Handle the player and camera rotation
        RotatePlayerAndCamera();
    }

    private void OnEnable()
    {
        ConvaiInputManager.Instance.jumping += Jump;
    }

    private void OnDisable()
    {
        // FIX: the jump handler was subscribed in OnEnable but never removed,
        // leaking the subscription and letting the input manager invoke a
        // disabled/destroyed component. Guard the singleton for teardown order.
        if (ConvaiInputManager.Instance != null)
            ConvaiInputManager.Instance.jumping -= Jump;
    }

    /// <summary>
    /// Applies horizontal movement from input (ignored while the pointer is over
    /// UI or an input field has focus) plus accumulated vertical velocity.
    /// </summary>
    private void MovePlayer()
    {
        Vector3 horizontalMovement = Vector3.zero;
        if (!EventSystem.current.IsPointerOverGameObject() && !UIUtilities.IsAnyInputFieldFocused())
        {
            Vector3 forward = transform.TransformDirection(Vector3.forward);
            Vector3 right = transform.TransformDirection(Vector3.right);
            float speed = ConvaiInputManager.Instance.isRunning ? runningSpeed : walkingSpeed;
            Vector2 moveVector = ConvaiInputManager.Instance.moveVector;
            float curSpeedX = speed * moveVector.x;
            float curSpeedY = speed * moveVector.y;
            horizontalMovement = forward * curSpeedY + right * curSpeedX;
        }
        // Accumulate gravity while airborne; grounded frames keep the last value,
        // which CharacterController.Move resolves against the floor.
        if (!_characterController.isGrounded)
            _moveDirection.y -= gravity * Time.deltaTime;
        // Move the character
        _characterController.Move((_moveDirection + horizontalMovement) * Time.deltaTime);
    }

    /// <summary>
    /// Gives the player upward velocity when grounded and no input field is focused.
    /// </summary>
    private void Jump()
    {
        if (_characterController.isGrounded && !UIUtilities.IsAnyInputFieldFocused()) _moveDirection.y = jumpSpeed;
    }

    /// <summary>
    /// Applies mouse-look: pitch on the camera (clamped), yaw on the body.
    /// Does nothing unless the cursor is locked to the game window.
    /// </summary>
    private void RotatePlayerAndCamera()
    {
        if (Cursor.lockState != CursorLockMode.Locked) return;
        // Vertical rotation
        _rotationX -= ConvaiInputManager.Instance.lookVector.y * lookSpeedMultiplier;
        _rotationX = Mathf.Clamp(_rotationX, -lookXLimit, lookXLimit);
        playerCamera.transform.localRotation = Quaternion.Euler(_rotationX, 0, 0);
        // Horizontal rotation
        float rotationY = ConvaiInputManager.Instance.lookVector.x * lookSpeedMultiplier;
        transform.rotation *= Quaternion.Euler(0, rotationY, 0);
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: adc3b3c371ebd1543ad6696b74dbbe9f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 189248cf557957840a0084f28183b3f9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
using UnityEngine;
namespace Convai.Scripts.Runtime.Attributes
{
/// <summary>
/// Marker attribute for serialized fields that should be shown as
/// non-editable (read-only) in the Unity inspector.
/// </summary>
/// <remarks>
/// NOTE(review): the attribute carries no data; the actual read-only rendering
/// is presumably implemented by a matching PropertyDrawer elsewhere — confirm.
/// </remarks>
public class ReadOnlyAttribute : PropertyAttribute
{
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 4af0f963530e4aeca5f5747085ac74fb
timeCreated: 1701083156

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7d932a943c13cad4381fdb6714489c14
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,227 @@
using System.Collections;
using System.Text.RegularExpressions;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
// TODO: Change URL to point to the blinking script documentation after it is created
/// <summary>
/// Controls the blinking behavior of a character model in Unity.
/// </summary>
/// <remarks>
/// Instructions to find the index of left / right eyelids in BlendShapes:
/// <list type="bullet">
/// <item>
/// <description>Select your character model in the scene which has the SkinnedMeshRenderer component.</description>
/// </item>
/// <item>
/// <description>Look for the blend shapes in the SkinnedMeshRenderer component in the Inspector window.</description>
/// </item>
/// <item>
/// <description>
/// The count (from 0) of blend shape until "EyeBlink_L" or similar is the index of the left
/// eyelid.
/// </description>
/// </item>
/// <item>
/// <description>
/// The count (from 0) of blend shape until "EyeBlink_R" or similar is the index of the right
/// eyelid.
/// </description>
/// </item>
/// </list>
/// </remarks>
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Character Blinking")]
public class ConvaiBlinkingHandler : MonoBehaviour
{
    [SerializeField] [Tooltip("The SkinnedMeshRenderer for the character's face")]
    private SkinnedMeshRenderer faceSkinnedMeshRenderer;
    [SerializeField] [Tooltip("The index of the left eyelid blend shape in the SkinnedMeshRenderer")]
    private int indexOfLeftEyelid = -1;
    [SerializeField] [Tooltip("The index of the right eyelid blend shape in the SkinnedMeshRenderer")]
    private int indexOfRightEyelid = -1;
    // NOTE(review): Unity blend-shape weights conventionally range 0-100; a maximum
    // of 1 yields a barely visible blink unless this mesh uses normalized weights — confirm.
    [SerializeField] [Tooltip("Maximum value of the blendshape of the eye lid")]
    private float maxBlendshapeWeight = 1;
    [SerializeField] [Tooltip("The minimum amount of time, in seconds, for a blink. Positive values only.")] [Range(0.1f, 1f)]
    private float minBlinkDuration = 0.2f;
    [SerializeField]
    [Tooltip(
        "The maximum amount of time, in seconds, for a blink. Must be greater than the minimum blink duration.")]
    [Range(0.1f, 1f)]
    private float maxBlinkDuration = 0.3f;
    [SerializeField] [Tooltip("The minimum amount of time, in seconds, between blinks. Positive values only.")] [Range(1f, 10f)]
    private float minBlinkInterval = 2;
    [SerializeField]
    [Tooltip(
        "The maximum amount of time, in seconds, between blinks. Must be greater than the minimum blink interval.")]
    [Range(1f, 10f)]
    private float maxBlinkInterval = 3;

    /// <summary>
    /// Resolves the face SkinnedMeshRenderer and the eyelid blend-shape indices,
    /// then starts the blinking coroutine. Resolved indices are cached per NPC
    /// name in PlayerPrefs so the blend-shape scan runs at most once per character.
    /// </summary>
    private void Start()
    {
        ConvaiNPC convaiNPC = GetComponent<ConvaiNPC>();
        if (convaiNPC == null)
        {
            // FIX: previously this threw a NullReferenceException when no ConvaiNPC was attached.
            ConvaiLogger.Error("ConvaiBlinkingHandler requires a ConvaiNPC component on the same GameObject.",
                ConvaiLogger.LogCategory.Character);
            return;
        }
        string npcName = convaiNPC.characterName; // fetch NPC name from ConvaiNPC script
        string leftBlinkKey = npcName + "LeftEyelid";
        string rightBlinkKey = npcName + "RightEyelid";
        // Try the cached indices first.
        // NOTE(review): the PlayerPrefs cache can go stale if the character mesh
        // changes between runs — confirm this is acceptable.
        if (indexOfLeftEyelid == -1)
            indexOfLeftEyelid = PlayerPrefs.GetInt(leftBlinkKey, -1);
        if (indexOfRightEyelid == -1)
            indexOfRightEyelid = PlayerPrefs.GetInt(rightBlinkKey, -1);
        if (faceSkinnedMeshRenderer == null)
            faceSkinnedMeshRenderer = GetSkinnedMeshRendererWithRegex(transform);
        if (faceSkinnedMeshRenderer == null)
        {
            // FIX: bail out instead of falling through and starting the blink
            // coroutine with a null renderer, which previously threw a
            // NullReferenceException on every blink frame.
            ConvaiLogger.Error("No SkinnedMeshRenderer found with matching name.", ConvaiLogger.LogCategory.Character);
            return;
        }
        // If the cache did not supply both indices, scan the mesh's blend shapes by name.
        if (indexOfLeftEyelid == -1 || indexOfRightEyelid == -1)
        {
            for (int i = 0; i < faceSkinnedMeshRenderer.sharedMesh.blendShapeCount; i++)
            {
                string blendShapeName = faceSkinnedMeshRenderer.sharedMesh.GetBlendShapeName(i).ToLower();
                if (indexOfLeftEyelid == -1 && Regex.IsMatch(blendShapeName, @"(eye).*(blink).*(l|[Ll]eft)"))
                {
                    indexOfLeftEyelid = i;
                    PlayerPrefs.SetInt(leftBlinkKey, i);
                }
                else if (indexOfRightEyelid == -1 && Regex.IsMatch(blendShapeName, @"(eye).*(blink).*(r|[Rr]ight)"))
                {
                    indexOfRightEyelid = i;
                    PlayerPrefs.SetInt(rightBlinkKey, i);
                }
            }
            if (indexOfLeftEyelid == -1 || indexOfRightEyelid == -1)
            {
                ConvaiLogger.Error("Left and/or Right eyelid blend shapes not found!", ConvaiLogger.LogCategory.Character);
                return;
            }
        }
        StartCoroutine(BlinkCoroutine());
    }

    /// <summary>
    /// Keeps the inspector-editable max values at or above their min counterparts.
    /// </summary>
    private void OnValidate()
    {
        maxBlinkDuration = Mathf.Max(minBlinkDuration, maxBlinkDuration);
        maxBlinkInterval = Mathf.Max(minBlinkInterval, maxBlinkInterval);
    }

    /// <summary>
    /// Searches the direct children of <paramref name="parentTransform"/> for the
    /// first SkinnedMeshRenderer whose GameObject name matches "*_Head" or "CC_Base_Body".
    /// </summary>
    /// <returns>The matching renderer, or null when none is found.</returns>
    private SkinnedMeshRenderer GetSkinnedMeshRendererWithRegex(Transform parentTransform)
    {
        SkinnedMeshRenderer findFaceSkinnedMeshRenderer = null;
        Regex regexPattern = new("(.*_Head|CC_Base_Body)");
        foreach (Transform child in parentTransform)
            if (regexPattern.IsMatch(child.name))
            {
                findFaceSkinnedMeshRenderer = child.GetComponent<SkinnedMeshRenderer>();
                if (findFaceSkinnedMeshRenderer != null) break;
            }
        return findFaceSkinnedMeshRenderer;
    }

    /// <summary>
    /// Endless blink loop: close the eyes over a random blink duration, hold them
    /// closed for the same duration, reopen over the same duration, then wait a
    /// random interval before the next blink.
    /// </summary>
    /// <returns>Enumerator to control the sequence of this coroutine</returns>
    private IEnumerator BlinkCoroutine()
    {
        while (true)
        {
            float blinkDuration = Random.Range(minBlinkDuration, maxBlinkDuration);
            float blinkInterval = Random.Range(minBlinkInterval, maxBlinkInterval);
            // Close the eyes: ramp the blend-shape weight from 0 to the maximum.
            for (float t = 0.0f; t < blinkDuration; t += Time.deltaTime)
            {
                float normalizedTime = t / blinkDuration;
                SetEyelidsBlendShapeWeight(maxBlendshapeWeight * normalizedTime);
                yield return null;
            }
            SetEyelidsBlendShapeWeight(maxBlendshapeWeight);
            // Hold the eyes closed briefly so the blink reads naturally.
            yield return new WaitForSeconds(blinkDuration);
            // Reopen the eyes: ramp the weight back down to 0.
            for (float t = 0.0f; t < blinkDuration; t += Time.deltaTime)
            {
                float normalizedTime = t / blinkDuration;
                SetEyelidsBlendShapeWeight(maxBlendshapeWeight - maxBlendshapeWeight * normalizedTime);
                yield return null;
            }
            yield return new WaitForSeconds(blinkInterval);
        }
    }

    /// <summary>
    /// Sets the same weight on both eyelid blend shapes.
    /// </summary>
    private void SetEyelidsBlendShapeWeight(float weight)
    {
        faceSkinnedMeshRenderer.SetBlendShapeWeight(indexOfLeftEyelid, weight);
        faceSkinnedMeshRenderer.SetBlendShapeWeight(indexOfRightEyelid, weight);
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b64bad04a93295642a4486f9899f8734
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,838 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Convai.Scripts.Runtime.Addons;
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.LoggerSystem;
using Convai.Scripts.Runtime.UI;
using Convai.Scripts.Runtime.Utils;
using Google.Protobuf;
using Grpc.Core;
using Service;
using UnityEngine;
using static Service.GetResponseRequest.Types;
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// This class is dedicated to manage all communications between the Convai server and plugin, in addition to
/// processing any data transmitted during these interactions. It abstracts the underlying complexities of the plugin,
/// providing a seamless interface for users. Modifications to this class are discouraged as they may impact the
/// stability and functionality of the system. This class is maintained by the development team to ensure compatibility
/// and performance.
/// </summary>
[DisallowMultipleComponent]
[RequireComponent(typeof(ConvaiNPCManager))]
[AddComponentMenu("Convai/Convai GRPC API")]
public class ConvaiGRPCAPI : MonoBehaviour
{
private static bool _isInitializationErrorThrown;
public static ConvaiGRPCAPI Instance;
private static bool _usageLimitNotificationSent;
private ConvaiNPC _activeConvaiNPC;
private string _apiKey;
private CancellationTokenSource _cancellationTokenSource;
private ConvaiChatUIHandler _chatUIHandler;
private string _currentTranscript;
private string _isFinalUserQueryTextBuffer = "";
private void Awake()
{
    // Enforce a single ConvaiGRPCAPI per scene: keep the first, destroy later copies.
    if (Instance == null || Instance == this)
    {
        Instance = this;
    }
    else
    {
        Destroy(gameObject);
        return;
    }
    // Load the Convai API key from the ScriptableObject in Resources.
    ConvaiAPIKeySetup.GetAPIKey(out _apiKey);
    // Cache the chat UI handler, if one exists in the scene.
    _chatUIHandler = FindObjectOfType<ConvaiChatUIHandler>();
}
private void Start()
{
    // Fresh token source for the gRPC streaming calls this component will make.
    _cancellationTokenSource = new CancellationTokenSource();
    // React whenever the player's active NPC changes.
    ConvaiNPCManager.Instance.OnActiveNPCChanged += HandleActiveNPCChanged;
    // Ensure a dispatcher exists for marshalling callbacks onto the main thread.
    MainThreadDispatcher.CreateInstance();
}
private void FixedUpdate()
{
    // Forward the live user transcript to the chat UI, when both exist.
    if (_chatUIHandler == null) return;
    if (string.IsNullOrEmpty(_currentTranscript)) return;
    _chatUIHandler.SendPlayerText(_currentTranscript);
}
private void OnDestroy()
{
    // Stop listening for NPC changes before tearing down our own state.
    ConvaiNPCManager.Instance.OnActiveNPCChanged -= HandleActiveNPCChanged;
    // Cut off any speech the active NPC is still playing.
    InterruptCharacterSpeech(_activeConvaiNPC);
    try
    {
        // Cancel any in-flight gRPC streaming calls.
        _cancellationTokenSource?.Cancel();
    }
    catch (Exception ex)
    {
        // Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
        ConvaiLogger.Warn("Exception in OnDestroy: " + ex.Message, ConvaiLogger.LogCategory.Character);
    }
    finally
    {
        // Dispose and null out so no late callback re-uses a dead token source.
        _cancellationTokenSource?.Dispose();
        _cancellationTokenSource = null;
    }
}
/// <summary>
/// Asynchronously initializes a session ID by communicating with a gRPC service and returns the session ID if
/// successful.
/// </summary>
/// <param name="characterName">The name of the character for which the session is being initialized.</param>
/// <param name="client">The gRPC service client used to make the call to the server.</param>
/// <param name="characterID">The unique identifier for the character.</param>
/// <param name="sessionID">The session ID that may be updated during the initialization process.</param>
/// <returns>
/// A task that represents the asynchronous operation. The task result contains the initialized session ID on
/// success; null when the gRPC client is not initialized; "-1" when the server returns no session ID or an
/// RPC error is handled.
/// </returns>
public static async Task<string> InitializeSessionIDAsync(string characterName, ConvaiService.ConvaiServiceClient client, string characterID, string sessionID)
{
    ConvaiLogger.DebugLog("Initializing SessionID for character: " + characterName, ConvaiLogger.LogCategory.Character);
    if (client == null)
    {
        ConvaiLogger.Error("gRPC client is not initialized.", ConvaiLogger.LogCategory.Character);
        return null;
    }
    // The duplex call is disposed when this method returns.
    using AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = client.GetResponse();
    // Audio is disabled: this exchange exists only to obtain a session ID.
    GetResponseRequest getResponseConfigRequest = new()
    {
        GetResponseConfig = new GetResponseConfig
        {
            CharacterId = characterID,
            ApiKey = Instance._apiKey,
            SessionId = sessionID,
            AudioConfig = new AudioConfig { DisableAudio = true }
        }
    };
    try
    {
        await call.RequestStream.WriteAsync(getResponseConfigRequest);
        // Send a throwaway prompt so the server produces a response carrying the session ID.
        await call.RequestStream.WriteAsync(new GetResponseRequest
        {
            GetResponseData = new GetResponseData
            {
                TextData = "Repeat the following exactly as it is: [Hii]"
            }
        });
        await call.RequestStream.CompleteAsync();
        // Read responses until one carries a non-empty session ID.
        while (await call.ResponseStream.MoveNext())
        {
            GetResponseResponse result = call.ResponseStream.Current;
            if (!string.IsNullOrEmpty(result.SessionId))
            {
                ConvaiLogger.DebugLog("SessionID Initialization SUCCESS for: " + characterName,
                    ConvaiLogger.LogCategory.Character);
                return result.SessionId;
            }
        }
        ConvaiLogger.Exception("SessionID Initialization FAILED for: " + characterName, ConvaiLogger.LogCategory.Character);
    }
    catch (RpcException rpcException)
    {
        switch (rpcException.StatusCode)
        {
            case StatusCode.Cancelled:
                ConvaiLogger.Exception(rpcException, ConvaiLogger.LogCategory.Character);
                break;
            case StatusCode.Unknown:
                ConvaiLogger.Error($"Unknown error from server: {rpcException.Status.Detail}", ConvaiLogger.LogCategory.Character);
                break;
            case StatusCode.PermissionDenied:
            {
                // Usage limit exceeded: notify the user once per session.
                if (NotificationSystemHandler.Instance != null && !_isInitializationErrorThrown)
                {
                    NotificationSystemHandler.Instance.NotificationRequest(NotificationType.UsageLimitExceeded);
                    _isInitializationErrorThrown = true;
                }
                break;
            }
            default:
                throw;
        }
    }
    catch (Exception ex)
    {
        ConvaiLogger.Exception(ex, ConvaiLogger.LogCategory.Character);
    }
    // Reached when the stream ends without a session ID or an RPC error was handled above.
    return "-1";
}
/// <summary>
/// Sends text data to the server and processes the response.
/// </summary>
/// <param name="client">The gRPC client used to communicate with the server.</param>
/// <param name="userText">The text data to send to the server.</param>
/// <param name="characterID">The ID of the character that is sending the text.</param>
/// <param name="isActionActive">Indicates whether actions are active.</param>
/// <param name="isLipSyncActive">Indicates whether lip sync is active.</param>
/// <param name="actionConfig">The action configuration.</param>
/// <param name="faceModel">The face model.</param>
/// <param name="speakerId">Speaker ID of the Player</param>
/// <returns>A task that represents the asynchronous operation.</returns>
public async Task SendTextData(ConvaiService.ConvaiServiceClient client, string userText, string characterID, bool isActionActive, bool isLipSyncActive,
    ActionConfig actionConfig, FaceModel faceModel, string speakerId)
{
    // FIX: dispose the duplex call when this method completes (matches
    // InitializeSessionIDAsync). AsyncDuplexStreamingCall is IDisposable and the
    // original leaked it. The receive task below is awaited before the method
    // returns, so disposing here is safe.
    using AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call =
        GetAsyncDuplexStreamingCallOptions(client);
    // Recording frequency is 0 because this request carries text, not audio.
    GetResponseRequest getResponseConfigRequest = CreateGetResponseRequest(
        isActionActive,
        isLipSyncActive,
        0,
        characterID,
        actionConfig,
        faceModel,
        speakerId);
    try
    {
        await call.RequestStream.WriteAsync(getResponseConfigRequest);
        await call.RequestStream.WriteAsync(new GetResponseRequest
        {
            GetResponseData = new GetResponseData
            {
                TextData = userText
            }
        });
        await call.RequestStream.CompleteAsync();
        // Receive results from the server on a background task...
        Task receiveResultsTask = Task.Run(
            async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token); },
            _cancellationTokenSource.Token);
        // ...and await it so the call stays alive until the response is fully consumed.
        await receiveResultsTask.ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error(ex, ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Called whenever the active NPC changes: interrupts the new NPC's speech,
/// cancels and recycles the cancellation token source used by in-flight gRPC
/// calls, then records the new active NPC.
/// </summary>
/// <param name="newActiveNPC">The NPC that just became active; may be null.</param>
private void HandleActiveNPCChanged(ConvaiNPC newActiveNPC)
{
    if (newActiveNPC != null)
        InterruptCharacterSpeech(newActiveNPC);
    // Cancel the ongoing gRPC call
    try
    {
        _cancellationTokenSource?.Cancel();
    }
    catch (Exception e)
    {
        // Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
        ConvaiLogger.Warn("Exception in GRPCAPI:HandleActiveNPCChanged: " + e.Message,
            ConvaiLogger.LogCategory.Character);
    }
    finally
    {
        // Dispose the old source; background tasks created with its token observe cancellation.
        _cancellationTokenSource?.Dispose();
        _cancellationTokenSource = null;
        ConvaiLogger.Info("The Cancellation Token Source was Disposed in GRPCAPI:HandleActiveNPCChanged",
            ConvaiLogger.LogCategory.Character);
    }
    _cancellationTokenSource = new CancellationTokenSource(); // Create a new token for future calls
    _activeConvaiNPC = newActiveNPC;
}
/// <summary>
/// Starts recording audio and sends it to the server for processing.
/// </summary>
/// <param name="client">gRPC service Client object</param>
/// <param name="isActionActive">Bool specifying whether we are expecting action responses</param>
/// <param name="isLipSyncActive">Bool specifying whether facial/lip-sync data is requested</param>
/// <param name="recordingFrequency">Frequency of the audio being sent</param>
/// <param name="recordingLength">Length of the recording from the microphone</param>
/// <param name="characterID">Character ID obtained from the playground</param>
/// <param name="actionConfig">Object containing the action configuration</param>
/// <param name="faceModel">Face model used for facial data in responses</param>
/// <param name="speakerID">Speaker ID of the Player</param>
/// <remarks>
/// NOTE(review): unlike InitializeSessionIDAsync, the duplex call opened here is
/// never disposed — neither on the early-return error path nor after
/// ProcessAudioContinuously finishes. Confirm whether disposal is handled
/// downstream or this leaks the call.
/// </remarks>
public async Task StartRecordAudio(ConvaiService.ConvaiServiceClient client, bool isActionActive, bool isLipSyncActive, int recordingFrequency, int recordingLength,
    string characterID, ActionConfig actionConfig, FaceModel faceModel, string speakerID)
{
    AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = GetAsyncDuplexStreamingCallOptions(client);
    GetResponseRequest getResponseConfigRequest =
        CreateGetResponseRequest(isActionActive, isLipSyncActive, recordingFrequency, characterID, actionConfig, faceModel, speakerID);
    ConvaiLogger.DebugLog(getResponseConfigRequest.ToString(), ConvaiLogger.LogCategory.Character);
    try
    {
        // Send the configuration frame before any audio data.
        await call.RequestStream.WriteAsync(getResponseConfigRequest);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error(ex, ConvaiLogger.LogCategory.Character);
        return; // early return on error
    }
    // Non-looping clip capped at recordingLength seconds.
    AudioClip audioClip = Microphone.Start(MicrophoneManager.Instance.SelectedMicrophoneName, false, recordingLength, recordingFrequency);
    MicrophoneTestController.Instance.CheckMicrophoneDeviceWorkingStatus(audioClip);
    ConvaiLogger.Info(_activeConvaiNPC.characterName + " is now listening", ConvaiLogger.LogCategory.Character);
    OnPlayerSpeakingChanged?.Invoke(true);
    await ProcessAudioContinuously(call, recordingFrequency, recordingLength, audioClip);
}
/// <summary>
/// Opens a GetResponse duplex stream tagged with metadata identifying
/// this client as the Unity SDK.
/// </summary>
private AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> GetAsyncDuplexStreamingCallOptions(ConvaiService.ConvaiServiceClient client)
{
    // Identify the caller to the server: Unity plugin, version 3.3.1.
    Metadata requestHeaders = new()
    {
        { "source", "Unity" },
        { "version", "3.3.1" }
    };
    return client.GetResponse(new CallOptions(requestHeaders));
}
/// <summary>
/// Creates a GetResponseRequest object configured with the specified parameters for initiating a gRPC call.
/// </summary>
/// <param name="isActionActive">Indicates whether actions are enabled for the character.</param>
/// <param name="isLipSyncActive">Indicates whether lip sync is enabled for the character.</param>
/// <param name="recordingFrequency">The frequency at which the audio is recorded (0 for text-only requests).</param>
/// <param name="characterID">The unique identifier for the character.</param>
/// <param name="actionConfig">The configuration for character actions.</param>
/// <param name="faceModel">The facial model configuration for the character.</param>
/// <param name="speakerID">Speaker ID of the player; empty when not set.</param>
/// <param name="npc">Optional NPC whose session ID takes priority over the active NPC's.</param>
/// <returns>A GetResponseRequest object configured with the provided settings.</returns>
private GetResponseRequest CreateGetResponseRequest(bool isActionActive, bool isLipSyncActive, int recordingFrequency, string characterID, ActionConfig actionConfig = null,
    FaceModel faceModel = FaceModel.OvrModelName, string speakerID = "", ConvaiNPC npc = null)
{
    GetResponseRequest getResponseConfigRequest = new()
    {
        GetResponseConfig = new GetResponseConfig
        {
            CharacterId = characterID,
            ApiKey = _apiKey, // Assumes apiKey is available
            SessionId = npc?.sessionID ?? _activeConvaiNPC?.sessionID ?? "-1", // Falls back: explicit NPC session -> active NPC session -> "-1" (new session)
            SpeakerId = speakerID,
            AudioConfig = new AudioConfig
            {
                SampleRateHertz = recordingFrequency,
                EnableFacialData = isLipSyncActive,
                FaceModel = faceModel
            }
        }
    };
    // Attach optional per-NPC configuration read from components on the active NPC.
    if (_activeConvaiNPC != null)
    {
        if (_activeConvaiNPC.TryGetComponent(out NarrativeDesignKeyController ndController))
        {
            foreach (NarrativeDesignKeyController.NarrativeDesignKey templateKey in ndController.narrativeDesignKeys)
            {
                getResponseConfigRequest.GetResponseConfig.NarrativeTemplateKeys.Add(templateKey.name, templateKey.value);
            }
        }
        if (_activeConvaiNPC.TryGetComponent(out DynamicInfoController diController))
        {
            getResponseConfigRequest.GetResponseConfig.DynamicInfoConfig = diController.DynamicInfoConfig;
        }
    }
    // NOTE(review): this OR means ActionConfig (possibly null) is also assigned whenever
    // any NPC is active, even with isActionActive == false — confirm '||' vs '&&' intent.
    if (isActionActive || _activeConvaiNPC != null) getResponseConfigRequest.GetResponseConfig.ActionConfig = actionConfig;
    return getResponseConfigRequest;
}
/// <summary>
/// Processes audio data continuously from a microphone input and sends it to the server via a gRPC call.
/// </summary>
/// <param name="call">The streaming call to send audio data to the server.</param>
/// <param name="recordingFrequency">The frequency at which the audio is recorded.</param>
/// <param name="recordingLength">The length of the audio recording in seconds.</param>
/// <param name="audioClip">The AudioClip object that contains the audio data from the microphone.</param>
/// <returns>A task that represents the asynchronous operation of processing and sending audio data.</returns>
private async Task ProcessAudioContinuously(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, int recordingFrequency, int recordingLength,
    AudioClip audioClip)
{
    // Run the receiving results from the server in the background without awaiting it here.
    // NOTE(review): this Task is never awaited or observed in this method; an exception
    // inside ReceiveResultFromServer would go unobserved — confirm it handles its own errors.
    Task receiveResultsTask = Task.Run(async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token); }, _cancellationTokenSource.Token);
    int pos = 0;
    // Buffer large enough for the whole (non-looping) recording.
    float[] audioData = new float[recordingFrequency * recordingLength];
    // Poll the microphone every 200 ms and stream any newly captured samples.
    while (Microphone.IsRecording(MicrophoneManager.Instance.SelectedMicrophoneName))
    {
        await Task.Delay(200);
        int newPos = Microphone.GetPosition(MicrophoneManager.Instance.SelectedMicrophoneName);
        int diff = newPos - pos;
        if (diff > 0)
        {
            if (audioClip == null)
            {
                // Clip vanished (e.g. device lost): cancel outstanding work and stop streaming.
                try
                {
                    _cancellationTokenSource?.Cancel();
                }
                catch (Exception e)
                {
                    // Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
                    ConvaiLogger.Warn("Exception when Audio Clip is null: " + e.Message,
                        ConvaiLogger.LogCategory.Character);
                }
                finally
                {
                    _cancellationTokenSource?.Dispose();
                    _cancellationTokenSource = null;
                    ConvaiLogger.Info("The Cancellation Token Source was Disposed because the Audio Clip was empty.",
                        ConvaiLogger.LogCategory.Character);
                }
                break;
            }
            // Read samples starting at the last sent position; ProcessAudioChunk
            // consumes the first 'diff' entries of the buffer.
            audioClip.GetData(audioData, pos);
            await ProcessAudioChunk(call, diff, audioData);
            pos = newPos;
        }
    }
    // Process any remaining audio data captured after the last poll.
    await ProcessAudioChunk(call,
        Microphone.GetPosition(MicrophoneManager.Instance.SelectedMicrophoneName) - pos,
        audioData).ConfigureAwait(false);
    await call.RequestStream.CompleteAsync();
}
/// <summary>
/// Stops the microphone recording, resets the usage-limit notification latch,
/// and signals that the player has stopped speaking.
/// </summary>
public void StopRecordAudio()
{
    // End microphone recording
    Microphone.End(MicrophoneManager.Instance.SelectedMicrophoneName);
    _usageLimitNotificationSent = false;
    // FIX: test for a missing NPC explicitly instead of provoking and swallowing
    // a NullReferenceException (exceptions used as control flow in the original).
    if (_activeConvaiNPC == null)
    {
        ConvaiLogger.Error("No active NPC found", ConvaiLogger.LogCategory.Character);
        return;
    }
    ConvaiLogger.Info(_activeConvaiNPC.characterName + " has stopped listening", ConvaiLogger.LogCategory.Character);
    OnPlayerSpeakingChanged?.Invoke(false);
}
/// <summary>
/// Processes each audio chunk and sends it to the server.
/// </summary>
/// <param name="call">gRPC Streaming call connecting to the getResponse function</param>
/// <param name="diff">Length of the audio data from the current position to the position of the last sent chunk</param>
/// <param name="audioData">Chunk of audio data that we want to be processed</param>
private static async Task ProcessAudioChunk(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, int diff, IReadOnlyList<float> audioData)
{
    if (diff > 0)
    {
        // Convert float samples to 16-bit PCM bytes.
        // NOTE(review): samples outside [-1, 1] overflow the short cast without clamping,
        // and BitConverter uses native byte order - confirm upstream range and the server's
        // expected endianness.
        byte[] audioByteArray = new byte[diff * sizeof(short)];
        for (int i = 0; i < diff; i++)
        {
            float sample = audioData[i];
            short shortSample = (short)(sample * short.MaxValue);
            byte[] shortBytes = BitConverter.GetBytes(shortSample);
            audioByteArray[i * sizeof(short)] = shortBytes[0];
            audioByteArray[i * sizeof(short) + 1] = shortBytes[1];
        }
        // Send audio data to the gRPC server
        try
        {
            await call.RequestStream.WriteAsync(new GetResponseRequest
            {
                GetResponseData = new GetResponseData
                {
                    AudioData = ByteString.CopyFrom(audioByteArray)
                }
            });
        }
        catch (RpcException rpcException)
        {
            switch (rpcException.StatusCode)
            {
                case StatusCode.Cancelled:
                    // The stream was cancelled (e.g. speech interruption); log and swallow.
                    ConvaiLogger.Error(rpcException, ConvaiLogger.LogCategory.Character);
                    break;
                case StatusCode.PermissionDenied:
                {
                    // Usage quota exhausted: notify the user at most once per recording session.
                    if (NotificationSystemHandler.Instance != null && !_usageLimitNotificationSent)
                    {
                        NotificationSystemHandler.Instance.NotificationRequest(NotificationType.UsageLimitExceeded);
                        _usageLimitNotificationSent = true;
                    }
                    break;
                }
                default:
                    // Unexpected gRPC failure: propagate to the caller.
                    throw;
            }
        }
        catch (Exception ex)
        {
            ConvaiLogger.Error(ex, ConvaiLogger.LogCategory.Character);
        }
    }
}
/// <summary>
/// Interrupts any in-progress speech for the given NPC: cancels the ongoing gRPC call,
/// clears queued responses, and resets the NPC's talking state, lip-sync, audio, and animation.
/// </summary>
/// <param name="newActiveNPC">The NPC whose speech should be interrupted.</param>
public void InterruptCharacterSpeech(ConvaiNPC newActiveNPC)
{
    // If the active NPC is speaking, cancel the ongoing gRPC call,
    // clear the response queue, and reset the character's speaking state, lip-sync, animation, and audio playback
    if (newActiveNPC != null && newActiveNPC.isCharacterActive)
    {
        // Cancel the ongoing gRPC call
        try
        {
            _cancellationTokenSource?.Cancel();
        }
        catch (Exception e)
        {
            // Handle the Exception, which can occur if the CancellationTokenSource is already disposed.
            ConvaiLogger.Warn("Exception in Interrupt Character Speech: " + e.Message, ConvaiLogger.LogCategory.Character);
        }
        finally
        {
            // Always dispose and null out the source so the new one below is the only live instance.
            _cancellationTokenSource?.Dispose();
            _cancellationTokenSource = null;
            ConvaiLogger.Info($"The Cancellation Token Source for {newActiveNPC} was Disposed in ConvaiGRPCAPI:InterruptCharacterSpeech.",
                ConvaiLogger.LogCategory.Character);
        }
        _cancellationTokenSource = new CancellationTokenSource(); // Create a new token for future calls
        CharacterInterrupted?.Invoke();
        // Clear the response queue
        newActiveNPC.ClearResponseQueue();
        // Reset the character's speaking state
        newActiveNPC.SetCharacterTalking(false);
        // Stop any ongoing audio playback
        newActiveNPC.StopAllAudioPlayback();
        // Stop any ongoing lip sync for active NPC
        newActiveNPC.StopLipSync();
        // Reset the character's animation to idle
        newActiveNPC.ResetCharacterAnimation();
    }
}
/// <summary>
/// Reads responses from the server stream until cancellation or end of stream, dispatching
/// each message to the emotion, transcript, narrative, action, audio, and debug handlers.
/// </summary>
/// <param name="call">Active duplex streaming call to read responses from.</param>
/// <param name="cancellationToken">Token that aborts the read loop.</param>
/// <param name="npc">Optional NPC the responses belong to; defaults to the active NPC in the handlers.</param>
private async Task ReceiveResultFromServer(AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, CancellationToken cancellationToken,
    ConvaiNPC npc = null)
{
    Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue = new(); // Pending lip-sync frames in arrival order.
    bool firstSilFound = false; // Tracks whether the first silence marker in the viseme stream was seen.
    if (npc != null) npc.isCharacterActive = true;
    while (!cancellationToken.IsCancellationRequested && await call.ResponseStream.MoveNext(cancellationToken).ConfigureAwait(false))
        try
        {
            GetResponseResponse result = call.ResponseStream.Current;
            OnResultReceived?.Invoke(result);
            // A single response message can carry several payload types; dispatch each.
            ProcessCharacterEmotion(result, npc);
            ProcessUserQuery(result);
            ProcessBtResponse(result, npc);
            ProcessActionResponse(result, npc);
            ProcessAudioResponse(result, lipSyncBlendFrameQueue, ref firstSilFound, npc);
            ProcessDebugLog(result, call, npc);
            UpdateSessionId(result, npc);
        }
        catch (RpcException rpcException) when (rpcException.StatusCode == StatusCode.Cancelled)
        {
            // Cancellation of the stream (e.g. interruption) is logged but not rethrown.
            ConvaiLogger.Error(rpcException, ConvaiLogger.LogCategory.Character);
        }
        catch (Exception ex)
        {
            // Per-message failures are logged so one bad response does not kill the loop.
            ConvaiLogger.DebugLog(ex, ConvaiLogger.LogCategory.Character);
        }
    if (cancellationToken.IsCancellationRequested)
        await call.RequestStream.CompleteAsync();
}
/// <summary>
/// Resolves which NPC a server response should be routed to: the explicitly supplied NPC
/// if one was given, otherwise the currently active NPC.
/// </summary>
private ConvaiNPC NPCToSendResponse(ConvaiNPC npc)
{
    // NOTE(review): this is a reference-null check (like the original `??`), which bypasses
    // UnityEngine.Object's overloaded equality - a destroyed-but-not-null NPC is still
    // returned as-is; confirm that is intended.
    return npc is null ? _activeConvaiNPC : npc;
}
/// <summary>
/// Parses the space-separated emotion list sent by the server and applies it to the NPC's lip-sync.
/// </summary>
private void ProcessCharacterEmotion(GetResponseResponse result, ConvaiNPC npc)
{
    ConvaiNPC targetNpc = NPCToSendResponse(npc);
    if (targetNpc == null) return;

    string emotionResponse = result.EmotionResponse;
    if (string.IsNullOrEmpty(emotionResponse)) return;

    ConvaiLogger.DebugLog($"Emotion Response from the server: {emotionResponse}", ConvaiLogger.LogCategory.LipSync);

    // The server encodes emotions as a single space-separated string.
    List<string> parsedEmotions = new(emotionResponse.Split(' '));
    targetNpc.convaiLipSync.SetCharacterEmotions(parsedEmotions);
}
/// <summary>
/// Maintains the running transcript of the player's speech from partial/final query fragments.
/// </summary>
private void ProcessUserQuery(GetResponseResponse result)
{
    var query = result.UserQuery;
    if (query == null)
    {
        // No transcription in this response: reset the running transcript state.
        _isFinalUserQueryTextBuffer = "";
        _currentTranscript = null;
        return;
    }

    // Live transcript = everything already finalized plus the current fragment.
    _currentTranscript = _isFinalUserQueryTextBuffer + query.TextData;
    if (query.IsFinal) _isFinalUserQueryTextBuffer += query.TextData;
    if (query.EndOfResponse) _isFinalUserQueryTextBuffer = "";
}
/// <summary>
/// Forwards behaviour-tree responses to the narrative-design handler.
/// </summary>
private void ProcessBtResponse(GetResponseResponse result, ConvaiNPC npc)
{
    if (result.BtResponse == null) return;
    TriggerNarrativeSection(result, npc);
}
/// <summary>
/// Appends any action returned by the server to the NPC's pending action list.
/// </summary>
private void ProcessActionResponse(GetResponseResponse result, ConvaiNPC npc)
{
    ConvaiNPC convaiNPC = NPCToSendResponse(npc);
    // Fix: guard against a missing NPC (NPCToSendResponse can return null when there is
    // no active NPC), matching the null check done in ProcessCharacterEmotion.
    if (convaiNPC == null || result.ActionResponse == null || convaiNPC.actionsHandler == null) return;
    convaiNPC.actionsHandler.actionResponseList.Add(result.ActionResponse.Action);
}
/// <summary>
/// Routes the audio portion of a server response to playback and lip-sync handling.
/// </summary>
private void ProcessAudioResponse(GetResponseResponse result, Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue, ref bool firstSilFound, ConvaiNPC npc)
{
    ConvaiNPC convaiNPC = NPCToSendResponse(npc);
    // Fix: guard against a missing NPC before the handlers below dereference it.
    if (convaiNPC == null || result.AudioResponse?.AudioData == null) return;
    // Responses whose payload is no larger than a bare WAV header (the 46-byte threshold
    // the original used) carry no samples and are skipped.
    // Fix: use ByteString.Length instead of materializing the whole byte array just to measure it.
    if (result.AudioResponse.AudioData.Length > 46)
        ProcessAudioData(result, lipSyncBlendFrameQueue, convaiNPC);
    if (result.AudioResponse.VisemesData != null)
        ProcessVisemesData(result, lipSyncBlendFrameQueue, ref firstSilFound, convaiNPC);
    if (result.AudioResponse.BlendshapesData != null)
        ProcessBlendshapesFrame(result, lipSyncBlendFrameQueue, convaiNPC);
}
/// <summary>
/// Handles the audio payload of a response: enqueues it for direct playback when the NPC
/// has no lip-sync component, otherwise wraps it in a lip-sync frame for later processing.
/// </summary>
private void ProcessAudioData(GetResponseResponse result, Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue, ConvaiNPC npc)
{
    byte[] wavBytes = result.AudioResponse.AudioData.ToByteArray();
    if (npc.convaiLipSync == null)
    {
        ConvaiLogger.DebugLog($"Enqueuing responses: {result.AudioResponse.TextData}", ConvaiLogger.LogCategory.LipSync);
        npc.EnqueueResponse(result);
    }
    else
    {
        // Choose the lip-sync frame type matching the NPC's face model.
        LipSyncBlendFrameData.FrameType frameType = npc.convaiLipSync.faceModel == FaceModel.OvrModelName
            ? LipSyncBlendFrameData.FrameType.Visemes
            : LipSyncBlendFrameData.FrameType.Blendshape;
        // Frame count = clip duration * 30; presumably the lip-sync data is 30 fps - confirm.
        lipSyncBlendFrameQueue.Enqueue(new LipSyncBlendFrameData((int)(WavUtility.CalculateDurationSeconds(wavBytes) * 30), result, frameType));
    }
}
/// <summary>
/// Accumulates viseme frames into the current lip-sync frame. A silence marker (Sil == -2)
/// or end-of-response closes the frame and hands it to the NPC for processing.
/// </summary>
private void ProcessVisemesData(GetResponseResponse result, Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue, ref bool firstSilFound, ConvaiNPC npc)
{
    if (npc.convaiLipSync == null) return;
    if (Mathf.Approximately(result.AudioResponse.VisemesData.Visemes.Sil, -2) || result.AudioResponse.EndOfResponse)
    {
        // The first silence only marks the frame boundary; subsequent ones flush the completed frame.
        if (firstSilFound) lipSyncBlendFrameQueue.Dequeue().Process(npc);
        firstSilFound = true;
    }
    else
    {
        // NOTE(review): Peek throws if visemes arrive before any audio frame was enqueued -
        // confirm the server guarantees audio-before-visemes ordering.
        lipSyncBlendFrameQueue.Peek().Enqueue(result.AudioResponse.VisemesData);
    }
}
/// <summary>
/// Accumulates ARKit blendshape data into the current lip-sync frame, processing the frame
/// fully once complete (or on end-of-response) and partially while it is still filling.
/// </summary>
private void ProcessBlendshapesFrame(GetResponseResponse result, Queue<LipSyncBlendFrameData> lipSyncBlendFrameQueue, ConvaiNPC npc)
{
    if (npc.convaiLipSync == null) return;
    // NOTE(review): Peek throws on an empty queue if blendshapes arrive before any audio
    // frame was enqueued - confirm the server guarantees the ordering.
    if (lipSyncBlendFrameQueue.Peek().CanProcess() || result.AudioResponse.EndOfResponse)
    {
        lipSyncBlendFrameQueue.Dequeue().Process(npc);
    }
    else
    {
        lipSyncBlendFrameQueue.Peek().Enqueue(result.AudioResponse.FaceEmotion.ArKitBlendShapes);
        if (lipSyncBlendFrameQueue.Peek().CanPartiallyProcess())
            lipSyncBlendFrameQueue.Peek().ProcessPartially(npc);
    }
}
/// <summary>
/// Forwards debug-log-only responses (those carrying no audio) to the NPC's response queue.
/// </summary>
private void ProcessDebugLog(GetResponseResponse result, AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call, ConvaiNPC npc)
{
    ConvaiNPC convaiNPC = NPCToSendResponse(npc);
    // Fix: added a null guard, and enqueue the `result` that was passed in - re-reading
    // call.ResponseStream.Current was redundant (the caller passes that same object) and
    // inconsistent with the sibling handlers. `call` is retained for signature compatibility.
    if (convaiNPC == null || result.AudioResponse != null || result.DebugLog == null) return;
    convaiNPC.EnqueueResponse(result);
}
/// <summary>
/// Captures the session ID from a server response if the NPC does not have one yet.
/// </summary>
private void UpdateSessionId(GetResponseResponse result, ConvaiNPC npc)
{
    ConvaiNPC convaiNPC = NPCToSendResponse(npc);
    // Fix: guard against a missing NPC (NPCToSendResponse can return null).
    if (convaiNPC == null) return;
    // "-1" is the sentinel for an uninitialized session.
    if (convaiNPC.sessionID == "-1")
        convaiNPC.sessionID = result.SessionId;
}
/// <summary>
/// Forwards a narrative-design section change from the server to the NPC's
/// NarrativeDesignManager, dispatched on the main thread.
/// </summary>
/// <param name="result">Server response carrying the BtResponse payload.</param>
/// <param name="npc">NPC to route to; defaults to the active NPC.</param>
private void TriggerNarrativeSection(GetResponseResponse result, ConvaiNPC npc)
{
    ConvaiNPC convaiNPC = NPCToSendResponse(npc);
    // Trigger the current section of the narrative design manager in the active NPC
    if (result.BtResponse != null)
    {
        ConvaiLogger.DebugLog($"Narrative Design SectionID: {result.BtResponse.NarrativeSectionId}", ConvaiLogger.LogCategory.Character);
        // Get the NarrativeDesignManager component from the active NPC
        NarrativeDesignManager narrativeDesignManager = convaiDesignManagerOrNull(convaiNPC);
        if (narrativeDesignManager != null)
            // Marshalled to the main thread, presumably because UpdateCurrentSection touches Unity objects.
            MainThreadDispatcher.Instance.RunOnMainThread(() => { narrativeDesignManager.UpdateCurrentSection(result.BtResponse.NarrativeSectionId); });
        else
            ConvaiLogger.Error("NarrativeDesignManager component not found in the active NPC", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Sends a narrative-design trigger to the server over a fresh streaming call and
/// listens for the resulting responses until the stream completes.
/// </summary>
/// <param name="client">gRPC client used to open the streaming call.</param>
/// <param name="characterID">ID of the character the trigger targets.</param>
/// <param name="triggerConfig">Trigger payload to send.</param>
/// <param name="sendingNPC">Optional NPC that initiated the trigger; responses are routed to it.</param>
public async Task SendTriggerData(ConvaiService.ConvaiServiceClient client, string characterID, TriggerConfig triggerConfig, ConvaiNPC sendingNPC = null)
{
    ConvaiLogger.DebugLog($"Sending trigger data: {triggerConfig.TriggerName}", ConvaiLogger.LogCategory.Character);
    AsyncDuplexStreamingCall<GetResponseRequest, GetResponseResponse> call = GetAsyncDuplexStreamingCallOptions(client);
    GetResponseRequest getResponseConfigRequest = CreateGetResponseRequest(true, true, 0, characterID, npc: sendingNPC);
    try
    {
        // Send the configuration first, then the trigger payload, then close the request side.
        await call.RequestStream.WriteAsync(getResponseConfigRequest);
        await call.RequestStream.WriteAsync(new GetResponseRequest
        {
            GetResponseData = new GetResponseData
            {
                TriggerData = triggerConfig
            }
        });
        await call.RequestStream.CompleteAsync();
        // Store the task that receives results from the server.
        Task receiveResultsTask = Task.Run(
            async () => { await ReceiveResultFromServer(call, _cancellationTokenSource.Token, sendingNPC); },
            _cancellationTokenSource.Token);
        // Await the task if needed to ensure it completes before this method returns [OPTIONAL]
        await receiveResultsTask.ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error(ex, ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Asynchronously sends feedback to the server.
/// </summary>
/// <param name="thumbsUp">Indicates whether the feedback is a thumbs up or thumbs down.</param>
/// <param name="interactionID">The ID associated with the interaction.</param>
/// <param name="feedbackText">The text content of the feedback.</param>
/// <returns>A Task representing the asynchronous operation.</returns>
public async Task SendFeedback(bool thumbsUp, string interactionID, string feedbackText)
{
    // NOTE(review): assumes _activeConvaiNPC and _cancellationTokenSource are non-null;
    // calling this with no active NPC would throw before the try block - confirm callers guarantee it.
    // Create a FeedbackRequest object with the provided parameters.
    FeedbackRequest request = new()
    {
        InteractionId = interactionID,
        CharacterId = _activeConvaiNPC.characterID,
        SessionId = _activeConvaiNPC.sessionID,
        TextFeedback = new FeedbackRequest.Types.Feedback
        {
            FeedbackText = feedbackText,
            ThumbsUp = thumbsUp
        }
    };
    try
    {
        // Send the feedback request asynchronously and await the response.
        FeedbackResponse response = await _activeConvaiNPC.GetClient().SubmitFeedbackAsync(request, cancellationToken: _cancellationTokenSource.Token);
        // Log the feedback response.
        ConvaiLogger.Info(response.FeedbackResponse_, ConvaiLogger.LogCategory.Character);
    }
    catch (RpcException rpcException)
    {
        // Log an exception if there is an error in sending the feedback.
        ConvaiLogger.Exception(rpcException, ConvaiLogger.LogCategory.Character);
    }
}
#region Events
public event Action CharacterInterrupted; // Raised by InterruptCharacterSpeech when an NPC's speech is cut off.
public event Action<GetResponseResponse> OnResultReceived; // Raised for every response message received from the server stream.
public event Action<bool> OnPlayerSpeakingChanged; // Raised with true/false when the player starts or stops speaking.
#endregion
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ac3bfdb7f1f556540bc41acc9a375817
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,270 @@
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// This class provides head tracking functionalities for an object (like a character) with an Animator.
/// It requires the Animator component to be attached to the same GameObject.
/// </summary>
[RequireComponent(typeof(Animator))]
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Character Head & Eye Tracking")]
public class ConvaiHeadTracking : MonoBehaviour
{
    // Interval (seconds) between refreshes of the randomized look-at weight.
    private const float POSITION_UPDATE_DELAY = 2f;

    [field: Header("Tracking Properties")]
    [Tooltip("The object that the head should track.")]
    [field: SerializeField]
    public Transform TargetObject { get; set; }

    [Range(0.0f, 100.0f)] [Tooltip("The maximum distance at which the head must still track target.")] [SerializeField]
    private float trackingDistance = 10f;

    [Tooltip("Speed at which character turns towards the target.")] [Range(1f, 10f)] [SerializeField]
    private float turnSpeed = 5.0f;

    [Header("Look At Weights")]
    [Range(0f, 1f)]
    [Tooltip(
        "Controls the amount of rotation applied to the body to achieve the 'Look At' target. The closer to 1, the more the body will rotate to follow the target.")]
    [SerializeField]
    private float bodyLookAtWeight = 0.6f;

    [Range(0f, 1f)]
    [Tooltip(
        "Controls the amount of rotation applied to the head to achieve the 'Look At' target. The closer to 1, the more the head will rotate to follow the target.")]
    [SerializeField]
    private float headLookAtWeight = 0.8f;

    [Range(0f, 1f)]
    [Tooltip(
        "Controls the amount of rotation applied to the eyes to achieve the 'Look At' target. The closer to 1, the more the eyes will rotate to follow the target.")]
    [SerializeField]
    private float eyesLookAtWeight = 1f;

    [Space(10)]
    [Tooltip(
        "Set this to true if you want the character to look away randomly, false to always look at the target")]
    [SerializeField]
    private bool lookAway;

    private Animator _animator;                         // Animator driving the IK look-at.
    private float _appliedBodyLookAtWeight;             // Smoothed body weight actually applied each IK pass.
    private ConvaiActionsHandler _convaiActionsHandler; // Optional; tracking pauses while its actions run.
    private float _currentLookAtWeight;                 // Smoothed overall look-at weight.
    private float _desiredLookAtWeight = 1f;            // Target weight (randomized when lookAway is enabled).
    private Transform _headPivot;                       // Helper transform used to measure head rotation toward the target.
    private bool _isActionRunning;                      // Suspends tracking while an NPC action animation plays.

    private void Start()
    {
        InitializeComponents();
        InitializeHeadPivot();
        InvokeRepeating(nameof(UpdateTarget), 0, POSITION_UPDATE_DELAY);
    }

    private void OnDisable()
    {
        // NOTE(review): events are unsubscribed here but only subscribed in Start();
        // re-enabling the component leaves it unsubscribed - confirm whether that is intended.
        if (_convaiActionsHandler != null)
            _convaiActionsHandler.UnregisterForActionEvents(ConvaiActionsHandler_OnActionStarted, ConvaiActionsHandler_OnActionEnded);
    }

    /// <summary>
    /// Unity's built-in method called during the IK pass.
    /// </summary>
    public void OnAnimatorIK(int layerIndex)
    {
        PerformHeadTracking();
    }

    // Caches the Animator, resolves the target, and hooks action events when available.
    private void InitializeComponents()
    {
        if (!_animator) _animator = GetComponent<Animator>();
        InitializeTargetObject();
        if (TryGetComponent(out _convaiActionsHandler))
            _convaiActionsHandler.RegisterForActionEvents(ConvaiActionsHandler_OnActionStarted, ConvaiActionsHandler_OnActionEnded);
    }

    private void ConvaiActionsHandler_OnActionStarted(string action, GameObject target)
    {
        SetActionRunning(true);
    }

    private void ConvaiActionsHandler_OnActionEnded(string action, GameObject target)
    {
        SetActionRunning(false);
    }

    private void InitializeHeadPivot()
    {
        // Check if the pivot already exists
        if (_headPivot) return;
        // Create a new GameObject for the pivot and parent it to this character.
        _headPivot = new GameObject("HeadPivot").transform;
        _headPivot.transform.parent = transform;
        // Position the pivot a bit above the base (roughly around the character's neck/head).
        _headPivot.localPosition = new Vector3(0, 1.6f, 0);
    }

    private void RotateCharacterTowardsTarget()
    {
        Vector3 toTarget = TargetObject.position - transform.position;
        float distance = toTarget.magnitude;
        // Calculate the angle difference between the character's forward direction and the direction towards the target.
        float angleDifference = Vector3.Angle(transform.forward, toTarget);
        // Adjust turn speed based on distance to target (closer targets demand faster turns).
        // NOTE(review): divides by distance; a target at the exact same position yields Infinity - confirm separation is guaranteed.
        float adjustedTurnSpeed = turnSpeed * 4 * (1f / distance);
        // If the angle difference exceeds the limit, turn the character smoothly towards the target.
        if (Mathf.Abs(angleDifference) > 0.65f)
        {
            Vector3 targetDirection = toTarget.normalized;
            // Zero out the y-component (up-down direction) to only rotate on the horizontal plane.
            targetDirection.y = 0;
            Quaternion targetRotation = Quaternion.LookRotation(targetDirection);
            transform.rotation = Quaternion.RotateTowards(transform.rotation, targetRotation,
                adjustedTurnSpeed * Time.deltaTime);
            // Ensure that the character doesn't tilt on the X and Z axis.
            transform.eulerAngles = new Vector3(0, transform.eulerAngles.y, 0);
        }
    }

    private void InitializeTargetObject()
    {
        if (TargetObject != null) return;
        ConvaiLogger.Warn("No target object set for head tracking. Setting default target as main camera",
            ConvaiLogger.LogCategory.Character);
        if (Camera.main != null) TargetObject = Camera.main.transform;
    }

    /// <summary>
    /// Updates the target weight for the look-at.
    /// </summary>
    private void UpdateTarget()
    {
        _desiredLookAtWeight = lookAway ? Random.Range(0.2f, 1.0f) : 1f;
    }

    /// <summary>
    /// Performs the head tracking towards the target object.
    /// </summary>
    private void PerformHeadTracking()
    {
        if (_isActionRunning) return;
        // Fix: guard against a missing target (InitializeTargetObject may find no main camera).
        if (TargetObject == null) return;
        float distance = Vector3.Distance(transform.position, TargetObject.position);
        DrawRayToTarget();
        // Only track at full weight within half the tracking distance; otherwise fade the weight out.
        // NOTE(review): when out of range SetCurrentLookAtWeight runs twice per pass (here and
        // below), doubling the fade-out speed - preserved as-is; confirm whether intentional.
        if (!(distance < trackingDistance / 2))
        {
            _desiredLookAtWeight = 0;
            if (_currentLookAtWeight > 0)
                SetCurrentLookAtWeight();
        }
        SetCurrentLookAtWeight();
        _headPivot.transform.LookAt(TargetObject); // orient the pivot towards the target object
        // Limit the head rotation.
        float headRotation = _headPivot.localRotation.y;
        if (Mathf.Abs(headRotation) > 0.70f)
        {
            // Clamp the rotation component when it exceeds the limit.
            headRotation = Mathf.Sign(headRotation) * 0.70f;
            Quaternion localRotation = _headPivot.localRotation;
            localRotation.y = headRotation;
            _headPivot.localRotation = localRotation;
        }
        // Adjust body rotation weight based on how much the head is rotated.
        float targetBodyLookAtWeight = Mathf.Abs(_headPivot.localRotation.y) > 0.45f
            ? bodyLookAtWeight / 3f
            : 0f;
        // Smooth transition between current and target body rotation weight.
        _appliedBodyLookAtWeight = Mathf.Lerp(_appliedBodyLookAtWeight, targetBodyLookAtWeight, Time.deltaTime);
        // Apply rotation weights to the Animator.
        RotateCharacterTowardsTarget();
        AdjustAnimatorLookAt();
    }

    /// <summary>
    /// Method to set the current look at weight based on the desired look at weight.
    /// </summary>
    private void SetCurrentLookAtWeight()
    {
        float angleDifference = _headPivot.localRotation.y;
        // Lerp towards the desired weight while the head is near-forward, otherwise fade towards 0.
        _currentLookAtWeight = Mathf.Abs(angleDifference) < 0.55f
            ? Mathf.Lerp(Mathf.Clamp(_currentLookAtWeight, 0, 1), Mathf.Clamp(_desiredLookAtWeight, 0, 1),
                Time.deltaTime * POSITION_UPDATE_DELAY)
            : Mathf.Lerp(Mathf.Clamp(_currentLookAtWeight, 0, 1), 0, Time.deltaTime * POSITION_UPDATE_DELAY);
    }

    /// <summary>
    /// Method to apply rotation weights to the Animator.
    /// </summary>
    private void AdjustAnimatorLookAt()
    {
        // Fix: the original dereferenced _animator to zero the weight even in the branch
        // that handles _animator being null/destroyed, which would throw.
        if (!_animator) return;
        if (TargetObject == null)
        {
            _animator.SetLookAtWeight(0);
            return;
        }
        // Set the look-at weights in the Animator; the body weight is clamped to 0-0.5
        // since rotating the body too much is less desirable than turning head or eyes.
        _animator.SetLookAtWeight(Mathf.Clamp(
                _currentLookAtWeight, 0, 1),
            Mathf.Clamp(_appliedBodyLookAtWeight, 0, .5f),
            Mathf.Clamp(headLookAtWeight / 1.25f, 0, .8f),
            Mathf.Clamp(eyesLookAtWeight, 0, 1));
        // Set the look-at position for the Animator (where the body/head/eyes will turn toward).
        _animator.SetLookAtPosition(TargetObject.position);
    }

    /// <summary>
    /// DebugLog utility to visualize the tracking direction in the Scene view.
    /// </summary>
    private void DrawRayToTarget()
    {
        Vector3 pos = transform.position;
        // Red ray from the character toward the target, scaled to half the tracking distance.
        Debug.DrawRay(pos,
            (TargetObject.position - pos).normalized * trackingDistance / 2, Color.red);
    }

    /// <summary>
    /// Pauses (true) or resumes (false) head tracking while an action animation is running.
    /// </summary>
    public void SetActionRunning(bool newValue)
    {
        _isActionRunning = newValue;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 47bf09eafaaeed940ab9e5531a64790c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,196 @@
using System;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
using UnityEngine.EventSystems;
#if ENABLE_INPUT_SYSTEM
using UnityEngine.InputSystem;
#endif
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// Centralized input handling for Convai scenes. Wraps both the new Input System
/// (when ENABLE_INPUT_SYSTEM is defined) and the legacy Input Manager behind one API.
/// Singleton, accessible via <see cref="Instance"/>.
/// </summary>
[DefaultExecutionOrder(-105)]
public class ConvaiInputManager : MonoBehaviour
#if ENABLE_INPUT_SYSTEM
    , Controls.IPlayerActions
#endif
{
    [HideInInspector] public Vector2 moveVector; // Current movement input (WASD/stick).
    [HideInInspector] public Vector2 lookVector; // Current look delta (mouse/stick).
    public bool isRunning { get; private set; }  // Toggled (not held) by the run key.
    // Input event hooks consumed by other scripts.
    public Action jumping;
    public Action sendText;
    public Action toggleChat;
    public Action toggleSettings;
    public bool IsTalkKeyHeld { get; private set; }
    public Action<bool> talkKeyInteract; // true on talk key press, false on release.
#if ENABLE_INPUT_SYSTEM
    private Controls _controls; // Generated input-actions wrapper.
#elif ENABLE_LEGACY_INPUT_MANAGER
    [Serializable]
    public class MovementKeys
    {
        public const KeyCode Forward = KeyCode.W;
        public const KeyCode Backward = KeyCode.S;
        public const KeyCode Right = KeyCode.D;
        public const KeyCode Left = KeyCode.A;
    }
    public KeyCode TextSendKey = KeyCode.Return;
    public KeyCode TextSendAltKey = KeyCode.KeypadEnter;
    public KeyCode TalkKey = KeyCode.T;
    public KeyCode OpenSettingPanelKey = KeyCode.F10;
    public KeyCode RunKey = KeyCode.LeftShift;
    public MovementKeys movementKeys;
    // True on the frame the talk key was pressed (legacy input path).
    public bool WasTalkKeyPressed()
    {
        return Input.GetKeyDown(TalkKey);
    }
#endif
    public static ConvaiInputManager Instance { get; private set; }
    private void Awake()
    {
        // Enforce the singleton: destroy duplicate instances.
        if (Instance != null)
        {
            ConvaiLogger.DebugLog("There's more than one ConvaiInputManager! " + transform + " - " + Instance, ConvaiLogger.LogCategory.UI);
            Destroy(gameObject);
            return;
        }
        Instance = this;
        LockCursor(true);
    }
    private void OnEnable()
    {
#if ENABLE_INPUT_SYSTEM
        _controls = new Controls();
        _controls.Player.SetCallbacks(this);
        _controls.Enable();
#endif
    }
    private void OnDisable()
    {
#if ENABLE_INPUT_SYSTEM
        _controls.Disable();
#endif
    }
#if ENABLE_INPUT_SYSTEM
    // Controls.IPlayerActions callbacks (new Input System path).
    public void OnJump(InputAction.CallbackContext context)
    {
        if (context.performed) jumping?.Invoke();
    }
    public void OnMove(InputAction.CallbackContext context)
    {
        moveVector = context.ReadValue<Vector2>();
    }
    public void OnLook(InputAction.CallbackContext context)
    {
        lookVector = context.ReadValue<Vector2>();
    }
    public void OnMousePress(InputAction.CallbackContext context)
    {
        // Intentionally empty: mouse press is polled in Update for cursor locking.
    }
    public void OnRun(InputAction.CallbackContext context)
    {
        if (context.performed) isRunning = !isRunning;
    }
    public void OnSendText(InputAction.CallbackContext context)
    {
        if (context.performed) sendText?.Invoke();
    }
    public void OnToggleChat(InputAction.CallbackContext context)
    {
        if (context.performed) toggleChat?.Invoke();
    }
    public void OnToggleSettings(InputAction.CallbackContext context)
    {
        if (context.performed) toggleSettings?.Invoke();
    }
    public void OnTalk(InputAction.CallbackContext context)
    {
        if (context.performed)
        {
            talkKeyInteract?.Invoke(true);
            IsTalkKeyHeld = true;
        }
        if (context.canceled)
        {
            talkKeyInteract?.Invoke(false);
            IsTalkKeyHeld = false;
        }
    }
    public void OnCursorUnlock(InputAction.CallbackContext context)
    {
        // Intentionally empty: cursor unlock is polled in Update.
    }
#endif
    private void Update()
    {
#if ENABLE_INPUT_SYSTEM
        // Lock the cursor on click (unless over UI), unlock on the dedicated action.
        if (_controls.Player.MousePress.WasPressedThisFrame() && !EventSystem.current.IsPointerOverGameObject()) LockCursor(true);
        if (_controls.Player.CursorUnlock.WasPressedThisFrame()) LockCursor(false);
#elif ENABLE_LEGACY_INPUT_MANAGER
        // Legacy path: poll all inputs every frame.
        if (Input.GetButton("Jump"))
        {
            jumping?.Invoke();
        }
        moveVector = Vector2.zero;
        if (Input.GetKey(MovementKeys.Forward)) moveVector.y += 1f;
        if (Input.GetKey(MovementKeys.Backward)) moveVector.y -= 1f;
        if (Input.GetKey(MovementKeys.Left)) moveVector.x -= 1f;
        if (Input.GetKey(MovementKeys.Right)) moveVector.x += 1f;
        lookVector.x = Input.GetAxis("Mouse X") * 2f;
        lookVector.y = Input.GetAxis("Mouse Y") * 2f;
        if (Input.GetMouseButtonDown(0) && !EventSystem.current.IsPointerOverGameObject()) LockCursor(true);
        if (Input.GetKeyDown(RunKey)) isRunning = !isRunning;
        if (Input.GetKeyDown(TextSendKey) || Input.GetKeyDown(TextSendAltKey)) sendText?.Invoke();
        if (Input.GetKeyDown(OpenSettingPanelKey)) toggleSettings?.Invoke();
        if (Input.GetKeyDown(TalkKey))
        {
            talkKeyInteract?.Invoke(true);
            IsTalkKeyHeld = true;
        }
        if (Input.GetKeyUp(TalkKey))
        {
            talkKeyInteract?.Invoke(false);
            IsTalkKeyHeld = false;
        }
#endif
    }
    // Locks and hides the cursor (true) or frees and shows it (false).
    private static void LockCursor(bool lockState)
    {
        Cursor.lockState = lockState ? CursorLockMode.Locked : CursorLockMode.None;
        Cursor.visible = !lockState;
    }
#if ENABLE_INPUT_SYSTEM
    // Exposes the Talk input action so other scripts can subscribe to its phases.
    public InputAction GetTalkKeyAction()
    {
        return _controls.Player.Talk;
    }
#endif
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7a69a6bc2bf58e64883c79cf732d1bfd
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,583 @@
using System;
using System.Collections;
using System.Collections.Generic;
using Convai.Scripts.Runtime.Addons;
using Convai.Scripts.Runtime.Attributes;
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.LoggerSystem;
using Convai.Scripts.Runtime.PlayerStats;
using Convai.Scripts.Runtime.UI;
using Grpc.Core;
using Service;
using TMPro;
using UnityEngine;
using UnityEngine.Events;
using ConvaiLipSync = Convai.Scripts.Runtime.Features.LipSync.ConvaiLipSync;
#if UNITY_ANDROID
using UnityEngine.Android;
#endif
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// The ConvaiNPC class is a MonoBehaviour script that gives a GameObject the ability to interact with the Convai API.
/// </summary>
[RequireComponent(typeof(Animator), typeof(AudioSource))]
[AddComponentMenu("Convai/ConvaiNPC")]
[HelpURL(
"https://docs.convai.com/api-docs/plugins-and-integrations/unity-plugin/overview-of-the-convainpc.cs-script")]
public class ConvaiNPC : MonoBehaviour
{
private const int AUDIO_SAMPLE_RATE = 44100;
private const string GRPC_API_ENDPOINT = "stream.convai.com";
private const int RECORDING_FREQUENCY = AUDIO_SAMPLE_RATE;
private const int RECORDING_LENGTH = 30; // Microphone clip length in seconds.
private static readonly int Talk = Animator.StringToHash("Talk"); // Cached animator parameter hash.
[Header("Character Information")]
[Tooltip("Enter the character name for this NPC.")]
public string characterName;
[Tooltip("Enter the character ID for this NPC.")]
public string characterID;
[Tooltip("The current session ID for the chat with this NPC.")]
[ReadOnly]
public string sessionID = "-1"; // "-1" marks an uninitialized session.
[Tooltip("Is this character active?")]
[ReadOnly]
public bool isCharacterActive;
[HideInInspector] public ConvaiActionsHandler actionsHandler;
[HideInInspector] public ConvaiLipSync convaiLipSync;
[Tooltip("Is this character talking?")]
[SerializeField]
[ReadOnly]
private bool isCharacterTalking;
[Header("Session Initialization")]
[Tooltip("Enable/disable initializing session ID by sending a text request to the server")]
public bool initializeSessionID;
[HideInInspector] public ConvaiPlayerInteractionManager playerInteractionManager;
[HideInInspector] public NarrativeDesignManager narrativeDesignManager;
[HideInInspector] public TriggerUnityEvent onTriggerSent;
private readonly Queue<GetResponseResponse> _getResponseResponses = new(); // Server responses awaiting processing.
private bool _animationPlaying;
private Channel _channel; // gRPC channel to the Convai endpoint.
private Animator _characterAnimator;
private ConvaiService.ConvaiServiceClient _client; // gRPC client created over _channel.
private ConvaiChatUIHandler _convaiChatUIHandler;
private ConvaiCrosshairHandler _convaiCrosshairHandler;
private ConvaiGroupNPCController _convaiGroupNPCController; // Lazily resolved; see IsInConversationWithAnotherNPC.
private ConvaiPlayerDataSO _convaiPlayerData;
private bool _groupNPCComponentNotFound; // Caches a failed ConvaiGroupNPCController lookup so it runs once.
private ConvaiGRPCAPI _grpcAPI;
private bool _isActionActive;
private bool _isLipSyncActive;
private Coroutine _processResponseCoroutine;
public ActionConfig ActionConfig;
// New fields for sample and transcript buffering
private readonly List<float> _sampleBuffer = new();
private readonly List<string> _transcriptBuffer = new();
private bool HasBufferedData => _sampleBuffer.Count > 0;
private float _lastAudioDataTime;
private const int SAMPLE_BUFFER_SIZE = 44100;
private const float BUFFER_TIMEOUT = 1.5f; // 1500ms timeout
private int _currentSampleRate;
// True when this NPC is currently engaged in an NPC-to-NPC conversation.
// Lazily resolves the optional ConvaiGroupNPCController component and caches a failed
// lookup (_groupNPCComponentNotFound) so TryGetComponent only ever runs once.
private bool IsInConversationWithAnotherNPC
{
    get
    {
        if (_groupNPCComponentNotFound) return false;
        if (_convaiGroupNPCController == null)
        {
            if (TryGetComponent(out ConvaiGroupNPCController component))
                _convaiGroupNPCController = component;
            else
                _groupNPCComponentNotFound = true;
        }
        return _convaiGroupNPCController != null && _convaiGroupNPCController.IsInConversationWithAnotherNPC;
    }
}
// Speaker ID from the player data asset, or empty when no player data is assigned.
private string SpeakerID => _convaiPlayerData == null ? string.Empty : _convaiPlayerData.SpeakerID;
// Whether this NPC is currently speaking; settable only from within this class.
public bool IsCharacterTalking
{
    get => isCharacterTalking;
    private set => isCharacterTalking = value;
}
// Falls back to the OVR face model when no lip-sync component is attached.
private FaceModel FaceModel => convaiLipSync == null ? FaceModel.OvrModelName : convaiLipSync.faceModel;
// gRPC endpoint this NPC connects to.
public string GetEndPointURL => GRPC_API_ENDPOINT;
// Properties with getters and setters (feature toggles; deliberately not serialized).
[field: NonSerialized] public bool IncludeActionsHandler { get; set; }
[field: NonSerialized] public bool LipSync { get; set; }
[field: NonSerialized] public bool HeadEyeTracking { get; set; }
[field: NonSerialized] public bool EyeBlinking { get; set; }
[field: NonSerialized] public bool NarrativeDesignManager { get; set; }
[field: NonSerialized] public bool ConvaiGroupNPCController { get; set; }
[field: NonSerialized] public bool LongTermMemoryController { get; set; }
[field: NonSerialized] public bool NarrativeDesignKeyController { get; set; }
[field: NonSerialized] public bool DynamicInfoController { get; set; }
// Handles audio playback for this NPC.
public ConvaiNPCAudioManager AudioManager { get; private set; }
/// <summary>Initializes all sibling components for this NPC.</summary>
private void Awake()
{
ConvaiLogger.Info("Initializing ConvaiNPC : " + characterName, ConvaiLogger.LogCategory.Character);
InitializeComponents();
ConvaiLogger.Info("ConvaiNPC component initialized", ConvaiLogger.LogCategory.Character);
}
/// <summary>
/// Sets up the gRPC channel/client, requests microphone permission on Android,
/// and (optionally) initializes the conversation session ID.
/// </summary>
private async void Start()
{
// Assign the ConvaiGRPCAPI component in the scene
_grpcAPI = ConvaiGRPCAPI.Instance;
// Check if the platform is Android
#if UNITY_ANDROID
// Check if the user has not authorized microphone permission
if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
// Request microphone permission from the user
Permission.RequestUserPermission(Permission.Microphone);
#endif
// DO NOT EDIT
// gRPC setup configuration
#region GRPC_SETUP
SslCredentials credentials = new(); // Create SSL credentials for secure communication
List<ChannelOption> options = new()
{
// Allow responses up to 16 MB (audio payloads exceed the gRPC default).
new ChannelOption(ChannelOptions.MaxReceiveMessageLength, 16 * 1024 * 1024)
};
_channel = new Channel(GRPC_API_ENDPOINT, credentials, options); // Initialize a gRPC channel with the specified endpoint and credentials
_client = new ConvaiService.ConvaiServiceClient(_channel); // Initialize the gRPC client for the ConvaiService using the channel
#endregion
if (initializeSessionID) sessionID = await ConvaiGRPCAPI.InitializeSessionIDAsync(characterName, _client, characterID, sessionID);
_convaiChatUIHandler = ConvaiChatUIHandler.Instance;
}
/// <summary>
/// Subscribes to audio-manager and NPC-manager events and starts the
/// response-processing loop. Mirrors the unsubscriptions in OnDisable.
/// </summary>
private void OnEnable()
{
    AudioManager.OnCharacterTalkingChanged += HandleIsCharacterTalkingAnimation;
    AudioManager.OnAudioTranscriptAvailable += HandleAudioTranscriptAvailable;
    AudioManager.OnCharacterTalkingChanged += SetCharacterTalking;
    // FIX: OnDisable unsubscribes PurgeExcessLipSyncFrames, but it was never
    // subscribed here, so excess lip-sync frames were never purged after a
    // clip finished. Subscribe to keep the pair symmetric.
    AudioManager.PurgeExcessLipSyncFrames += PurgeLipSyncFrames;
    ConvaiNPCManager.Instance.OnActiveNPCChanged += HandleActiveNPCChanged;
    if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
    _processResponseCoroutine = StartCoroutine(ProcessResponseCoroutine());
}
/// <summary>
/// Unsubscribes from all events and stops the response-processing loop.
/// AudioManager is null-checked because component destruction order during
/// teardown is not guaranteed.
/// </summary>
private void OnDisable()
{
if (AudioManager != null)
{
AudioManager.OnCharacterTalkingChanged -= HandleIsCharacterTalkingAnimation;
AudioManager.OnAudioTranscriptAvailable -= HandleAudioTranscriptAvailable;
AudioManager.OnCharacterTalkingChanged -= SetCharacterTalking;
AudioManager.PurgeExcessLipSyncFrames -= PurgeLipSyncFrames;
}
ConvaiNPCManager.Instance.OnActiveNPCChanged -= HandleActiveNPCChanged;
if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
if (_processResponseCoroutine != null) StopCoroutine(_processResponseCoroutine);
}
/// <summary>
/// Unity callback that is invoked when the application is quitting.
/// Stops the loop that plays audio in order.
/// </summary>
private void OnApplicationQuit()
{
AudioManager.StopAudioLoop();
}
/// <summary>
/// Editor-time validation: trims stray whitespace from the character ID and
/// refreshes the chat UI's character list.
/// </summary>
private void OnValidate()
{
if (!string.IsNullOrEmpty(characterID)) characterID = characterID.Trim();
_convaiChatUIHandler = ConvaiChatUIHandler.Instance;
if (_convaiChatUIHandler != null) _convaiChatUIHandler.UpdateCharacterList();
}
/// <summary>
/// Fires a named narrative trigger (with an empty message) at the server,
/// then raises <see cref="onTriggerSent"/> for local listeners.
/// </summary>
/// <param name="triggerName">Name of the trigger to fire.</param>
public async void TriggerEvent(string triggerName)
{
    const string triggerMessage = "";
    TriggerConfig triggerConfig = new()
    {
        TriggerName = triggerName,
        TriggerMessage = triggerMessage
    };
    // Deliver the trigger over gRPC before notifying local listeners.
    await ConvaiGRPCAPI.Instance.SendTriggerData(_client, characterID, triggerConfig, this);
    onTriggerSent.Invoke(triggerMessage, triggerName);
}
/// <summary>
/// Sends a free-form trigger message (with an empty trigger name) to the
/// server, then raises <see cref="onTriggerSent"/> for local listeners.
/// </summary>
/// <param name="triggerMessage">Message text to send as the trigger payload.</param>
public async void TriggerSpeech(string triggerMessage)
{
    const string triggerName = "";
    TriggerConfig triggerConfig = new()
    {
        TriggerName = triggerName,
        TriggerMessage = triggerMessage
    };
    // Deliver the trigger over gRPC before notifying local listeners.
    await ConvaiGRPCAPI.Instance.SendTriggerData(_client, characterID, triggerConfig, this);
    onTriggerSent.Invoke(triggerMessage, triggerName);
}
// Raised whenever IsCharacterTalking flips (see SetCharacterTalking).
private event Action<bool> OnCharacterTalking;
// Forwards the lip-sync "wait" flag to the audio manager so playback can
// pause until viseme data is ready.
private void UpdateWaitUntilLipSync(bool value)
{
AudioManager.SetWaitForCharacterLipSync(value);
}
/// <summary>
/// Interrupts this NPC's speech when another NPC becomes active while the
/// player is holding the talk key (unless this NPC is mid NPC-to-NPC chat).
/// </summary>
private void HandleActiveNPCChanged(ConvaiNPC newActiveNPC)
{
// If this NPC is no longer the active NPC, interrupt its speech
if (this != newActiveNPC && !IsInConversationWithAnotherNPC && ConvaiInputManager.Instance.IsTalkKeyHeld) InterruptCharacterSpeech();
}
/// <summary>
/// Resolves scene-level handlers, caches sibling components, and adds the
/// audio and player-interaction manager components to this GameObject.
/// </summary>
private void InitializeComponents()
{
_convaiChatUIHandler = FindObjectOfType<ConvaiChatUIHandler>();
_convaiCrosshairHandler = FindObjectOfType<ConvaiCrosshairHandler>();
_characterAnimator = GetComponent<Animator>();
AudioManager = gameObject.AddComponent<ConvaiNPCAudioManager>();
narrativeDesignManager = GetComponent<NarrativeDesignManager>();
ConvaiPlayerDataSO.GetPlayerData(out _convaiPlayerData);
InitializePlayerInteractionManager();
InitializeLipSync();
StartCoroutine(InitializeActionsHandler());
}
/// <summary>
/// Looks up the optional actions handler after a one-second delay
/// (presumably to let other components finish initializing — TODO confirm).
/// </summary>
private IEnumerator InitializeActionsHandler()
{
yield return new WaitForSeconds(1);
actionsHandler = GetComponent<ConvaiActionsHandler>();
if (actionsHandler != null)
{
_isActionActive = true;
ActionConfig = actionsHandler.ActionConfig;
}
}
// Adds and wires up the player-interaction manager for this NPC.
private void InitializePlayerInteractionManager()
{
playerInteractionManager = gameObject.AddComponent<ConvaiPlayerInteractionManager>();
playerInteractionManager.Initialize(this, _convaiCrosshairHandler, _convaiChatUIHandler);
}
/// <summary>
/// Caches the optional lip-sync component; when present, subscribes so audio
/// playback can wait for the character's lip-sync (viseme) data.
/// </summary>
private void InitializeLipSync()
{
    convaiLipSync = GetComponent<ConvaiLipSync>();
    if (convaiLipSync != null)
    {
        _isLipSyncActive = true;
        // FIX: removed a redundant second GetComponent<ConvaiLipSync>() call
        // that re-fetched the component already cached above.
        convaiLipSync.OnCharacterLipSyncing += UpdateWaitUntilLipSync;
    }
}
// Forwards each spoken transcript chunk to the chat UI, but only while this
// NPC is the active conversation target.
private void HandleAudioTranscriptAvailable(string transcript)
{
if (isCharacterActive) _convaiChatUIHandler.SendCharacterText(characterName, transcript);
}
/// <summary>
/// Drives the Animator's talk flag from the character's talking state.
/// The flag is only raised once per speech segment, and always cleared
/// when speech stops.
/// </summary>
/// <param name="isTalking">True while the character is speaking.</param>
private void HandleIsCharacterTalkingAnimation(bool isTalking)
{
    if (!isTalking)
    {
        // Clearing is idempotent on the Animator, so no guard is needed here.
        _animationPlaying = false;
        _characterAnimator.SetBool(Talk, false);
        return;
    }
    if (_animationPlaying) return; // talk animation already running
    _animationPlaying = true;
    _characterAnimator.SetBool(Talk, true);
}
/// <summary>
/// Sends message data to the server asynchronously.
/// </summary>
/// <param name="text">The message to send.</param>
public async void SendTextDataAsync(string text)
{
try
{
await ConvaiGRPCAPI.Instance.SendTextData(_client, text, characterID,
_isActionActive, _isLipSyncActive, ActionConfig, FaceModel, SpeakerID);
}
catch (Exception ex)
{
// Errors are logged rather than rethrown: this is a fire-and-forget
// async void entry point, so an unhandled exception would be fatal.
ConvaiLogger.Error(ex, ConvaiLogger.LogCategory.Character);
// Handle the exception, e.g., show a message to the user.
}
}
/// <summary>
/// Initializes the session in an asynchronous manner and handles the receiving of results from the server.
/// Initiates the audio recording process using the gRPC API.
/// Shows a notification and aborts when no microphone device is present.
/// </summary>
public async void StartListening()
{
if (!MicrophoneManager.Instance.HasAnyMicrophoneDevices())
{
NotificationSystemHandler.Instance.NotificationRequest(NotificationType.NoMicrophoneDetected);
return;
}
await _grpcAPI.StartRecordAudio(_client, _isActionActive, _isLipSyncActive, RECORDING_FREQUENCY,
RECORDING_LENGTH, characterID, ActionConfig, FaceModel, SpeakerID);
}
/// <summary>
/// Stops the ongoing audio recording process.
/// </summary>
public void StopListening()
{
// Stop the audio recording process using the ConvaiGRPCAPI StopRecordAudio method
_grpcAPI.StopRecordAudio();
}
/// <summary>
/// Queues a server response for later processing by the response loop.
/// Responses that carry no audio payload are ignored.
/// </summary>
/// <param name="response">Response received from the Convai service.</param>
public void EnqueueResponse(GetResponseResponse response)
{
    if (response?.AudioResponse is null) return;
    _getResponseResponses.Enqueue(response);
}
/// <summary>Discards every queued, not-yet-processed server response.</summary>
public void ClearResponseQueue() => _getResponseResponses.Clear();

/// <summary>Asks the lip-sync component (when present) to drop surplus frames.</summary>
private void PurgeLipSyncFrames()
{
    // Explicit comparison on purpose: UnityEngine.Object overloads ==, so
    // the ?. operator must not be used for this null check.
    if (convaiLipSync != null) convaiLipSync.PurgeExcessFrames();
}
/// <summary>
/// Polls ProcessResponse() roughly 100 times per second for as long as this
/// GameObject remains active in the hierarchy.
/// </summary>
private IEnumerator ProcessResponseCoroutine()
{
    // PERF: cache the yield instruction — allocating a new WaitForSeconds on
    // every iteration creates avoidable per-frame garbage.
    WaitForSeconds wait = new(1f / 100f);
    while (gameObject.activeInHierarchy)
    {
        ProcessResponse();
        yield return wait;
    }
}
/// <summary>
/// Processes a response fetched from a character.
/// </summary>
/// <remarks>
/// 1. Processes audio/message/face data from the response and adds it to _responseAudios.
/// 2. Identifies actions from the response and parses them for execution.
/// Audio chunks are accumulated in _sampleBuffer/_transcriptBuffer and flushed
/// into AudioClips by CreateAndAddAudioClip once large (or stale) enough.
/// </remarks>
private void ProcessResponse()
{
    // Only process while this NPC is active or talking to another NPC.
    if (!isCharacterActive && !IsInConversationWithAnotherNPC)
    {
        return;
    }
    // Check if there is any queued response
    if (_getResponseResponses.Count > 0)
    {
        GetResponseResponse serverResponse = _getResponseResponses.Dequeue();
        if (serverResponse?.AudioResponse != null)
        {
            int audioDataLength = serverResponse.AudioResponse.AudioData.ToByteArray().Length;
            // If audio data length is greater than header length (44-byte WAV
            // header plus slack), process as normal audio.
            if (audioDataLength > 46)
            {
                GetResponseResponse.Types.AudioResponse audioResponse = serverResponse.AudioResponse;
                string textDataString = audioResponse.TextData;
                _currentSampleRate = audioResponse.AudioConfig.SampleRateHertz;
                // Process the audio data to get the samples for the audio clip
                float[] currentSamples = AudioManager.ProcessByteAudioDataToAudioClip(audioResponse);
                // FIX: ProcessByteAudioDataToAudioClip returns null when the WAV
                // payload cannot be parsed; without this guard AddRange(null)
                // would throw ArgumentNullException. Drop the bad chunk instead.
                if (currentSamples == null)
                {
                    ConvaiLogger.DebugLog("Dropping unparseable audio chunk from response.", ConvaiLogger.LogCategory.Character);
                    return;
                }
                // Add current samples to buffer
                _sampleBuffer.AddRange(currentSamples);
                // Update last audio data time
                _lastAudioDataTime = Time.time;
                // Add transcript to buffer if it's not empty or null
                if (!string.IsNullOrEmpty(textDataString))
                {
                    _transcriptBuffer.Add(textDataString);
                }
                // Check conditions for creating AudioClip:
                // 1. Buffer size >= SAMPLE_BUFFER_SIZE OR 2. We haven't received data for BUFFER_TIMEOUT seconds
                // NOTE(review): the timeout term is always false here because
                // _lastAudioDataTime was just set to Time.time above; the stale
                // case is instead covered by the else-if branch below. Left
                // unchanged to preserve behavior — confirm intent.
                bool shouldProcessBuffer = _sampleBuffer.Count >= SAMPLE_BUFFER_SIZE * 3f || Time.time - _lastAudioDataTime >= BUFFER_TIMEOUT;
                if (shouldProcessBuffer)
                {
                    CreateAndAddAudioClip(false);
                }
            }
            else if (serverResponse.AudioResponse.EndOfResponse)
            {
                // If we have any buffered data, create a final AudioClip with it
                if (HasBufferedData)
                {
                    CreateAndAddAudioClip(true);
                }
                else
                {
                    // No buffered data, just add a final null response
                    AudioManager.AddResponseAudio(new ConvaiNPCAudioManager.ResponseAudio
                    {
                        AudioClip = null,
                        AudioTranscript = null,
                        IsFinal = true
                    });
                }
            }
        }
    }
    else if (HasBufferedData && Time.time - _lastAudioDataTime >= BUFFER_TIMEOUT)
    {
        // Process any remaining buffered data if we haven't received new data for a while
        CreateAndAddAudioClip(false);
    }
}
/// <summary>
/// Creates an AudioClip from the buffered samples and adds it to the response queue
/// </summary>
/// <param name="isFinal">Whether this is the final chunk of audio</param>
private void CreateAndAddAudioClip(bool isFinal)
{
// Convert buffer to array
float[] samples = _sampleBuffer.ToArray();
// Create merged transcript
string mergedTranscript = string.Join(" ", _transcriptBuffer);
// Create AudioClip (mono, at the sample rate reported by the last chunk)
AudioClip clip = AudioClip.Create("Audio Response", samples.Length, 1, _currentSampleRate, false);
clip.SetData(samples, 0);
ConvaiLogger.DebugLog($"Creating AudioClip from merged samples. Length: {samples.Length}, Audio clip length: {clip.length}",
ConvaiLogger.LogCategory.Character);
// Add to response queue
AudioManager.AddResponseAudio(new ConvaiNPCAudioManager.ResponseAudio
{
AudioClip = clip,
AudioTranscript = mergedTranscript,
IsFinal = isFinal
});
// Clear buffers so the next chunk starts fresh
_sampleBuffer.Clear();
_transcriptBuffer.Clear();
}
// Number of AudioClips still queued for playback on the audio manager.
public int GetAudioResponseCount()
{
return AudioManager.GetAudioResponseCount();
}
// Stops any clip currently playing and discards everything still queued.
public void StopAllAudioPlayback()
{
AudioManager.StopAllAudioPlayback();
AudioManager.ClearResponseAudioQueue();
}
// Clears the talk animation flag and flushes any pending lip-sync data.
public void ResetCharacterAnimation()
{
if (_characterAnimator != null)
_characterAnimator.SetBool(Talk, false);
if (convaiLipSync != null)
convaiLipSync.ConvaiLipSyncApplicationBase.ClearQueue();
}
/// <summary>
/// Updates the talking flag and raises <see cref="OnCharacterTalking"/>,
/// but only when the state actually changes.
/// </summary>
/// <param name="isTalking">New talking state for this character.</param>
public void SetCharacterTalking(bool isTalking)
{
    if (IsCharacterTalking == isTalking) return; // no state change — nothing to do
    ConvaiLogger.Info($"Character {characterName} is talking: {isTalking}", ConvaiLogger.LogCategory.Character);
    IsCharacterTalking = isTalking;
    OnCharacterTalking?.Invoke(IsCharacterTalking);
}
// Stops lip-sync playback, if the optional component is present.
public void StopLipSync()
{
if (convaiLipSync != null) convaiLipSync.StopLipSync();
}
// Asks the gRPC API to cut off this character's current speech.
public void InterruptCharacterSpeech()
{
_grpcAPI.InterruptCharacterSpeech(this);
}
// Exposes the gRPC client so other components can reuse the same channel.
public ConvaiService.ConvaiServiceClient GetClient()
{
return _client;
}
// Replaces the stored session ID (e.g. after the server issues a new one).
public void UpdateSessionID(string newSessionID)
{
sessionID = newSessionID;
}
// UnityEvent carrying (triggerMessage, triggerName); see onTriggerSent.
[Serializable]
public class TriggerUnityEvent : UnityEvent<string, string>
{
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4b38e4bc919e1f040ba78aea7472893e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,214 @@
using System;
using System.Collections;
using System.Collections.Generic;
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.LoggerSystem;
using Service;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
    /// <summary>
    /// Plays an NPC's response audio clips in arrival order and reports
    /// talking-state / transcript events back to <see cref="ConvaiNPC"/>.
    /// </summary>
    public class ConvaiNPCAudioManager : MonoBehaviour
    {
        // FIFO of clips (plus transcripts) awaiting playback.
        private readonly Queue<ResponseAudio> _responseAudios = new();
        private AudioSource _audioSource;
        private ConvaiNPC _convaiNPC;
        // Optional group-NPC controller; when present, playback waits while
        // another NPC in the group is talking.
        private ConvaiGroupNPCController _npcController;
        private bool _lastTalkingState;
        private Coroutine _playInOrderCoroutine;
        // Set by StopAudioLoop() (on application quit) to end PlayAudioInOrder.
        private bool _stopAudioPlayingLoop;
        // While true, playback is paused until lip-sync data is ready.
        private bool _waitForCharacterLipSync;
        private void Awake()
        {
            _audioSource = GetComponent<AudioSource>();
            _convaiNPC = GetComponent<ConvaiNPC>();
            TryGetComponent(out _npcController);
            _lastTalkingState = false;
        }
        private void OnEnable()
        {
            _playInOrderCoroutine = StartCoroutine(PlayAudioInOrder());
        }
        private void OnDisable()
        {
            // Drop pending audio and stop the playback coroutine when disabled.
            ClearResponseAudioQueue();
            StopAllAudioPlayback();
            _audioSource.clip = null;
            if (_playInOrderCoroutine == null) return;
            StopCoroutine(_playInOrderCoroutine);
        }
        /// <summary>Raised with each clip's transcript just as it starts playing.</summary>
        public event Action<string> OnAudioTranscriptAvailable;
        /// <summary>Raised whenever the talking state flips (true = started, false = stopped).</summary>
        public event Action<bool> OnCharacterTalkingChanged;
        /// <summary>Raised after each clip finishes so excess lip-sync frames can be dropped.</summary>
        public event Action PurgeExcessLipSyncFrames;
        /// <summary>Stops the clip currently playing, if any.</summary>
        public void StopAllAudioPlayback()
        {
            if (_audioSource != null && _audioSource.isPlaying) _audioSource.Stop();
        }
        /// <summary>Discards every queued, not-yet-played response clip.</summary>
        public void ClearResponseAudioQueue()
        {
            _responseAudios.Clear();
        }
        // Raises OnCharacterTalkingChanged only on actual state transitions.
        private void SetCharacterTalking(bool isTalking)
        {
            if (_lastTalkingState != isTalking)
            {
                OnCharacterTalkingChanged?.Invoke(isTalking);
                _lastTalkingState = isTalking;
            }
        }
        private void PurgeLipSyncFrames()
        {
            PurgeExcessLipSyncFrames?.Invoke();
        }
        /// <summary>Queues a clip (with transcript) for ordered playback.</summary>
        public void AddResponseAudio(ResponseAudio responseAudio)
        {
            _responseAudios.Enqueue(responseAudio);
        }
        /// <summary>Number of clips still waiting to be played.</summary>
        public int GetAudioResponseCount()
        {
            return _responseAudios.Count;
        }
        /// <summary>Sets (and returns) the "wait for lip sync" pause flag.</summary>
        public bool SetWaitForCharacterLipSync(bool value)
        {
            _waitForCharacterLipSync = value;
            return value;
        }
        /// <summary>
        /// Playback loop: dequeues clips one at a time, waits for lip-sync data
        /// and for other group NPCs to finish, plays the clip for its full
        /// length, then purges lip-sync frames. A response flagged IsFinal
        /// carries no clip and only marks the end of talking.
        /// </summary>
        public IEnumerator PlayAudioInOrder()
        {
            while (!_stopAudioPlayingLoop)
                if (_responseAudios.Count > 0)
                {
                    ResponseAudio currentResponseAudio = _responseAudios.Dequeue();
                    if (!currentResponseAudio.IsFinal)
                    {
                        _audioSource.clip = currentResponseAudio.AudioClip;
                        // Hold playback until lip-sync data is ready.
                        while (_waitForCharacterLipSync)
                            yield return new WaitForSeconds(0.01f);
                        if (_npcController != null)
                        {
                            // Don't talk over another NPC in the same group.
                            while (_npcController.IsOtherNPCTalking())
                            {
                                yield return new WaitForSeconds(0.1f);
                            }
                        }
                        _audioSource.Play();
                        //ConvaiLogger.DebugLog($"Playing: {currentResponseAudio.AudioTranscript}", ConvaiLogger.LogCategory.LipSync);
                        SetCharacterTalking(true);
                        OnAudioTranscriptAvailable?.Invoke(currentResponseAudio.AudioTranscript.Trim());
                        yield return new WaitForSeconds(currentResponseAudio.AudioClip.length);
                        _audioSource.Stop();
                        _audioSource.clip = null;
                        PurgeLipSyncFrames();
                        // When the queue ran dry mid-speech, pause for the next
                        // clip's lip-sync data before resuming.
                        if (_responseAudios.Count == 0 && _convaiNPC.convaiLipSync != null)
                            SetWaitForCharacterLipSync(true);
                    }
                    else
                    {
                        ConvaiLogger.DebugLog($"Final Playing: {currentResponseAudio.AudioTranscript}", ConvaiLogger.LogCategory.LipSync);
                        SetCharacterTalking(false);
                    }
                }
                else
                {
                    yield return new WaitForSeconds(1f);
                    SetCharacterTalking(false);
                }
        }
        /// <summary>
        /// Converts a byte array containing audio data into an AudioClip.
        /// </summary>
        /// <param name="audioResponse">Audio response containing the audio data</param>
        /// <returns>Float array containing the audio samples, or null when the WAV payload cannot be parsed.</returns>
        public float[] ProcessByteAudioDataToAudioClip(GetResponseResponse.Types.AudioResponse audioResponse)
        {
            try
            {
                byte[] byteAudio = audioResponse.AudioData.ToByteArray();
                if (!WavUtility.TryParseWavHeader(byteAudio, out WavUtility.WavHeader header, out int wavHeaderSize))
                {
                    throw new ArgumentException("Failed to parse WAV header from byte audio.", nameof(audioResponse));
                }
                if (byteAudio.Length <= wavHeaderSize)
                    throw new ArgumentException("Not enough data in byte audio to trim the header.", nameof(audioResponse));
                // Trim the WAV header from the byte array to get the actual audio data
                byte[] trimmedByteAudio = new byte[byteAudio.Length - wavHeaderSize];
                Buffer.BlockCopy(byteAudio, wavHeaderSize, trimmedByteAudio, 0, byteAudio.Length - wavHeaderSize);
                // Convert the trimmed byte audio data to a float array of audio samples
                float[] samples = Convert16BitByteArrayToFloatAudioClipData(trimmedByteAudio);
                if (samples.Length <= 0) throw new Exception("No samples created after conversion from byte array.");
                return samples;
            }
            catch (Exception e)
            {
                // FIX: previously the exception was swallowed silently, hiding
                // parse failures from developers. Keep the best-effort null
                // return (callers must handle it) but log the cause.
                ConvaiLogger.Error(e, ConvaiLogger.LogCategory.Character);
                return null;
            }
        }
        /// <summary>
        /// Converts a byte array representing 16-bit audio samples to a float array.
        /// </summary>
        /// <param name="source">Byte array containing 16-bit little-endian PCM audio data</param>
        /// <returns>Float array containing audio samples in the range [-1, 1]</returns>
        private static float[] Convert16BitByteArrayToFloatAudioClipData(byte[] source)
        {
            const int x = sizeof(short); // Size of a short in bytes
            int convertedSize = source.Length / x; // Number of short samples
            float[] data = new float[convertedSize]; // Float array to hold the converted data
            int byteIndex = 0; // Index for the byte array
            int dataIndex = 0; // Index for the float array
            // Convert each pair of bytes to a short and then to a float
            while (byteIndex < source.Length)
            {
                byte firstByte = source[byteIndex];
                byte secondByte = source[byteIndex + 1];
                byteIndex += 2;
                // Combine the two bytes to form a short (little endian)
                short s = (short)((secondByte << 8) | firstByte);
                // Convert the short value to a float in the range [-1, 1]
                data[dataIndex] = s / 32768.0F; // Dividing by 32768.0 to normalize the range
                dataIndex++;
            }
            return data;
        }
        /// <summary>Permanently ends the playback loop (used on application quit).</summary>
        public void StopAudioLoop()
        {
            _stopAudioPlayingLoop = true;
        }
        /// <summary>One playable unit: a clip, its transcript, and an end-of-response marker.</summary>
        public class ResponseAudio
        {
            public AudioClip AudioClip;
            public string AudioTranscript;
            public bool IsFinal;
        }
    }
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 07c6d33f72d441209362753c7f5759bd
timeCreated: 1705425029

View File

@ -0,0 +1,218 @@
using System;
using System.Collections.Generic;
using Convai.Scripts.Runtime.Attributes;
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.LoggerSystem;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// Singleton that tracks which ConvaiNPC the player is looking at (via a
/// camera ray plus a vision-cone grace zone) and publishes active-NPC changes.
/// </summary>
[DefaultExecutionOrder(-101)]
public class ConvaiNPCManager : MonoBehaviour
{
// Single-element buffer reused by RaycastNonAlloc to avoid per-frame allocation.
private static readonly RaycastHit[] RaycastHits = new RaycastHit[1];
[Tooltip("Length of the ray used for detecting NPCs.")] [SerializeField]
private float rayLength = 2.0f;
[Tooltip("Angle from the ray's direction to keep the NPC active, even if not directly hit by the ray.")] [SerializeField]
private float visionConeAngle = 45f;
[Tooltip("Reference to the currently active NPC.")] [ReadOnly]
public ConvaiNPC activeConvaiNPC;
[Tooltip("Reference to the NPC that is currently near the player.")] [ReadOnly]
public ConvaiNPC nearbyNPC;
// Cache used to store NPC references and avoid redundant GetComponent calls.
private readonly Dictionary<GameObject, ConvaiNPC> _convaiNPCCache = new();
// Reference to the NPC that was last hit by the raycast.
private ConvaiNPC _lastHitNpc;
// Reference to the main camera used for ray casting.
private Camera _mainCamera;
// Singleton instance of the NPC manager.
public static ConvaiNPCManager Instance { get; private set; }
private void Awake()
{
// Singleton pattern to ensure only one instance exists
// NOTE(review): on a duplicate instance, Destroy(gameObject) is deferred,
// so the camera assignment below still runs — harmless but confirm intent.
if (Instance == null)
Instance = this;
else
Destroy(gameObject);
_mainCamera = Camera.main;
}
/// <summary>
/// Each frame: raycast from the camera to find an NPC in front of the
/// player; keep the last-hit NPC active while it stays inside the vision
/// cone and within ~120% of the ray length, otherwise deactivate it.
/// </summary>
private void LateUpdate()
{
Ray ray = new(_mainCamera.transform.position, _mainCamera.transform.forward);
bool foundConvaiNPC = false;
if (Physics.RaycastNonAlloc(ray, RaycastHits, rayLength) > 0)
{
RaycastHit hit = RaycastHits[0];
// NOTE(review): nearbyNPC is only assigned when the ray hits something;
// it is never reset to null on a miss, so it can go stale — confirm.
nearbyNPC = GetConvaiNPC(hit.transform.gameObject);
if (nearbyNPC != null)
{
foundConvaiNPC = true;
// Activate only on a change of target, and never steal an NPC
// that is mid NPC-to-NPC conversation.
if (_lastHitNpc != nearbyNPC && !CheckForNPCToNPCConversation(nearbyNPC))
{
UpdateActiveNPC(nearbyNPC);
}
}
}
if (!foundConvaiNPC && _lastHitNpc != null)
{
// Grace zone: keep the previous NPC active while the player is still
// roughly facing it and close enough.
Vector3 toLastHitNPC = _lastHitNpc.transform.position - ray.origin;
float angleToLastHitNPC = Vector3.Angle(ray.direction, toLastHitNPC.normalized);
float distanceToLastHitNPC = toLastHitNPC.magnitude;
if (angleToLastHitNPC > visionConeAngle || distanceToLastHitNPC > rayLength * 1.2f)
{
ConvaiLogger.DebugLog($"Player left {_lastHitNpc.gameObject.name}", ConvaiLogger.LogCategory.Character);
UpdateActiveNPC(null);
}
}
}
// Editor-only visualization of the detection ray and vision cone.
private void OnDrawGizmos()
{
if (_mainCamera == null)
_mainCamera = Camera.main;
if (_mainCamera == null)
return;
Transform cameraTransform = _mainCamera.transform;
Vector3 rayOrigin = cameraTransform.position;
Vector3 rayDirection = cameraTransform.forward;
// Drawing the main ray
Gizmos.color = Color.blue;
Gizmos.DrawRay(rayOrigin, rayDirection.normalized * rayLength);
if (_lastHitNpc != null) DrawVisionConeArc(rayOrigin, rayDirection, cameraTransform.up);
}
// Draws the vision-cone arc plus its two boundary rays as gizmos.
private void DrawVisionConeArc(Vector3 rayOrigin, Vector3 rayDirection, Vector3 up)
{
const int arcResolution = 50; // number of segments to use for arc
float angleStep = 2 * visionConeAngle / arcResolution; // angle between each segment
Vector3 previousPoint = Quaternion.AngleAxis(-visionConeAngle, up) * rayDirection * rayLength;
for (int i = 1; i <= arcResolution; i++)
{
Vector3 nextPoint = Quaternion.AngleAxis(-visionConeAngle + angleStep * i, up) * rayDirection * rayLength;
Gizmos.DrawLine(rayOrigin + previousPoint, rayOrigin + nextPoint);
previousPoint = nextPoint;
}
Quaternion leftRotation = Quaternion.AngleAxis(-visionConeAngle, up);
Quaternion rightRotation = Quaternion.AngleAxis(visionConeAngle, up);
Vector3 leftDirection = leftRotation * rayDirection;
Vector3 rightDirection = rightRotation * rayDirection;
Gizmos.color = Color.yellow;
Gizmos.DrawLine(rayOrigin, rayOrigin + leftDirection.normalized * rayLength);
Gizmos.DrawLine(rayOrigin, rayOrigin + rightDirection.normalized * rayLength);
}
/// <summary>
/// Checks if the specified NPC is in conversation with another NPC.
/// </summary>
/// <param name="npc">The NPC to check.</param>
/// <returns>True if the NPC is in conversation with another NPC; otherwise, false.</returns>
public bool CheckForNPCToNPCConversation(ConvaiNPC npc)
{
return npc.TryGetComponent(out ConvaiGroupNPCController convaiGroupNPC) && convaiGroupNPC.IsInConversationWithAnotherNPC;
}
// Internal transition used by LateUpdate; always keeps _lastHitNpc in sync.
private void UpdateActiveNPC(ConvaiNPC newActiveNPC)
{
// Check if the new active NPC is different from the current active NPC.
if (activeConvaiNPC != newActiveNPC)
{
// Deactivate the currently active NPC, if any.
if (activeConvaiNPC != null) activeConvaiNPC.isCharacterActive = false;
// Update the reference to the new active NPC.
activeConvaiNPC = newActiveNPC;
_lastHitNpc = newActiveNPC; // Ensure the _lastHitNpc reference is updated accordingly.
// Activate the new NPC, if any.
if (newActiveNPC != null)
{
newActiveNPC.isCharacterActive = true;
ConvaiLogger.DebugLog($"Active NPC changed to {newActiveNPC.gameObject.name}", ConvaiLogger.LogCategory.Character);
}
// Invoke the OnActiveNPCChanged event, notifying other parts of the system of the change.
OnActiveNPCChanged?.Invoke(newActiveNPC);
}
}
/// <summary>
/// Sets the active NPC to the specified NPC.
/// </summary>
/// <param name="newActiveNPC">The NPC to set as active.</param>
/// <param name="updateLastHitNPC"> Whether to update the last hit NPC reference.</param>
public void SetActiveConvaiNPC(ConvaiNPC newActiveNPC, bool updateLastHitNPC = true)
{
if (activeConvaiNPC != newActiveNPC)
{
if (activeConvaiNPC != null)
// Deactivate the previous NPC
activeConvaiNPC.isCharacterActive = false;
activeConvaiNPC = newActiveNPC;
if (updateLastHitNPC)
_lastHitNpc = newActiveNPC;
if (newActiveNPC != null)
{
// Activate the new NPC
newActiveNPC.isCharacterActive = true;
ConvaiLogger.DebugLog($"Active NPC changed to {newActiveNPC.gameObject.name}", ConvaiLogger.LogCategory.Character);
}
OnActiveNPCChanged?.Invoke(newActiveNPC);
}
}
/// <summary>
/// Event that's triggered when the active NPC changes.
/// </summary>
public event Action<ConvaiNPC> OnActiveNPCChanged;
// Cached GetComponent lookup; only caches objects that actually carry a ConvaiNPC.
private ConvaiNPC GetConvaiNPC(GameObject obj)
{
if (!_convaiNPCCache.TryGetValue(obj, out ConvaiNPC npc))
{
npc = obj.GetComponent<ConvaiNPC>();
if (npc != null)
_convaiNPCCache[obj] = npc;
}
return npc;
}
/// <summary>
/// Gets the currently active ConvaiNPC.
/// </summary>
/// <returns>The currently active ConvaiNPC.</returns>
public ConvaiNPC GetActiveConvaiNPC()
{
return activeConvaiNPC;
}
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: c5777a8a41e942f5a05df965241ef598
timeCreated: 1696103142

View File

@ -0,0 +1,134 @@
using System;
using System.Linq;
using Convai.Scripts.Runtime.Features;
using Convai.Scripts.Runtime.UI;
using TMPro;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
    /// <summary>
    /// Bridges player input (voice key, chat toggle, text send) to a single
    /// ConvaiNPC. Added and wired by ConvaiNPC at initialization time.
    /// </summary>
    public class ConvaiPlayerInteractionManager : MonoBehaviour
    {
        private ConvaiChatUIHandler _convaiChatUIHandler;
        private ConvaiCrosshairHandler _convaiCrosshairHandler;
        private ConvaiNPC _convaiNPC;
        // Most recently focused chat input field (resolved lazily from the UI).
        private TMP_InputField _currentInputField;
        private ConvaiInputManager InputManager => ConvaiInputManager.Instance ? ConvaiInputManager.Instance : null;
        private void OnEnable()
        {
            if (InputManager == null) return;
            InputManager.sendText += HandleSendText;
            InputManager.toggleChat += HandleToggleChat;
            InputManager.talkKeyInteract += HandleVoiceInput;
            InputManager.talkKeyInteract += HandleNPCInteraction;
        }
        private void OnDisable()
        {
            if (InputManager == null) return;
            InputManager.sendText -= HandleSendText;
            InputManager.toggleChat -= HandleToggleChat;
            InputManager.talkKeyInteract -= HandleVoiceInput;
            InputManager.talkKeyInteract -= HandleNPCInteraction;
        }
        /// <summary>
        /// Wires this manager to its NPC and the scene-level handlers.
        /// </summary>
        /// <param name="convaiNPC">Owning NPC; must not be null.</param>
        /// <param name="convaiCrosshairHandler">Crosshair handler; must not be null.</param>
        /// <param name="convaiChatUIHandler">Chat UI handler; must not be null.</param>
        /// <exception cref="ArgumentNullException">When any argument is null.</exception>
        public void Initialize(ConvaiNPC convaiNPC, ConvaiCrosshairHandler convaiCrosshairHandler, ConvaiChatUIHandler convaiChatUIHandler)
        {
            _convaiNPC = convaiNPC ? convaiNPC : throw new ArgumentNullException(nameof(convaiNPC));
            _convaiCrosshairHandler = convaiCrosshairHandler ? convaiCrosshairHandler : throw new ArgumentNullException(nameof(convaiCrosshairHandler));
            _convaiChatUIHandler = convaiChatUIHandler ? convaiChatUIHandler : throw new ArgumentNullException(nameof(convaiChatUIHandler));
        }
        // Remembers the active input field so HandleSendText can read its text.
        private void UpdateCurrentInputField(TMP_InputField inputFieldInScene)
        {
            if (inputFieldInScene != null && _currentInputField != inputFieldInScene) _currentInputField = inputFieldInScene;
        }
        // Interrupts current speech, sends the typed text, echoes it to the
        // chat UI, and clears the input field.
        private void HandleInputSubmission(string input)
        {
            if (!_convaiNPC.isCharacterActive || string.IsNullOrEmpty(input.Trim())) return;
            _convaiNPC.InterruptCharacterSpeech();
            UpdateActionConfig();
            _convaiNPC.SendTextDataAsync(input);
            _convaiChatUIHandler.SendPlayerText(input);
            ClearInputField();
        }
        /// <summary>
        /// Finds the first interactable input field under the current chat UI,
        /// or null when none exists.
        /// </summary>
        public TMP_InputField FindActiveInputField()
        {
            // TODO : Implement Text Send for ChatUIBase and get input field directly instead of finding here
            return _convaiChatUIHandler.GetCurrentUI().GetCanvasGroup().gameObject.GetComponentsInChildren<TMP_InputField>(true)
                .FirstOrDefault(inputField => inputField.interactable);
        }
        private void ClearInputField()
        {
            if (_currentInputField != null)
            {
                _currentInputField.text = string.Empty;
                _currentInputField.DeactivateInputField();
            }
        }
        private void HandleToggleChat()
        {
            TMP_InputField inputFieldInScene = FindActiveInputField();
            // FIX: FindActiveInputField uses FirstOrDefault and can return null
            // (no interactable field in the current UI); guard before
            // dereferencing to avoid a NullReferenceException.
            if (inputFieldInScene == null) return;
            if (!inputFieldInScene.isFocused && _convaiNPC.isCharacterActive)
            {
                inputFieldInScene.ActivateInputField();
            }
        }
        // Submits the focused input field's text when the send action fires.
        private void HandleSendText()
        {
            TMP_InputField inputFieldInScene = FindActiveInputField();
            UpdateCurrentInputField(inputFieldInScene);
            if (_currentInputField != null && _currentInputField.isFocused && _convaiNPC.isCharacterActive) HandleInputSubmission(_currentInputField.text);
        }
        // Starts/stops voice capture on talk-key press/release, unless the
        // player is typing or this NPC is inactive.
        private void HandleVoiceInput(bool listenState)
        {
            if (UIUtilities.IsAnyInputFieldFocused() || !_convaiNPC.isCharacterActive) return;
            switch (listenState)
            {
                case true:
                    _convaiNPC.InterruptCharacterSpeech();
                    UpdateActionConfig();
                    _convaiNPC.StartListening();
                    break;
                case false:
                {
                    if (_convaiNPC.isCharacterActive && (_currentInputField == null || !_currentInputField.isFocused)) _convaiNPC.StopListening();
                    break;
                }
            }
        }
        // Lets the player barge into an ongoing NPC-to-NPC conversation with
        // this NPC by pressing the talk key.
        private void HandleNPCInteraction(bool state)
        {
            if (!IsNpcInConversation() || !state || UIUtilities.IsAnyInputFieldFocused()) return;
            NPC2NPCConversationManager.Instance.EndConversation(_convaiNPC.GetComponent<ConvaiGroupNPCController>());
            _convaiNPC.InterruptCharacterSpeech();
            _convaiNPC.StartListening();
        }
        // True when this NPC is mid NPC-to-NPC conversation AND is the NPC
        // the player is currently near.
        private bool IsNpcInConversation()
        {
            bool isNpcInConversation;
            if (TryGetComponent(out ConvaiGroupNPCController convaiGroupNPC))
                isNpcInConversation = convaiGroupNPC.IsInConversationWithAnotherNPC && ConvaiNPCManager.Instance.nearbyNPC == _convaiNPC;
            else
                isNpcInConversation = false;
            return isNpcInConversation;
        }
        /// <summary>
        /// Points the NPC's action config at whatever the player's crosshair
        /// currently references.
        /// </summary>
        public void UpdateActionConfig()
        {
            if (_convaiNPC.ActionConfig != null && _convaiCrosshairHandler != null)
                _convaiNPC.ActionConfig.CurrentAttentionObject = _convaiCrosshairHandler.FindPlayerReferenceObject();
        }
    }
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 2fc37bad9e2b4a5ab1f28e78cd40203a
timeCreated: 1705427710

View File

@ -0,0 +1,250 @@
using System;
using UnityEngine;
namespace Convai.Scripts.Runtime.Core
{
/// <summary>
/// Utility for parsing RIFF/WAVE byte streams and converting raw PCM sample data
/// into normalized float samples suitable for Unity audio playback.
/// </summary>
public static class WavUtility
{
    /// <summary>Fields of a canonical PCM WAV header plus the located data chunk.</summary>
    public struct WavHeader
    {
        public int ChunkID; // "RIFF"
        public int FileSize; // File size (minus 8 bytes)
        public int RiffType; // "WAVE"
        public int FmtID; // "fmt "
        public int FmtSize; // 16 for PCM
        public short AudioFormat; // 1 for PCM
        public short NumChannels; // Mono = 1, Stereo = 2, etc.
        public int SampleRate; // Samples per second (e.g., 44100)
        public int ByteRate; // SampleRate * NumChannels * BitsPerSample/8
        public short BlockAlign; // NumChannels * BitsPerSample/8
        public short BitsPerSample; // 8 bits = 8, 16 bits = 16, etc.
        public int DataID; // "data"
        public int DataSize; // Number of bytes in the data.
    }

    /// <summary>
    /// Attempts to parse a WAV header from <paramref name="wavBytes"/>.
    /// Follows the Try pattern: malformed, truncated or unsupported input is reported
    /// via a false return value rather than an exception.
    /// </summary>
    /// <param name="wavBytes">Raw bytes of a WAV file.</param>
    /// <param name="header">Parsed header fields on success.</param>
    /// <param name="headerSize">Offset of the first PCM data byte on success.</param>
    /// <returns>True when a usable PCM header was parsed; false otherwise.</returns>
    public static bool TryParseWavHeader(byte[] wavBytes, out WavHeader header, out int headerSize)
    {
        header = new WavHeader();
        headerSize = 0;

        // BUGFIX: a Try* method must not throw on bad input — callers such as
        // CalculateDurationSeconds rely on the boolean result. Report failure instead.
        if (wavBytes == null || wavBytes.Length < 44)
        {
            Debug.LogError($"WAV data is too short to contain a header. Length: {(wavBytes == null ? 0 : wavBytes.Length)} bytes");
            return false;
        }
        try
        {
            // RIFF chunk
            string riffId = System.Text.Encoding.ASCII.GetString(wavBytes, 0, 4);
            header.ChunkID = BitConverter.ToInt32(wavBytes, 0);
            if (riffId != "RIFF")
            {
                Debug.LogError($"Invalid WAV header: Expected 'RIFF' but got '{riffId}'");
                return false;
            }
            header.FileSize = BitConverter.ToInt32(wavBytes, 4);
            Debug.Log($"WAV FileSize from header: {header.FileSize}, Actual bytes: {wavBytes.Length}");
            string waveId = System.Text.Encoding.ASCII.GetString(wavBytes, 8, 4);
            header.RiffType = BitConverter.ToInt32(wavBytes, 8);
            if (waveId != "WAVE")
            {
                Debug.LogError($"Invalid WAV header: Expected 'WAVE' but got '{waveId}'");
                return false;
            }
            // fmt sub-chunk
            string fmtId = System.Text.Encoding.ASCII.GetString(wavBytes, 12, 4);
            header.FmtID = BitConverter.ToInt32(wavBytes, 12);
            if (fmtId != "fmt ")
            {
                // Non-canonical layout: skip straight to the data chunk and assume a default format.
                Debug.LogWarning($"WAV header: Expected 'fmt ' but got '{fmtId}'. Attempting to find data chunk...");
                int dataChunkPos = FindChunk(wavBytes, "data", 12);
                if (dataChunkPos == -1)
                {
                    Debug.LogError("Could not find 'data' chunk.");
                    return false;
                }
                Debug.Log($"Found data chunk at position: {dataChunkPos}");
                // Fallback format: 16-bit mono at 44.1 kHz.
                header.NumChannels = 1;
                header.SampleRate = 44100;
                header.BitsPerSample = 16;
                header.DataID = BitConverter.ToInt32(wavBytes, dataChunkPos);
                header.DataSize = BitConverter.ToInt32(wavBytes, dataChunkPos + 4);
                headerSize = dataChunkPos + 8;
                Debug.Log($"Using default format values. Data size: {header.DataSize}, Header size: {headerSize}");
                return true;
            }
            header.FmtSize = BitConverter.ToInt32(wavBytes, 16);
            header.AudioFormat = BitConverter.ToInt16(wavBytes, 20);
            header.NumChannels = BitConverter.ToInt16(wavBytes, 22);
            header.SampleRate = BitConverter.ToInt32(wavBytes, 24);
            header.ByteRate = BitConverter.ToInt32(wavBytes, 28);
            header.BlockAlign = BitConverter.ToInt16(wavBytes, 32);
            header.BitsPerSample = BitConverter.ToInt16(wavBytes, 34);
            Debug.Log($"Format chunk parsed: Format={header.AudioFormat}, Channels={header.NumChannels}, " +
                      $"Rate={header.SampleRate}, BitsPerSample={header.BitsPerSample}, FmtSize={header.FmtSize}");
            int dataChunkPosition = FindChunk(wavBytes, "data", 36);
            if (dataChunkPosition == -1)
            {
                // The fmt chunk may be larger than 16 bytes (e.g. extensible formats);
                // retry the search just past the declared fmt chunk size.
                dataChunkPosition = FindChunk(wavBytes, "data", 20 + header.FmtSize);
                if (dataChunkPosition == -1)
                {
                    Debug.LogError("WAV header: 'data' chunk not found after extensive search.");
                    return false;
                }
            }
            Debug.Log($"Found data chunk at position: {dataChunkPosition}");
            header.DataID = BitConverter.ToInt32(wavBytes, dataChunkPosition);
            header.DataSize = BitConverter.ToInt32(wavBytes, dataChunkPosition + 4);
            headerSize = dataChunkPosition + 8;
            // Handle invalid size values by using actual data size
            if (header.DataSize <= 0 || header.DataSize > wavBytes.Length - headerSize)
            {
                Debug.LogWarning($"Invalid data size in header: {header.DataSize}. Using actual data size instead.");
                header.DataSize = wavBytes.Length - headerSize;
                Debug.Log($"Adjusted data size to: {header.DataSize} bytes");
            }
            if (header.AudioFormat != 1)
            {
                Debug.LogError($"Unsupported WAV audio format: {header.AudioFormat}. Only PCM (1) is supported.");
                return false;
            }
            if (header.BitsPerSample != 16 && header.BitsPerSample != 8)
            {
                Debug.LogWarning($"Uncommon BitsPerSample: {header.BitsPerSample}. Assuming 16-bit conversion path.");
            }
            return true;
        }
        catch (Exception ex)
        {
            Debug.LogError($"Error parsing WAV header: {ex.Message}\nStack trace: {ex.StackTrace}");
            return false;
        }
    }

    /// <summary>
    /// Scans <paramref name="source"/> byte-by-byte from <paramref name="startIndex"/>
    /// for the ASCII chunk tag <paramref name="chunkName"/> (tolerates alignment padding).
    /// </summary>
    /// <returns>The index of the chunk tag, or -1 when not found.</returns>
    private static int FindChunk(byte[] source, string chunkName, int startIndex)
    {
        if (source == null || startIndex < 0 || startIndex >= source.Length)
        {
            Debug.LogError($"Invalid parameters for FindChunk: startIndex={startIndex}, sourceLength={(source?.Length ?? 0)}");
            return -1;
        }
        try
        {
            byte[] chunkBytes = System.Text.Encoding.ASCII.GetBytes(chunkName);
            // Search for the chunk, allowing for alignment padding
            for (int i = startIndex; i <= source.Length - 8; i++) // -8 for chunk header
            {
                bool match = true;
                for (int j = 0; j < chunkBytes.Length; j++)
                {
                    if (source[i + j] != chunkBytes[j])
                    {
                        match = false;
                        break;
                    }
                }
                if (match)
                {
                    Debug.Log($"Found chunk '{chunkName}' at position {i}");
                    return i;
                }
            }
            Debug.LogWarning($"Chunk '{chunkName}' not found after position {startIndex}");
            return -1;
        }
        catch (Exception ex)
        {
            Debug.LogError($"Error in FindChunk: {ex.Message}");
            return -1;
        }
    }

    /// <summary>
    /// Converts raw PCM bytes (8- or 16-bit) into float samples normalized to [-1, 1].
    /// Returns an empty array on invalid input or unsupported bit depth.
    /// </summary>
    /// <param name="pcmData">Raw PCM sample bytes (interleaved if multi-channel).</param>
    /// <param name="bitsPerSample">8 or 16; other values are rejected.</param>
    /// <param name="numChannels">Channel count; samples stay interleaved in the output.</param>
    public static float[] ConvertPcmToFloat(byte[] pcmData, short bitsPerSample, short numChannels)
    {
        if (pcmData == null || pcmData.Length == 0)
        {
            Debug.LogError("PCM data is null or empty");
            return Array.Empty<float>();
        }
        try
        {
            int samples;
            float[] floatData;
            if (bitsPerSample == 16)
            {
                if (pcmData.Length % 2 != 0)
                {
                    // Array.Resize replaces only the local reference; the caller's array is untouched.
                    Debug.LogWarning($"16-bit PCM data length is not even: {pcmData.Length}. Truncating last byte.");
                    Array.Resize(ref pcmData, pcmData.Length - 1);
                }
                samples = pcmData.Length / 2; // 2 bytes per sample for 16-bit
                floatData = new float[samples];
                for (int i = 0; i < samples; i++)
                {
                    short pcmSample = BitConverter.ToInt16(pcmData, i * 2);
                    floatData[i] = pcmSample / 32768f; // Normalize to [-1.0, 1.0]
                }
            }
            else if (bitsPerSample == 8)
            {
                samples = pcmData.Length;
                floatData = new float[samples];
                for (int i = 0; i < samples; i++)
                {
                    // 8-bit PCM is unsigned [0, 255], convert to [-1.0, 1.0]
                    floatData[i] = ((pcmData[i] - 128) / 128f);
                }
            }
            else
            {
                Debug.LogError($"Unsupported bits per sample: {bitsPerSample}");
                return Array.Empty<float>();
            }
            Debug.Log($"Converted {pcmData.Length} bytes of {bitsPerSample}-bit PCM to {floatData.Length} float samples");
            return floatData;
        }
        catch (Exception ex)
        {
            Debug.LogError($"Error converting PCM to float: {ex.Message}");
            return Array.Empty<float>();
        }
    }

    /// <summary>
    /// Computes the playback duration (in seconds) of a WAV byte stream.
    /// Returns 0 when the header cannot be parsed or contains degenerate
    /// channel/bit-depth/sample-rate values (guards against division by zero).
    /// </summary>
    public static float CalculateDurationSeconds(byte[] wavBytes)
    {
        if (TryParseWavHeader(wavBytes, out WavHeader header, out int headerSize))
        {
            // Bytes occupied by one sample frame (all channels).
            int bytesPerFrame = header.NumChannels * (header.BitsPerSample / 8);
            if (bytesPerFrame <= 0 || header.SampleRate <= 0)
            {
                Debug.LogError($"WAV header has invalid format values (channels={header.NumChannels}, bits={header.BitsPerSample}, rate={header.SampleRate}); cannot compute duration");
                return 0f;
            }
            // Calculate the total number of samples in the data chunk
            int totalSamples = header.DataSize / bytesPerFrame;
            // Calculate the duration in seconds
            return (float)totalSamples / header.SampleRate;
        }
        Debug.LogError("Failed to parse WAV header for duration calculation");
        return 0f;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 155cbb724b25941ce9af644f21f8be50
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a9460cc9d8cacc34791f7a22e8b2eceb
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2df3c3aabcfefe74cb44efb454f919da
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f49deed203eacd243907160ac61a5a0b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,234 @@
// <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: service.proto
// </auto-generated>
// Original file comments:
// service.proto
#pragma warning disable 0414, 1591
#region Designer generated code
using grpc = global::Grpc.Core;
namespace Service {
// NOTE(review): this file is produced by the protocol buffer compiler from service.proto.
// Do not hand-edit behavior here; change service.proto and regenerate instead.
public static partial class ConvaiService
{
// Fully-qualified gRPC service name as declared in service.proto.
static readonly string __ServiceName = "service.ConvaiService";
// Protobuf marshallers (serializer/deserializer pairs) for each message type.
static readonly grpc::Marshaller<global::Service.HelloRequest> __Marshaller_service_HelloRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.HelloRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.HelloResponse> __Marshaller_service_HelloResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.HelloResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.STTRequest> __Marshaller_service_STTRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.STTRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.STTResponse> __Marshaller_service_STTResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.STTResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseRequest> __Marshaller_service_GetResponseRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseResponse> __Marshaller_service_GetResponseResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseResponse.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.GetResponseRequestSingle> __Marshaller_service_GetResponseRequestSingle = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.GetResponseRequestSingle.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.FeedbackRequest> __Marshaller_service_FeedbackRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.FeedbackRequest.Parser.ParseFrom);
static readonly grpc::Marshaller<global::Service.FeedbackResponse> __Marshaller_service_FeedbackResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Service.FeedbackResponse.Parser.ParseFrom);
// gRPC method descriptors: name, streaming kind, and marshallers for each RPC.
static readonly grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse> __Method_Hello = new grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse>(
grpc::MethodType.Unary,
__ServiceName,
"Hello",
__Marshaller_service_HelloRequest,
__Marshaller_service_HelloResponse);
static readonly grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse> __Method_HelloStream = new grpc::Method<global::Service.HelloRequest, global::Service.HelloResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"HelloStream",
__Marshaller_service_HelloRequest,
__Marshaller_service_HelloResponse);
static readonly grpc::Method<global::Service.STTRequest, global::Service.STTResponse> __Method_SpeechToText = new grpc::Method<global::Service.STTRequest, global::Service.STTResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"SpeechToText",
__Marshaller_service_STTRequest,
__Marshaller_service_STTResponse);
static readonly grpc::Method<global::Service.GetResponseRequest, global::Service.GetResponseResponse> __Method_GetResponse = new grpc::Method<global::Service.GetResponseRequest, global::Service.GetResponseResponse>(
grpc::MethodType.DuplexStreaming,
__ServiceName,
"GetResponse",
__Marshaller_service_GetResponseRequest,
__Marshaller_service_GetResponseResponse);
static readonly grpc::Method<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse> __Method_GetResponseSingle = new grpc::Method<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse>(
grpc::MethodType.ServerStreaming,
__ServiceName,
"GetResponseSingle",
__Marshaller_service_GetResponseRequestSingle,
__Marshaller_service_GetResponseResponse);
static readonly grpc::Method<global::Service.FeedbackRequest, global::Service.FeedbackResponse> __Method_SubmitFeedback = new grpc::Method<global::Service.FeedbackRequest, global::Service.FeedbackResponse>(
grpc::MethodType.Unary,
__ServiceName,
"SubmitFeedback",
__Marshaller_service_FeedbackRequest,
__Marshaller_service_FeedbackResponse);
/// <summary>Service descriptor</summary>
public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
{
get { return global::Service.ServiceReflection.Descriptor.Services[0]; }
}
/// <summary>Base class for server-side implementations of ConvaiService. Each stub throws Unimplemented until overridden.</summary>
[grpc::BindServiceMethod(typeof(ConvaiService), "BindService")]
public abstract partial class ConvaiServiceBase
{
public virtual global::System.Threading.Tasks.Task<global::Service.HelloResponse> Hello(global::Service.HelloRequest request, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task HelloStream(grpc::IAsyncStreamReader<global::Service.HelloRequest> requestStream, grpc::IServerStreamWriter<global::Service.HelloResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task SpeechToText(grpc::IAsyncStreamReader<global::Service.STTRequest> requestStream, grpc::IServerStreamWriter<global::Service.STTResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task GetResponse(grpc::IAsyncStreamReader<global::Service.GetResponseRequest> requestStream, grpc::IServerStreamWriter<global::Service.GetResponseResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::IServerStreamWriter<global::Service.GetResponseResponse> responseStream, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
public virtual global::System.Threading.Tasks.Task<global::Service.FeedbackResponse> SubmitFeedback(global::Service.FeedbackRequest request, grpc::ServerCallContext context)
{
throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
}
}
/// <summary>Client for ConvaiService</summary>
public partial class ConvaiServiceClient : grpc::ClientBase<ConvaiServiceClient>
{
/// <summary>Creates a new client for ConvaiService</summary>
/// <param name="channel">The channel to use to make remote calls.</param>
public ConvaiServiceClient(grpc::ChannelBase channel) : base(channel)
{
}
/// <summary>Creates a new client for ConvaiService that uses a custom <c>CallInvoker</c>.</summary>
/// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
public ConvaiServiceClient(grpc::CallInvoker callInvoker) : base(callInvoker)
{
}
/// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
protected ConvaiServiceClient() : base()
{
}
/// <summary>Protected constructor to allow creation of configured clients.</summary>
/// <param name="configuration">The client configuration.</param>
protected ConvaiServiceClient(ClientBaseConfiguration configuration) : base(configuration)
{
}
public virtual global::Service.HelloResponse Hello(global::Service.HelloRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return Hello(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual global::Service.HelloResponse Hello(global::Service.HelloRequest request, grpc::CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_Hello, null, options, request);
}
public virtual grpc::AsyncUnaryCall<global::Service.HelloResponse> HelloAsync(global::Service.HelloRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return HelloAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncUnaryCall<global::Service.HelloResponse> HelloAsync(global::Service.HelloRequest request, grpc::CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_Hello, null, options, request);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.HelloRequest, global::Service.HelloResponse> HelloStream(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return HelloStream(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.HelloRequest, global::Service.HelloResponse> HelloStream(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_HelloStream, null, options);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.STTRequest, global::Service.STTResponse> SpeechToText(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SpeechToText(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.STTRequest, global::Service.STTResponse> SpeechToText(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_SpeechToText, null, options);
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.GetResponseRequest, global::Service.GetResponseResponse> GetResponse(grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return GetResponse(new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncDuplexStreamingCall<global::Service.GetResponseRequest, global::Service.GetResponseResponse> GetResponse(grpc::CallOptions options)
{
return CallInvoker.AsyncDuplexStreamingCall(__Method_GetResponse, null, options);
}
public virtual grpc::AsyncServerStreamingCall<global::Service.GetResponseResponse> GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return GetResponseSingle(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncServerStreamingCall<global::Service.GetResponseResponse> GetResponseSingle(global::Service.GetResponseRequestSingle request, grpc::CallOptions options)
{
return CallInvoker.AsyncServerStreamingCall(__Method_GetResponseSingle, null, options, request);
}
public virtual global::Service.FeedbackResponse SubmitFeedback(global::Service.FeedbackRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SubmitFeedback(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual global::Service.FeedbackResponse SubmitFeedback(global::Service.FeedbackRequest request, grpc::CallOptions options)
{
return CallInvoker.BlockingUnaryCall(__Method_SubmitFeedback, null, options, request);
}
public virtual grpc::AsyncUnaryCall<global::Service.FeedbackResponse> SubmitFeedbackAsync(global::Service.FeedbackRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken))
{
return SubmitFeedbackAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
}
public virtual grpc::AsyncUnaryCall<global::Service.FeedbackResponse> SubmitFeedbackAsync(global::Service.FeedbackRequest request, grpc::CallOptions options)
{
return CallInvoker.AsyncUnaryCall(__Method_SubmitFeedback, null, options, request);
}
/// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
protected override ConvaiServiceClient NewInstance(ClientBaseConfiguration configuration)
{
return new ConvaiServiceClient(configuration);
}
}
/// <summary>Creates service definition that can be registered with a server</summary>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
public static grpc::ServerServiceDefinition BindService(ConvaiServiceBase serviceImpl)
{
return grpc::ServerServiceDefinition.CreateBuilder()
.AddMethod(__Method_Hello, serviceImpl.Hello)
.AddMethod(__Method_HelloStream, serviceImpl.HelloStream)
.AddMethod(__Method_SpeechToText, serviceImpl.SpeechToText)
.AddMethod(__Method_GetResponse, serviceImpl.GetResponse)
.AddMethod(__Method_GetResponseSingle, serviceImpl.GetResponseSingle)
.AddMethod(__Method_SubmitFeedback, serviceImpl.SubmitFeedback).Build();
}
/// <summary>Register service method with a service binder with or without implementation. Useful when customizing the service binding logic.
/// Note: this method is part of an experimental API that can change or be removed without any prior notice.</summary>
/// <param name="serviceBinder">Service methods will be bound by calling <c>AddMethod</c> on this object.</param>
/// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
public static void BindService(grpc::ServiceBinderBase serviceBinder, ConvaiServiceBase serviceImpl)
{
serviceBinder.AddMethod(__Method_Hello, serviceImpl == null ? null : new grpc::UnaryServerMethod<global::Service.HelloRequest, global::Service.HelloResponse>(serviceImpl.Hello));
serviceBinder.AddMethod(__Method_HelloStream, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.HelloRequest, global::Service.HelloResponse>(serviceImpl.HelloStream));
serviceBinder.AddMethod(__Method_SpeechToText, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.STTRequest, global::Service.STTResponse>(serviceImpl.SpeechToText));
serviceBinder.AddMethod(__Method_GetResponse, serviceImpl == null ? null : new grpc::DuplexStreamingServerMethod<global::Service.GetResponseRequest, global::Service.GetResponseResponse>(serviceImpl.GetResponse));
serviceBinder.AddMethod(__Method_GetResponseSingle, serviceImpl == null ? null : new grpc::ServerStreamingServerMethod<global::Service.GetResponseRequestSingle, global::Service.GetResponseResponse>(serviceImpl.GetResponseSingle));
serviceBinder.AddMethod(__Method_SubmitFeedback, serviceImpl == null ? null : new grpc::UnaryServerMethod<global::Service.FeedbackRequest, global::Service.FeedbackResponse>(serviceImpl.SubmitFeedback));
}
}
}
#endregion

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 04b2abb2eec68fa4d8d9f3f1c40b8ad9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 883d1528e5d0ee54ab5519d64712aeca
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,12 @@
using UnityEngine;
namespace Convai.Scripts.Runtime.Extensions
{
/// <summary>
/// Convenience extensions for <see cref="GameObject"/>.
/// </summary>
public static class GameObjectExtension
{
    /// <summary>
    /// Returns the component of type <typeparamref name="T"/> already attached to
    /// <paramref name="gameObject"/>, adding a new one first if none is present.
    /// </summary>
    public static T GetOrAddComponent<T>(this GameObject gameObject) where T : Component
    {
        if (gameObject.TryGetComponent(out T existing))
            return existing;
        return gameObject.AddComponent<T>();
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 60b5cd27ba404cd42a0608590d0d6b96
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,14 @@
using UnityEngine;
using UnityEngine.UI;
namespace Convai.Scripts.Runtime.Extensions
{
/// <summary>
/// Convenience extensions for <see cref="Image"/>.
/// </summary>
public static class ImageExtensions
{
    /// <summary>
    /// Overwrites only the supplied color channels of the image, keeping the
    /// remaining channels at their current values; returns the image for chaining.
    /// </summary>
    public static Image WithColorValue(this Image image, float? r = null, float? g = null, float? b = null, float? a = null)
    {
        Color current = image.color;
        float red = r ?? current.r;
        float green = g ?? current.g;
        float blue = b ?? current.b;
        float alpha = a ?? current.a;
        image.color = new Color(red, green, blue, alpha);
        return image;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bde7d53e94e232744aa2b649f44847f5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,12 @@
using UnityEngine;
namespace Convai.Scripts.Runtime.Extensions
{
/// <summary>
/// Convenience extensions for <see cref="SkinnedMeshRenderer"/>.
/// </summary>
public static class SkinnedMeshRendererExtension
{
    /// <summary>
    /// Moves the blend-shape weight at <paramref name="index"/> a fraction
    /// <paramref name="weight"/> of the way from its current value towards
    /// <paramref name="value"/> (linear interpolation).
    /// </summary>
    public static void SetBlendShapeWeightInterpolate(this SkinnedMeshRenderer renderer, int index, float value, float weight)
    {
        float current = renderer.GetBlendShapeWeight(index);
        float interpolated = Mathf.Lerp(current, value, weight);
        renderer.SetBlendShapeWeight(index, interpolated);
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e3db3cb9745d4844fa122272c2f7b11d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,32 @@
using System.Text.RegularExpressions;
using UnityEngine;
namespace Convai.Scripts.Runtime.Extensions
{
/// <summary>
/// Convenience extensions for <see cref="Transform"/>.
/// </summary>
public static class TransformExtensions
{
    /// <summary>
    /// Scans the direct children of <paramref name="transform"/> and returns the first
    /// component of type <typeparamref name="T"/> found on a child whose name matches
    /// <paramref name="regexStringPattern"/>. Returns default(T) when no matching child
    /// carries the component.
    /// </summary>
    public static T GetComponentOnChildWithMatchingRegex<T>(this Transform transform, string regexStringPattern)
    {
        Regex namePattern = new(regexStringPattern);

        foreach (Transform child in transform)
        {
            // Ignore children whose names do not match the caller's pattern.
            if (!namePattern.IsMatch(child.name)) continue;

            // The first matching child that actually carries the component wins.
            T component = child.GetComponent<T>();
            if (component != null) return component;
        }

        return default;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7ca02589b1584674caf2cd1ff39cdfc7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: addfb9a10dbf1064d9ea03fa2a2cf452
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f7ee91cfe70e2af439a2ed9b7557855d
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,787 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.LoggerSystem;
using Service;
using UnityEngine;
using UnityEngine.AI;
using UnityEngine.Serialization;
namespace Convai.Scripts.Runtime.Features
{
// STEP 1: Add the enum for your custom action here.
/// <summary>
/// The kinds of physical actions an NPC can carry out. Each <c>ActionMethod</c> maps a
/// server-visible action name (e.g. "Move To") to one of these choices; extend this
/// enum to add custom actions.
/// </summary>
public enum ActionChoice
{
// No bespoke movement logic; used for purely animation-driven actions (e.g. "Dance").
None,
Jump,
Crouch,
MoveTo,
PickUp,
Drop
}
/// <summary>
/// DISCLAIMER: The action API is in experimental stages and can misbehave. Meanwhile, feel free to try it out and play
/// around with it.
/// </summary>
[DisallowMultipleComponent]
[AddComponentMenu("Convai/Convai Actions Handler")]
public class ConvaiActionsHandler : MonoBehaviour
{
// Editor-configured set of actions this NPC supports; each maps a server-visible
// action name to an ActionChoice and optional animation (see Reset() for defaults).
[SerializeField] public ActionMethod[] actionMethods;
// Raw action strings received from the server; drained one per frame in Update().
public List<string> actionResponseList = new();
// Parsed actions queued for playback; filled by ParseSingleAction().
private readonly List<ConvaiAction> _actionList = new();
// Action configuration sent to the Convai service; populated in Awake() and Start().
public readonly ActionConfig ActionConfig = new();
// Scratch list holding the individual action strings split from the latest response.
private List<string> _actions = new();
// The ConvaiNPC component on this GameObject, if one is attached (set in Awake()).
private ConvaiNPC _currentNPC;
// Scene-wide registry of interactable characters/objects (located in Awake()).
private ConvaiInteractablesData _interactablesData;
// Handle to the running PlayActionList() coroutine so it can be stopped/restarted.
private Coroutine _playActionListCoroutine;
// Awake is called when the script instance is being loaded.
// Locates scene dependencies and builds the action configuration that is later
// sent to the Convai service (actions, characters, and objects).
private void Awake()
{
// Find the global action settings object in the scene
_interactablesData = FindObjectOfType<ConvaiInteractablesData>();
// Check if the global action settings object is missing
if (_interactablesData == null)
// Log an error message to indicate missing Convai Action Settings
// NOTE(review): execution continues even when _interactablesData is null;
// FindTargetObject() dereferences it later without a null check — confirm
// the scene is always expected to contain a ConvaiInteractablesData object.
ConvaiLogger.Error("Convai Action Settings missing. Please create a game object that handles actions.",
ConvaiLogger.LogCategory.Character);
// Check if this GameObject has a ConvaiNPC component attached
if (TryGetComponent(out ConvaiNPC npc))
// If it does, set the current NPC to this GameObject
_currentNPC = npc;
// Iterate through each action method and add its name to the action configuration
foreach (ActionMethod actionMethod in actionMethods) ActionConfig.Actions.Add(actionMethod.action);
if (_interactablesData != null)
{
// Iterate through each character in global action settings and add them to the action configuration
foreach (ConvaiInteractablesData.Character character in _interactablesData.Characters)
{
ActionConfig.Types.Character rpcCharacter = new()
{
Name = character.Name,
Bio = character.Bio
};
ActionConfig.Characters.Add(rpcCharacter);
}
// Iterate through each object in global action settings and add them to the action configuration
foreach (ConvaiInteractablesData.Object eachObject in _interactablesData.Objects)
{
ActionConfig.Types.Object rpcObject = new()
{
Name = eachObject.Name,
Description = eachObject.Description
};
ActionConfig.Objects.Add(rpcObject);
}
}
}
/// <summary>
/// Unity editor hook (runs when the component is added or reset): seeds the
/// component with the default set of supported actions.
/// </summary>
private void Reset()
{
    actionMethods = new[]
    {
        new ActionMethod { action = "Move To", actionChoice = ActionChoice.MoveTo },
        new ActionMethod { action = "Pick Up", actionChoice = ActionChoice.PickUp },
        new ActionMethod { action = "Dance", animationName = "Dance", actionChoice = ActionChoice.None },
        new ActionMethod { action = "Drop", actionChoice = ActionChoice.Drop },
        new ActionMethod { action = "Jump", actionChoice = ActionChoice.Jump }
    };
}
/// <summary>
/// Unity lifecycle: finalizes the action configuration and begins consuming the
/// queued action list.
/// </summary>
private void Start()
{
    // "multistep" classification lets a single response contain a sequence of actions.
    ActionConfig.Classification = "multistep";
    ConvaiLogger.DebugLog(ActionConfig, ConvaiLogger.LogCategory.Actions);

    // Kick off the coroutine that plays back parsed actions.
    _playActionListCoroutine = StartCoroutine(PlayActionList());
}
/// <summary>
/// Restarts the action playback coroutine when the component is re-enabled.
/// A null handle means Start() has not run yet and will start the loop itself.
/// </summary>
private void OnEnable()
{
    if (_playActionListCoroutine == null) return;
    _playActionListCoroutine = StartCoroutine(PlayActionList());
}
/// <summary>
/// Stops the action playback coroutine while the component is disabled.
/// </summary>
private void OnDisable()
{
    if (_playActionListCoroutine == null) return;
    StopCoroutine(_playActionListCoroutine);
}
/// <summary>
/// Dequeues and parses at most one pending action response per frame.
/// </summary>
private void Update()
{
    if (actionResponseList.Count == 0) return;

    ParseActions(actionResponseList[0]);
    actionResponseList.RemoveAt(0);
}
/// <summary>
/// Splits a comma-separated action response string into individual actions and
/// hands each one to ParseSingleAction() for fuzzy matching.
/// </summary>
/// <param name="actionsString">Raw action text received from the server.</param>
private void ParseActions(string actionsString)
{
    actionsString = actionsString.Trim();
    ConvaiLogger.DebugLog($"Parsing actions from: {actionsString}", ConvaiLogger.LogCategory.Actions);

    _actionList.Clear();
    _actions = actionsString.Split(", ").ToList();

    foreach (string action in _actions)
    {
        ConvaiLogger.Info($"Processing action: {action}", ConvaiLogger.LogCategory.Actions);
        ParseSingleAction(action.Split(' ').ToList());
    }
}
/// <summary>
/// Parses a single action from a list of action words by trying every split point
/// between a "verb" prefix and an "object" suffix, fuzzy-matching the verb against
/// the configured action methods (Levenshtein distance, tolerance 2), and queueing
/// the first acceptable match into the action list.
/// </summary>
/// <param name="actionWords">The list of words representing the action.</param>
private void ParseSingleAction(List<string> actionWords)
{
for (int j = 0; j < actionWords.Count; j++)
{
// Split the action into verb and object parts
string[] verbPart = actionWords.Take(j + 1).ToArray();
string[] objectPart = actionWords.Skip(j + 1).ToArray();
// Remove trailing 's' from verb words (naive de-pluralization; note TrimEnd
// strips ALL trailing 's' characters, e.g. "press" -> "pre").
verbPart = verbPart.Select(word => word.TrimEnd('s')).ToArray();
string actionString = string.Join(" ", verbPart);
// Find the best matching action using Levenshtein distance
ActionMethod matchingAction = actionMethods
.OrderBy(a => LevenshteinDistance(a.action.ToLower(), actionString.ToLower()))
.FirstOrDefault();
// Reject matches that are more than 2 edits away and try the next split point.
if (matchingAction == null || LevenshteinDistance(matchingAction.action.ToLower(), actionString.ToLower()) > 2) continue;
// Find the target object for the action
GameObject targetObject = FindTargetObject(objectPart);
LogActionResult(verbPart, objectPart, targetObject);
// Add the parsed action to the action list; stop after the first accepted split.
_actionList.Add(new ConvaiAction(matchingAction.actionChoice, targetObject, matchingAction.animationName));
break;
}
}
/// <summary>
/// Finds the target object based on the object part of the action. Objects are
/// tried first, then characters, both fuzzy-matched by name with a Levenshtein
/// distance tolerance of 2.
/// </summary>
/// <param name="objectPart">The array of words representing the object.</param>
/// <returns>The GameObject that best matches the object description, or null if no match is found.</returns>
private GameObject FindTargetObject(string[] objectPart)
{
string targetName = string.Join(" ", objectPart);
// Try to find a matching object
// NOTE(review): _interactablesData may be null here (Awake() only logs an error
// when it is missing) — this would throw a NullReferenceException; confirm the
// scene always provides a ConvaiInteractablesData object.
ConvaiInteractablesData.Object obj = _interactablesData.Objects
.OrderBy(o => LevenshteinDistance(o.Name.ToLower(), targetName.ToLower()))
.FirstOrDefault();
if (obj != null && LevenshteinDistance(obj.Name.ToLower(), targetName.ToLower()) <= 2)
return obj.gameObject;
// If no object is found, try to find a matching character
ConvaiInteractablesData.Character character = _interactablesData.Characters
.OrderBy(c => LevenshteinDistance(c.Name.ToLower(), targetName.ToLower()))
.FirstOrDefault();
if (character != null && LevenshteinDistance(character.Name.ToLower(), targetName.ToLower()) <= 2)
return character.gameObject;
return null;
}
/// <summary>
/// Calculates the Levenshtein (edit) distance between two strings: the minimum number of
/// single-character insertions, deletions, or substitutions needed to turn one into the other.
/// </summary>
/// <param name="s">The first string.</param>
/// <param name="t">The second string.</param>
/// <returns>The Levenshtein distance between the two strings.</returns>
private int LevenshteinDistance(string s, string t)
{
    // Classic dynamic-programming formulation kept to two rolling rows, since each cell
    // only depends on the previous row and the cell immediately to its left.
    int[] previousRow = new int[t.Length + 1];
    int[] currentRow = new int[t.Length + 1];

    // Distance from the empty prefix of s to each prefix of t is simply the prefix length.
    for (int j = 0; j <= t.Length; j++)
        previousRow[j] = j;

    for (int i = 1; i <= s.Length; i++)
    {
        // Distance from the first i characters of s to the empty prefix of t.
        currentRow[0] = i;

        for (int j = 1; j <= t.Length; j++)
        {
            int substitutionCost = s[i - 1] == t[j - 1] ? 0 : 1;
            int deletion = previousRow[j] + 1;
            int insertion = currentRow[j - 1] + 1;
            int substitution = previousRow[j - 1] + substitutionCost;
            currentRow[j] = Math.Min(Math.Min(deletion, insertion), substitution);
        }

        // The row just filled becomes the "previous" row for the next character of s.
        (previousRow, currentRow) = (currentRow, previousRow);
    }

    return previousRow[t.Length];
}
/// <summary>
/// Logs the outcome of matching an action's object phrase to a scene target.
/// </summary>
/// <param name="verbPart">Words forming the action's verb phrase.</param>
/// <param name="objectPart">Words forming the action's object phrase.</param>
/// <param name="targetObject">The matched scene object, or null when nothing matched.</param>
private void LogActionResult(string[] verbPart, string[] objectPart, GameObject targetObject)
{
    string verb = string.Join(" ", verbPart).ToLower();
    string obj = string.Join(" ", objectPart).ToLower();

    // No match: warn and bail out early.
    if (targetObject == null)
    {
        ConvaiLogger.Warn($"No matching target found for action: {verb}", ConvaiLogger.LogCategory.Actions);
        return;
    }

    ConvaiLogger.DebugLog($"Active Target: {obj}", ConvaiLogger.LogCategory.Actions);
    ConvaiLogger.DebugLog($"Found matching target: {targetObject.name} for action: {verb}", ConvaiLogger.LogCategory.Actions);
}
/// <summary>
/// Event that is triggered when an action starts.
/// </summary>
/// <remarks>
/// Subscribers receive the action name (e.g. "MoveTo", "PickUp") and the GameObject the
/// action is targeting (the NPC itself for self-directed actions such as "Crouch").
/// </remarks>
public event Action<string, GameObject> ActionStarted;

/// <summary>
/// Event that is triggered when an action ends.
/// </summary>
/// <remarks>
/// Subscribers receive the action name and the GameObject the action was targeting.
/// Note: actions that abort early (e.g. a PickUp whose target is null or inactive) may
/// never raise this event even though ActionStarted fired.
/// </remarks>
public event Action<string, GameObject> ActionEnded;
/// <summary>
/// Coroutine that continuously drains the action list, executing one action at a time in order.
/// </summary>
/// <returns>An enumerator driving the action playback loop; it never terminates on its own.</returns>
private IEnumerator PlayActionList()
{
    while (true)
    {
        if (_actionList.Count == 0)
        {
            // Nothing queued: idle for a frame, then check again.
            yield return null;
            continue;
        }

        // Execute the action at the front of the list and wait until it has finished.
        yield return DoAction(_actionList[0]);

        // Only remove the action once it has fully completed.
        _actionList.RemoveAt(0);
    }
}
/// <summary>
/// Executes a single parsed action by dispatching on its ActionChoice verb.
/// </summary>
/// <param name="action">The action to perform, carrying its verb, target, and animation name.</param>
/// <returns>An enumerator that completes when the action has finished.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unhandled ActionChoice value.</exception>
private IEnumerator DoAction(ConvaiAction action)
{
    // STEP 2: Add the function call for your action here corresponding to your enum.
    // Remember to yield until its return if it is an Enumerator function.
    switch (action.Verb)
    {
        case ActionChoice.MoveTo:
            // Call the MoveTo function and yield until it's completed.
            yield return MoveTo(action.Target);
            break;
        case ActionChoice.PickUp:
            // Call the PickUp function and yield until it's completed.
            yield return PickUp(action.Target);
            break;
        case ActionChoice.Drop:
            Drop(action.Target);
            break;
        case ActionChoice.Jump:
            Jump();
            break;
        case ActionChoice.Crouch:
            yield return Crouch();
            break;
        case ActionChoice.None:
            // No explicit verb: fall back to playing the action's animation, if any.
            yield return AnimationActions(action.Animation);
            break;
        default:
            // Include the parameter name and offending value so an unhandled enum member
            // added later is easy to diagnose from the exception alone.
            throw new ArgumentOutOfRangeException(nameof(action), action.Verb, "Unhandled ActionChoice value.");
    }

    // Yield once so the coroutine always advances at least one frame after an action.
    yield return null;
}
/// <summary>
/// Coroutine that plays a named animator state on the NPC, waits for its clip to finish,
/// then cross-fades back to "Idle".
/// </summary>
/// <param name="animationName">Name of the animator state (on layer 0) to play.</param>
/// <returns>A coroutine that completes once the animation has played out (or aborts early on failure).</returns>
private IEnumerator AnimationActions(string animationName)
{
    ConvaiLogger.DebugLog("Doing animation: " + animationName, ConvaiLogger.LogCategory.Actions);

    Animator animator = _currentNPC.GetComponent<Animator>();

    // Hash the state name once; Animator APIs operate on hashes, which is cheaper than strings.
    int animationHash = Animator.StringToHash(animationName);

    // Safety check: bail out early if layer 0 has no state with this name.
    if (!animator.HasState(0, animationHash))
    {
        ConvaiLogger.DebugLog("Could not find an animator state named: " + animationName, ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // Cross-fade into the state (0.1s fade), then wait just past the fade so the new state
    // is the current one before we query its clip info.
    animator.CrossFadeInFixedTime(animationHash, 0.1f);
    yield return new WaitForSeconds(0.11f);

    // Inspect the clip(s) now playing on layer 0.
    AnimatorClipInfo[] clipInfo = animator.GetCurrentAnimatorClipInfo(0);
    if (clipInfo == null || clipInfo.Length == 0)
    {
        ConvaiLogger.DebugLog("Animator state named: " + animationName + " has no associated animation clips",
            ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // Find the first valid clip and record its length and name.
    float length = 0;
    string animationClipName = "";
    foreach (AnimatorClipInfo clipInf in clipInfo)
    {
        ConvaiLogger.DebugLog("Clip name: " + clipInf.clip.name, ConvaiLogger.LogCategory.Actions);
        AnimationClip clip = clipInf.clip;
        if (clip != null)
        {
            length = clip.length;
            animationClipName = clip.name;
            break;
        }
    }

    if (length > 0.0f)
    {
        ConvaiLogger.DebugLog(
            "Playing the animation " + animationClipName + " from the Animator State " + animationName +
            " for " + length + " seconds", ConvaiLogger.LogCategory.Actions);
        // Let the clip run to completion before transitioning back.
        yield return new WaitForSeconds(length);
    }
    else
    {
        // No valid clip, or a zero-length clip: nothing to wait for.
        ConvaiLogger.DebugLog(
            "Animator state named: " + animationName +
            " has no valid animation clips or they have a length of 0", ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // Transition back to idle — assumes an "Idle" state exists in the Animator Controller.
    animator.CrossFadeInFixedTime(Animator.StringToHash("Idle"), 0.1f);

    // Yield once so the coroutine advances to the next frame before finishing.
    yield return null;
}
/// <summary>
/// Subscribes the provided handlers to the ActionStarted and ActionEnded events,
/// allowing external code to react when actions begin and finish.
/// </summary>
/// <param name="onActionStarted">
/// Handler invoked when an action starts; receives the action name and the target GameObject.
/// </param>
/// <param name="onActionEnded">
/// Handler invoked when an action ends; receives the action name and the target GameObject.
/// </param>
public void RegisterForActionEvents(Action<string, GameObject> onActionStarted,
    Action<string, GameObject> onActionEnded)
{
    ActionStarted += onActionStarted;
    ActionEnded += onActionEnded;
}
/// <summary>
/// Unsubscribes the provided handlers from the ActionStarted and ActionEnded events.
/// Pass the same delegate instances that were previously registered, or the
/// unsubscription is a no-op.
/// </summary>
/// <param name="onActionStarted">
/// Handler to remove from the ActionStarted event.
/// </param>
/// <param name="onActionEnded">
/// Handler to remove from the ActionEnded event.
/// </param>
public void UnregisterForActionEvents(Action<string, GameObject> onActionStarted,
    Action<string, GameObject> onActionEnded)
{
    ActionStarted -= onActionStarted;
    ActionEnded -= onActionEnded;
}
/// <summary>
/// Inspector-configurable mapping from an action phrase to an ActionChoice verb and an
/// optional animation name.
/// </summary>
[Serializable]
public class ActionMethod
{
    // Action phrase matched (fuzzily, via Levenshtein distance) against parsed verb text.
    [FormerlySerializedAs("Action")] [SerializeField]
    public string action;

    // Animator state name played when this entry's verb is ActionChoice.None.
    [SerializeField] public string animationName;

    // The enum verb this phrase maps to, dispatched on in DoAction.
    [SerializeField] public ActionChoice actionChoice;
}
/// <summary>
/// Immutable, parsed representation of one action to execute: a verb, an optional scene
/// target, and an optional animation name.
/// </summary>
private class ConvaiAction
{
    public ConvaiAction(ActionChoice verb, GameObject target, string animation)
    {
        Verb = verb;
        Target = target;
        Animation = animation;
    }

    #region 04. Public variables

    // Animator state name played when Verb is ActionChoice.None.
    public readonly string Animation;

    // Scene object the action operates on; may be null when no target matched.
    public readonly GameObject Target;

    // The action verb, dispatched on in DoAction.
    public readonly ActionChoice Verb;

    #endregion
}
// STEP 3: Add the function for your action here.
#region Action Implementation Methods
/// <summary>
/// Coroutine that plays the crouch animation and shrinks the NPC's capsule colliders for
/// the duration of the clip, then returns to idle.
/// </summary>
/// <returns>A coroutine that completes once the crouch animation has played out.</returns>
private IEnumerator Crouch()
{
    ActionStarted?.Invoke("Crouch", _currentNPC.gameObject);
    ConvaiLogger.DebugLog("Crouching!", ConvaiLogger.LogCategory.Actions);

    Animator animator = _currentNPC.GetComponent<Animator>();
    animator.CrossFadeInFixedTime(Animator.StringToHash("Crouch"), 0.1f);

    // Wait just past the cross-fade so the Animator has transitioned to the new state.
    yield return new WaitForSeconds(0.11f);

    AnimatorClipInfo[] clipInfo = animator.GetCurrentAnimatorClipInfo(0);
    if (clipInfo == null || clipInfo.Length == 0)
    {
        ConvaiLogger.DebugLog("No animation clips found for crouch state!", ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    float length = clipInfo[0].clip.length;

    // Cache the collider array once instead of calling GetComponents repeatedly.
    CapsuleCollider[] colliders = _currentNPC.GetComponents<CapsuleCollider>();
    colliders[0].height = 1.2f;
    colliders[0].center = new Vector3(0, 0.6f, 0);
    if (colliders.Length > 1)
    {
        colliders[1].height = 1.2f;
        colliders[1].center = new Vector3(0, 0.6f, 0);
    }
    // NOTE(review): the collider height/center are not restored after the crouch ends —
    // confirm whether that is intentional.

    // Let the crouch clip play out, then return to idle.
    yield return new WaitForSeconds(length);
    animator.CrossFadeInFixedTime(Animator.StringToHash("Idle"), 0.1f);
    yield return null;

    ActionEnded?.Invoke("Crouch", _currentNPC.gameObject);
}
/// <summary>
/// Coroutine that walks the NPC to the given target using its NavMeshAgent, then finishes
/// by returning to idle and raising the action events.
/// </summary>
/// <param name="target">The destination object; the coroutine exits immediately if it is null or inactive.</param>
private IEnumerator MoveTo(GameObject target)
{
    if (!IsTargetValid(target)) yield break;

    ConvaiLogger.DebugLog($"Moving to Target: {target.name}", ConvaiLogger.LogCategory.Actions);
    ActionStarted?.Invoke("MoveTo", target);

    Animator animator = _currentNPC.GetComponent<Animator>();
    NavMeshAgent navMeshAgent = _currentNPC.GetComponent<NavMeshAgent>();

    // Switch to the walking animation and take rotation control away from the agent.
    SetupAnimationAndNavigation(animator, navMeshAgent);

    // Aim slightly in front of the target so the NPC stops beside it rather than inside it.
    Vector3 targetDestination = CalculateTargetDestination(target);
    navMeshAgent.SetDestination(targetDestination);

    // Yield a frame so the agent can begin path computation before remainingDistance is polled.
    yield return null;

    yield return MoveTowardsTarget(target, navMeshAgent);
    FinishMovement(animator, target);
}
/// <summary>
/// Checks whether a MoveTo target can be navigated to (non-null and active in the hierarchy).
/// </summary>
/// <param name="target">Candidate destination object.</param>
/// <returns>True when the target exists and is active; false (with a debug log entry) otherwise.</returns>
private bool IsTargetValid(GameObject target)
{
    bool isUsable = target != null && target.activeInHierarchy;
    if (!isUsable)
        ConvaiLogger.DebugLog("MoveTo target is null or inactive.", ConvaiLogger.LogCategory.Actions);
    return isUsable;
}
/// <summary>
/// Puts the NPC into its walking state: cross-fades to the "Walking" animator state,
/// disables root motion (the NavMeshAgent drives position instead), and takes rotation
/// control away from the agent so this script can rotate the NPC manually.
/// </summary>
/// <param name="animator">The NPC's animator.</param>
/// <param name="navMeshAgent">The NPC's navigation agent.</param>
private void SetupAnimationAndNavigation(Animator animator, NavMeshAgent navMeshAgent)
{
    animator.CrossFade(Animator.StringToHash("Walking"), 0.01f);
    animator.applyRootMotion = false;
    navMeshAgent.updateRotation = false;
}
/// <summary>
/// Computes where the NPC should stop for a target: the target's position pushed along its
/// forward axis by the target's render depth (or half a meter when it has no Renderer).
/// </summary>
/// <param name="target">The object being approached.</param>
/// <returns>The world-space destination point for the NavMeshAgent.</returns>
private Vector3 CalculateTargetDestination(GameObject target)
{
    // Offset along the target's forward axis: use its renderer's z-extent when available,
    // otherwise fall back to a fixed half-meter clearance.
    float forwardClearance = target.TryGetComponent(out Renderer rendererComponent)
        ? rendererComponent.bounds.size.z
        : 0.5f;

    return target.transform.position + forwardClearance * target.transform.forward;
}
/// <summary>
/// Coroutine that waits for the NavMeshAgent to reach its destination, rotating the NPC to
/// face its direction of travel along the way. Aborts if the target is deactivated mid-route.
/// </summary>
/// <param name="target">The object being moved towards; movement aborts if it is deactivated.</param>
/// <param name="navMeshAgent">The agent performing the navigation.</param>
private IEnumerator MoveTowardsTarget(GameObject target, NavMeshAgent navMeshAgent)
{
    float rotationSpeed = 5;
    while (navMeshAgent.remainingDistance > navMeshAgent.stoppingDistance)
    {
        if (!target.activeInHierarchy)
        {
            ConvaiLogger.DebugLog("Target deactivated during movement.", ConvaiLogger.LogCategory.Actions);
            yield break;
        }

        // While the agent is effectively stationary there is no velocity to face: skip the
        // rotation step entirely. (The original fell through and called LookRotation on a
        // normalized zero vector, which has no meaningful direction.)
        if (navMeshAgent.velocity.sqrMagnitude < Mathf.Epsilon)
        {
            yield return null;
            continue;
        }

        RotateTowardsMovementDirection(navMeshAgent, rotationSpeed);
        yield return null;
    }
}
/// <summary>
/// Slerps the NPC's rotation towards the agent's current direction of travel.
/// </summary>
/// <param name="navMeshAgent">Agent whose velocity supplies the desired facing direction.</param>
/// <param name="rotationSpeed">Interpolation speed factor (scaled by Time.deltaTime).</param>
private void RotateTowardsMovementDirection(NavMeshAgent navMeshAgent, float rotationSpeed)
{
    Quaternion rotation = Quaternion.LookRotation(navMeshAgent.velocity.normalized);
    // NOTE(review): zeroing raw quaternion components is not a true yaw-only projection
    // (the quaternion is left unnormalized); it only approximately suppresses pitch/roll.
    // Confirm this matches the intended behavior before changing it.
    rotation.x = 0;
    rotation.z = 0;
    transform.rotation = Quaternion.Slerp(transform.rotation, rotation, rotationSpeed * Time.deltaTime);
}
/// <summary>
/// Ends a MoveTo: returns to the idle animation, optionally turns to face the camera when
/// this was the only parsed action, restores root motion, and raises ActionEnded.
/// </summary>
/// <param name="animator">The NPC's animator.</param>
/// <param name="target">The target that was moved to (passed through to ActionEnded).</param>
private void FinishMovement(Animator animator, GameObject target)
{
    animator.CrossFade(Animator.StringToHash("Idle"), 0.1f);
    // Only face the camera when this movement was the sole action in the parsed batch.
    if (_actions.Count == 1 && Camera.main != null) StartCoroutine(RotateTowardsCamera());
    animator.applyRootMotion = true;
    ActionEnded?.Invoke("MoveTo", target);
}
/// <summary>
/// Coroutine that smoothly turns the NPC towards the main camera over a fixed duration.
/// Does nothing when no main camera exists.
/// </summary>
private IEnumerator RotateTowardsCamera()
{
    if (Camera.main == null) yield break;

    Vector3 direction = (Camera.main.transform.position - transform.position).normalized;
    Quaternion targetRotation = Quaternion.LookRotation(direction);

    // Zero the x/z quaternion components up front (the assignment is idempotent, so once
    // is enough) to keep the NPC turning only around the vertical axis.
    targetRotation.x = 0;
    targetRotation.z = 0;

    const float rotationTime = 2f;
    float elapsedTime = 0f;
    while (elapsedTime < rotationTime)
    {
        transform.rotation = Quaternion.Slerp(transform.rotation, targetRotation, elapsedTime / rotationTime);
        elapsedTime += Time.deltaTime;
        yield return null;
    }
}
/// <summary>
/// Coroutine to pick up a target GameObject: turns the NPC towards it, plays the
/// "Picking Up" animation, then parents the object to the NPC and deactivates it.
/// </summary>
/// <param name="target">The target GameObject to pick up.</param>
private IEnumerator PickUp(GameObject target)
{
    // NOTE(review): ActionStarted fires before the target is validated, so a null/inactive
    // target produces a started-but-never-ended PickUp for event subscribers — confirm.
    ActionStarted?.Invoke("PickUp", target);

    // Abort on a missing target.
    if (target == null)
    {
        ConvaiLogger.DebugLog("Target is null! Exiting PickUp coroutine.", ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // Abort on an inactive target.
    if (!target.activeInHierarchy)
    {
        ConvaiLogger.DebugLog($"Target: {target.name} is inactive! Exiting PickUp coroutine.",
            ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // Horizontal direction from the NPC to the target (vertical component ignored).
    Vector3 direction = (target.transform.position - transform.position).normalized;
    direction.y = 0;

    Quaternion targetRotation = Quaternion.LookRotation(direction);
    float elapsedTime = 0f;
    float rotationTime = 0.5f;

    // Smoothly rotate the NPC towards the target over rotationTime seconds.
    while (elapsedTime < rotationTime)
    {
        targetRotation.x = 0;
        targetRotation.z = 0;
        transform.rotation = Quaternion.Slerp(transform.rotation, targetRotation, elapsedTime / rotationTime);
        elapsedTime += Time.deltaTime;
        yield return null;
    }

    ConvaiLogger.DebugLog($"Picking up Target: {target.name}", ConvaiLogger.LogCategory.Actions);

    // Play the "Picking Up" animation with a cross-fade transition.
    Animator animator = _currentNPC.GetComponent<Animator>();
    animator.CrossFade(Animator.StringToHash("Picking Up"), 0.1f);

    // Give the Animator a second to transition into the "Picking Up" state.
    yield return new WaitForSeconds(1);

    // Additional fixed wait for the point in the animation where the hand reaches the object.
    float timeToReachObject = 1f;
    yield return new WaitForSeconds(timeToReachObject);

    // Re-check the target: it may have been deactivated while the animation played.
    if (!target.activeInHierarchy)
    {
        ConvaiLogger.DebugLog(
            $"Target: {target.name} became inactive during the pick up animation! Exiting PickUp coroutine.",
            ConvaiLogger.LogCategory.Actions);
        yield break;
    }

    // "Pick up" the object: parent it to the NPC and hide it.
    target.transform.parent = gameObject.transform;
    target.SetActive(false);

    // Transition back to the "Idle" animation.
    animator.CrossFade(Animator.StringToHash("Idle"), 0.4f);

    ActionEnded?.Invoke("PickUp", target);
}
/// <summary>
/// Drops a previously picked-up target: detaches it from the NPC and re-activates it.
/// </summary>
/// <param name="target">The object to drop; silently ignored when null.</param>
private void Drop(GameObject target)
{
    // Validate before raising ActionStarted so event subscribers never observe a
    // started-but-never-ended drop (the original invoked ActionStarted first and then
    // returned on null without ever raising ActionEnded).
    if (target == null) return;

    ActionStarted?.Invoke("Drop", target);
    ConvaiLogger.DebugLog($"Dropping Target: {target.name}", ConvaiLogger.LogCategory.Actions);
    target.transform.parent = null;
    target.SetActive(true);
    ActionEnded?.Invoke("Drop", target);
}
/// <summary>
/// Applies an upward impulse to this GameObject's Rigidbody and raises the action events.
/// </summary>
private void Jump()
{
    ActionStarted?.Invoke("Jump", _currentNPC.gameObject);
    float jumpForce = 5f;
    // NOTE(review): assumes a Rigidbody is attached — GetComponent returns null otherwise
    // and this line throws. Confirm the NPC prefab guarantees one.
    GetComponent<Rigidbody>().AddForce(new Vector3(0f, jumpForce, 0f), ForceMode.Impulse);
    // NOTE(review): this cross-fades into the "Dance" animator state, not a jump state —
    // verify whether that is intentional.
    _currentNPC.GetComponent<Animator>().CrossFade(Animator.StringToHash("Dance"), 1);
    ActionEnded?.Invoke("Jump", _currentNPC.gameObject);
}
// STEP 3: Add the function for your action here.
#endregion
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 744cc0ebc534aa44b8b0871ca72570b9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,39 @@
using System;
using UnityEngine;
namespace Convai.Scripts.Runtime.Features
{
/// <summary>
/// This script defines global actions and settings for Convai: the catalogue of named
/// characters and objects in the environment that NPC actions can target.
/// </summary>
[AddComponentMenu("Convai/Convai Interactables Data")]
public class ConvaiInteractablesData : MonoBehaviour
{
    [Tooltip("Array of Characters in the environment")] [SerializeField]
    public Character[] Characters;

    [Tooltip("Array of Objects in the environment")] [SerializeField]
    public Object[] Objects;

    // Indicator transform for a dynamically chosen move target.
    // NOTE(review): no consumer is visible in this file — confirm how it is used.
    public Transform DynamicMoveTargetIndicator;

    /// <summary>
    /// Represents a character in the environment.
    /// </summary>
    [Serializable]
    public class Character
    {
        // Lookup name; matched (fuzzily) by the action system when resolving targets.
        [SerializeField] public string Name;
        // Short description of the character.
        [SerializeField] public string Bio;
        // Scene object representing the character.
        [SerializeField] public GameObject gameObject;
    }

    /// <summary>
    /// Represents an interactable object in the environment.
    /// Note: this nested type shadows UnityEngine.Object inside this class's scope.
    /// </summary>
    [Serializable]
    public class Object
    {
        // Lookup name; matched (fuzzily) by the action system when resolving targets.
        [SerializeField] public string Name;
        // Short description of the object.
        [SerializeField] public string Description;
        // Scene object representing the interactable.
        [SerializeField] public GameObject gameObject;
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b3b04a8e56d4a394585355d57021ba7a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 2b8ebe81f46c4ee9ba8dc4742b92ca72
timeCreated: 1729098038

View File

@ -0,0 +1,30 @@
using Service;
using UnityEngine;
namespace Convai.Scripts.Runtime.Features
{
/// <summary>
/// Holds and mutates the dynamic-info text carried by this GameObject's DynamicInfoConfig.
/// </summary>
public class DynamicInfoController : MonoBehaviour
{
    /// <summary>Backing config whose Text field carries the current dynamic info.</summary>
    public DynamicInfoConfig DynamicInfoConfig { get; private set; }

    private void Awake()
    {
        // Create the config eagerly so callers can set info without null checks.
        DynamicInfoConfig = new DynamicInfoConfig();
    }

    /// <summary>Replaces the current dynamic info text.</summary>
    /// <param name="info">The new text.</param>
    public void SetDynamicInfo(string info)
    {
        DynamicInfoConfig.Text = info;
    }

    /// <summary>Appends to the current dynamic info text.</summary>
    /// <param name="info">The text to append.</param>
    public void AddDynamicInfo(string info)
    {
        DynamicInfoConfig.Text += info;
    }

    /// <summary>Resets the dynamic info text to an empty string.</summary>
    public void ClearDynamicInfo()
    {
        DynamicInfoConfig.Text = "";
    }
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 05aa3cdd3ee246a0a8e5ca86bcfba893
timeCreated: 1729098061

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 79e17fbbffadd7f47b7b345e0be9a750
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,112 @@
using System;
using System.Collections.Generic;
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.Extensions;
using Convai.Scripts.Runtime.Features.LipSync.Models;
using Convai.Scripts.Runtime.Features.LipSync.Types;
using Service;
using UnityEngine;
namespace Convai.Scripts.Runtime.Features.LipSync
{
public class ConvaiLipSync : MonoBehaviour
{
    // Which facial blendshape naming model this character uses (defaults to the OVR model name).
    [HideInInspector] public FaceModel faceModel = FaceModel.OvrModelName;

    [field: SerializeField]
    [field: Tooltip("Assign the skin renderers and its respective effectors, along with the bones used for Facial Expression")]
    public FacialExpressionData FacialExpressionData { get; private set; } = new();

    [field: SerializeField]
    [field: Range(0f, 1f)]
    [field: Tooltip("This decides how much blending will occur between two different blendshape frames")]
    public float WeightBlendingPower { get; private set; } = 0.5f;

    // Emotions for this character; replaced wholesale by SetCharacterEmotions.
    [SerializeField] private List<string> characterEmotions;

    private ConvaiNPC _convaiNPC;

    // The lip-sync applier component (a ConvaiVisemesLipSync) created and initialized in Start.
    public ConvaiLipSyncApplicationBase ConvaiLipSyncApplicationBase { get; private set; }

    /// <summary>
    /// This function will automatically set any of the unassigned skinned mesh renderers to
    /// appropriate values using regex based functions, sets the references of the required
    /// variables, and marks the character as lip-syncing.
    /// </summary>
    private void Start()
    {
        FindSkinMeshRenderer();
        _convaiNPC = GetComponent<ConvaiNPC>();
        ConvaiLipSyncApplicationBase = gameObject.GetOrAddComponent<ConvaiVisemesLipSync>();
        ConvaiLipSyncApplicationBase.Initialize(this, _convaiNPC);
        SetCharacterLipSyncing(true);
    }

    // Stop lip-sync when the component is disabled or the application quits so no stale
    // frames remain queued in the applier.
    private void OnDisable()
    {
        StopLipSync();
    }

    private void OnApplicationQuit()
    {
        StopLipSync();
    }

    // Raised with the new lip-syncing state whenever SetCharacterLipSyncing is called.
    // NOTE(review): invoked from Start — subscribers registering in their own Start may miss
    // the initial notification depending on script execution order; confirm.
    public event Action<bool> OnCharacterLipSyncing;

    /// <summary>
    /// Fills in any unassigned head/teeth/tongue skinned mesh renderers by matching child
    /// names against common character-rig naming patterns.
    /// </summary>
    private void FindSkinMeshRenderer()
    {
        if (FacialExpressionData.Head.Renderer == null)
            FacialExpressionData.Head.Renderer = transform.GetComponentOnChildWithMatchingRegex<SkinnedMeshRenderer>("(.*_Head|CC_Base_Body)");
        if (FacialExpressionData.Teeth.Renderer == null)
            FacialExpressionData.Teeth.Renderer = transform.GetComponentOnChildWithMatchingRegex<SkinnedMeshRenderer>("(.*_Teeth|CC_Base_Teeth)");
        if (FacialExpressionData.Tongue.Renderer == null)
            FacialExpressionData.Tongue.Renderer = transform.GetComponentOnChildWithMatchingRegex<SkinnedMeshRenderer>("(.*_Tongue|CC_Base_Tongue)");
    }

    /// <summary>
    /// Overrides the character emotions list with a defensive copy of the provided list.
    /// </summary>
    /// <param name="newEmotions">List of new emotions.</param>
    public void SetCharacterEmotions(List<string> newEmotions)
    {
        characterEmotions = new List<string>(newEmotions);
    }

    /// <summary>
    /// Returns a direct reference to the character emotions list.
    /// [Not recommended to mutate the returned list directly — use SetCharacterEmotions.]
    /// </summary>
    /// <returns>The live internal emotions list.</returns>
    public List<string> GetCharacterEmotions()
    {
        return characterEmotions;
    }

    /// <summary>
    /// Fires the OnCharacterLipSyncing event with the updated lip-syncing state.
    /// </summary>
    /// <param name="value">The new lip-syncing state.</param>
    private void SetCharacterLipSyncing(bool value)
    {
        OnCharacterLipSyncing?.Invoke(value);
    }

    /// <summary>
    /// Purges the latest chunk of lip-sync frames held by the applier.
    /// </summary>
    public void PurgeExcessFrames()
    {
        ConvaiLipSyncApplicationBase?.PurgeExcessBlendShapeFrames();
    }

    /// <summary>
    /// Stops the lip-sync by clearing the applier's frame queue.
    /// </summary>
    public void StopLipSync()
    {
        ConvaiLipSyncApplicationBase?.ClearQueue();
    }
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 88bce56f6985ef84f8835a0152628fa1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,126 @@
using System.Collections.Generic;
using Convai.Scripts.Runtime.Core;
using Convai.Scripts.Runtime.Features.LipSync;
using Convai.Scripts.Runtime.Features.LipSync.Models;
using Service;
using UnityEngine;
namespace Convai.Scripts.Runtime.Features
{
/// <summary>
/// This Class will serve as a base for any method of Lipsync that Convai will develop or use.
/// Concrete subclasses receive frame queues (visemes or ARKit blendshapes) and apply them.
/// </summary>
public abstract class ConvaiLipSyncApplicationBase : MonoBehaviour
{
    /// <summary>
    /// Reference to the NPC on which lipsync will be applied.
    /// </summary>
    protected ConvaiNPC ConvaiNPC;

    /// <summary>
    /// Cached reference of the Facial Expression Data taken from the owning ConvaiLipSync.
    /// </summary>
    protected FacialExpressionData FacialExpressionData;

    /// <summary>
    /// Cached reference of WeightBlendingPower taken from the owning ConvaiLipSync.
    /// </summary>
    protected float WeightBlendingPower;

    /// <summary>
    /// Initializes and sets up the things necessary for lipsync to work, caching which
    /// renderers/bones are present so per-frame code can skip missing parts cheaply.
    /// </summary>
    /// <param name="convaiLipSync">Owning lip-sync component supplying configuration.</param>
    /// <param name="convaiNPC">The NPC this applier animates.</param>
    public virtual void Initialize(ConvaiLipSync convaiLipSync, ConvaiNPC convaiNPC)
    {
        FacialExpressionData = convaiLipSync.FacialExpressionData;
        WeightBlendingPower = convaiLipSync.WeightBlendingPower;
        HasHeadSkinnedMeshRenderer = FacialExpressionData.Head.Renderer != null;
        HasTeethSkinnedMeshRenderer = FacialExpressionData.Teeth.Renderer != null;
        HasTongueSkinnedMeshRenderer = FacialExpressionData.Tongue.Renderer != null;
        HasJawBone = FacialExpressionData.JawBone != null;
        HasTongueBone = FacialExpressionData.TongueBone != null;
        ConvaiNPC = convaiNPC;
    }

    /// <summary>
    /// Updates the tongue bone rotation to the new rotation (no-op when no tongue bone is assigned).
    /// </summary>
    /// <param name="newRotation">New local Euler angles for the tongue bone.</param>
    protected void UpdateTongueBoneRotation(Vector3 newRotation)
    {
        if (!HasTongueBone) return;
        FacialExpressionData.TongueBone.transform.localEulerAngles = newRotation;
    }

    /// <summary>
    /// Updates the jaw bone rotation to the new rotation (no-op when no jaw bone is assigned).
    /// </summary>
    /// <param name="newRotation">New local Euler angles for the jaw bone.</param>
    protected void UpdateJawBoneRotation(Vector3 newRotation)
    {
        if (!HasJawBone) return;
        FacialExpressionData.JawBone.transform.localEulerAngles = newRotation;
    }

    /// <summary>
    /// This removes the excess frames in the queue.
    /// </summary>
    public abstract void PurgeExcessBlendShapeFrames();

    /// <summary>
    /// Returns whether a purge is allowed for the given queue: true when it holds fewer
    /// than 10 frames.
    /// TODO(review): the 10-frame limit is hard-coded — confirm whether it should be configurable.
    /// </summary>
    protected bool CanPurge<T>(Queue<T> queue)
    {
        return queue.Count < 10;
    }

    /// <summary>
    /// This resets the whole queue of the frames.
    /// </summary>
    public abstract void ClearQueue();

    /// <summary>
    /// Adds blendshape frames in the queue (no-op in this base class).
    /// </summary>
    /// <param name="blendshapeFrames">The frames to enqueue.</param>
    public virtual void EnqueueQueue(Queue<ARKitBlendShapes> blendshapeFrames)
    {
    }

    /// <summary>
    /// Adds Visemes frames in the list (no-op in this base class).
    /// </summary>
    /// <param name="visemesFrames">The frames to enqueue.</param>
    public virtual void EnqueueQueue(Queue<VisemesData> visemesFrames)
    {
    }

    /// <summary>
    /// Adds a blendshape frame in the last queue (no-op in this base class).
    /// </summary>
    /// <param name="blendshapeFrame">The frame to enqueue.</param>
    public virtual void EnqueueFrame(ARKitBlendShapes blendshapeFrame)
    {
    }

    /// <summary>
    /// Adds a viseme frame to the last element of the list (no-op in this base class).
    /// </summary>
    /// <param name="viseme">The frame to enqueue.</param>
    public virtual void EnqueueFrame(VisemesData viseme)
    {
    }

    #region Null States of References

    // Cached in Initialize so per-frame application code avoids repeated Unity null checks.
    protected bool HasHeadSkinnedMeshRenderer { get; private set; }
    protected bool HasTeethSkinnedMeshRenderer { get; private set; }
    protected bool HasTongueSkinnedMeshRenderer { get; private set; }
    private bool HasJawBone { get; set; }
    private bool HasTongueBone { get; set; }

    #endregion
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bb231034f5b2dee4494498fe9117bda1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,96 @@
using System.Collections.Generic;
using Convai.Scripts.Runtime.Core;
using Service;
using UnityEngine;
namespace Convai.Scripts.Runtime.Features
{
/// <summary>
/// Buffers the lip-sync frames (visemes or ARKit blendshapes) belonging to one response,
/// together with the response itself, until enough frames have arrived to hand them to an NPC.
/// </summary>
public class LipSyncBlendFrameData
{
    #region FrameType enum

    /// <summary>The kind of frames this buffer holds.</summary>
    public enum FrameType
    {
        Visemes,
        Blendshape
    }

    #endregion

    private readonly Queue<ARKitBlendShapes> _blendShapeFrames = new();
    private readonly FrameType _frameType;
    private readonly GetResponseResponse _getResponseResponse;
    private readonly int _totalFrames;
    private readonly Queue<VisemesData> _visemesFrames = new();

    // Number of frames received so far.
    // NOTE(review): only the ARKit overload of Enqueue increments this counter; the visemes
    // overload does not, which makes CanProcess/CanPartiallyProcess depend on frame type —
    // confirm whether that asymmetry is intentional.
    private int _framesCaptured;

    // True once ProcessPartially has already forwarded the response to the NPC.
    private bool _partiallyProcessed;

    /// <summary>
    /// Creates a buffer expecting <paramref name="totalFrames"/> frames for the given response.
    /// </summary>
    /// <param name="totalFrames">Expected number of frames for this response.</param>
    /// <param name="response">The response the frames belong to.</param>
    /// <param name="frameType">Whether viseme or blendshape frames will be buffered.</param>
    public LipSyncBlendFrameData(int totalFrames, GetResponseResponse response, FrameType frameType)
    {
        _totalFrames = totalFrames;
        _framesCaptured = 0;
        _getResponseResponse = response;
        _frameType = frameType;
        //ConvaiLogger.DebugLog($"Total Frames: {_totalFrames} | {response.AudioResponse.TextData}", ConvaiLogger.LogCategory.LipSync);
    }

    /// <summary>Adds one ARKit blendshape frame and counts it towards the expected total.</summary>
    public void Enqueue(ARKitBlendShapes blendShapeFrame)
    {
        _blendShapeFrames.Enqueue(blendShapeFrame);
        _framesCaptured++;
    }

    /// <summary>Adds one visemes frame (does not increment the captured-frame counter).</summary>
    public void Enqueue(VisemesData visemesData)
    {
        _visemesFrames.Enqueue(visemesData);
    }

    /// <summary>
    /// Forwards the buffered response and all frames to the NPC in one batch, then releases
    /// the NPC audio manager's wait-for-lip-sync flag.
    /// </summary>
    /// <param name="npc">The NPC to hand the response and frames to.</param>
    public void Process(ConvaiNPC npc)
    {
        // Avoid enqueueing the response twice when ProcessPartially already did it.
        if (!_partiallyProcessed)
            npc.EnqueueResponse(_getResponseResponse);
        switch (_frameType)
        {
            case FrameType.Visemes:
                npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueQueue(new Queue<VisemesData>(_visemesFrames));
                break;
            case FrameType.Blendshape:
                npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueQueue(new Queue<ARKitBlendShapes>(_blendShapeFrames));
                break;
        }
        npc.AudioManager.SetWaitForCharacterLipSync(false);
    }

    /// <summary>
    /// Streams whatever frames have arrived so far to the NPC (draining the local queues),
    /// forwarding the response and releasing the lip-sync flag only on the first call.
    /// </summary>
    /// <param name="npc">The NPC to hand the frames to.</param>
    public void ProcessPartially(ConvaiNPC npc)
    {
        if (!_partiallyProcessed)
        {
            _partiallyProcessed = true;
            npc.EnqueueResponse(_getResponseResponse);
            npc.AudioManager.SetWaitForCharacterLipSync(false);
        }
        switch (_frameType)
        {
            case FrameType.Visemes:
                while (_visemesFrames.Count != 0) npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueFrame(_visemesFrames.Dequeue());
                break;
            case FrameType.Blendshape:
                while (_blendShapeFrames.Count != 0) npc.convaiLipSync.ConvaiLipSyncApplicationBase.EnqueueFrame(_blendShapeFrames.Dequeue());
                break;
        }
    }

    /// <summary>
    /// True once enough frames have arrived to start streaming early:
    /// more than min(21, 70% of the expected total).
    /// </summary>
    public bool CanPartiallyProcess()
    {
        return _framesCaptured > Mathf.Min(21, _totalFrames * 0.7f);
    }

    /// <summary>True once every expected frame has been captured.</summary>
    public bool CanProcess()
    {
        return _framesCaptured == _totalFrames;
    }
}
}

View File

@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 31db1a9457d64f3d936ff7f5aabfb193
timeCreated: 1708491067

Some files were not shown because too many files have changed in this diff Show More