Files
Master-Arbeit-Tom-Hempel/Unity-Master/Assets/Scripts/Multiplayer/ConvaiSimpleUDPAudioSender.cs
2025-09-26 16:40:21 +02:00

910 lines
35 KiB
C#

using System;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Convai.Scripts.Runtime.LoggerSystem;
using Convai.Scripts.Runtime.UI;
using UnityEngine;
using UnityEngine.XR;
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.XR;
using System.IO;
namespace Convai.Scripts.Runtime.Multiplayer
{
/// <summary>
/// Simplified version of UDP Audio Sender that avoids complex chunking
/// This version sends smaller, more frequent packets to avoid array bounds issues
/// </summary>
public class ConvaiSimpleUDPAudioSender : MonoBehaviour
{
    [Header("Network Settings")]
    // Destination for audio datagrams; overwritten from NetworkConfig in Start() when useGlobalNetworkConfig is set.
    [SerializeField] private string targetIP = "127.0.0.1";
    [SerializeField] private int targetPort = 12345;
    // When true, targetIP/targetPort are read from networkConfigAsset (or NetworkConfig.Instance) in Start().
    [SerializeField] private bool useGlobalNetworkConfig = true;
    [SerializeField] private NetworkConfig networkConfigAsset;
    [Header("Audio Settings")]
    // Microphone capture rate in Hz.
    [SerializeField] private int recordingFrequency = 16000;
    // Length in seconds of the looping microphone clip (ring buffer).
    [SerializeField] private int recordingLength = 10;
    [SerializeField] private int samplesPerPacket = 1024; // Number of audio samples per packet (not bytes)
    [Header("UI")]
    // Keyboard key used for push-to-talk / toggle-to-talk.
    [SerializeField] private KeyCode talkKey = KeyCode.T;
    // true: record while held; false: each press toggles recording.
    [SerializeField] private bool useHoldToTalk = true;
    [SerializeField] private KeyCode controllerTalkButton = KeyCode.JoystickButton0; // A button on most controllers
    [SerializeField] private bool useXRControllerAButton = true; // OpenXR primaryButton
    [SerializeField] private XRNode xrControllerNode = XRNode.RightHand; // Quest A button is on right hand
    [SerializeField] private bool useInputSystemXR = true; // Use new Input System for XR button
    [SerializeField] private bool enableBButtonTest = true; // Press B/secondary to send test packet
    [SerializeField] private InputActionReference sendVoiceActionReference; // Optional external action (e.g., QuestPro primaryButton)
    [Header("Debug")]
    [SerializeField] private bool enableDebugLogging = true;
    // Keyboard key that sends a plaintext connectivity probe (see TestConnection).
    [SerializeField] private KeyCode testConnectionKey = KeyCode.C;
    private UdpClient _udpClient;
    private IPEndPoint _targetEndPoint;
    // Looping microphone clip created by Microphone.Start.
    private AudioClip _audioClip;
    private bool _isRecording = false;
    // Cancels the audio-processing loop; lives for the component's lifetime.
    private CancellationTokenSource _cancellationTokenSource;
    // Cancels the ACK listener loop.
    private CancellationTokenSource _ackCancellationTokenSource;
    // Read position in the microphone ring buffer at the last poll.
    private int _lastMicrophonePosition = 0;
    // NOTE(review): allocated in InitializeAudio but never read in this file — candidate for removal.
    private float[] _audioBuffer;
    private string _selectedMicrophone;
    // Monotonic sequence number stamped into each outgoing audio packet.
    private int _packetSequence = 0;
    // Set by ListenForAcks when the receiver acknowledges the START packet (volatile: written from a background task).
    private volatile bool _startAckReceived = false;
    // Previous-frame state for edge detection when polling the raw XR button.
    private bool _xrAButtonPrevPressed = false;
    private InputAction _xrTalkAction;
    private InputAction _xrTestAction;
    // True when the talk action came from sendVoiceActionReference rather than being created here.
    private bool _usingExternalTalkAction = false;
    private InputAction _externalTalkAction;
    // Protocol constants
    private const uint AUDIO_MAGIC = 0xC0A1;  // magic for outgoing audio/control packets
    private const uint ACK_MAGIC = 0xC0A2;    // magic for incoming ACK packets
    private const byte FLAG_AUDIO = 0;        // normal audio payload
    private const byte FLAG_END = 1;          // end-of-recording control packet
    private const byte FLAG_START = 2;        // start-of-recording control packet
    // Raised with true/false when recording starts/stops.
    public event Action<bool> OnRecordingStateChanged;
    [Header("Recording Storage")]
    // When true, a local WAV copy of each recording is written to persistentDataPath.
    [SerializeField] private bool saveLocalAudio = true;
    // Sample rate written into the WAV header (should match recordingFrequency).
    [SerializeField] private int localSampleRate = 16000;
    [SerializeField] private string localFilePrefix = "sender_audio";
    // Guards _localSamples and the save-in-progress check.
    private readonly object _localAudioLock = new object();
    // PCM16 samples buffered for the local WAV copy of the current session.
    private readonly System.Collections.Generic.List<short> _localSamples = new System.Collections.Generic.List<short>(128 * 1024);
    private bool _localSaveInProgress = false;
    // UTC time the current recording session began; used to timestamp the WAV file name.
    private DateTime _localSessionStart;
    // Cached on the main thread because Application.persistentDataPath is used from a worker task.
    private string _persistentDataPath;
private void Start()
{
    // Apply global config if enabled
    if (useGlobalNetworkConfig)
    {
        // Prefer the explicitly assigned asset; fall back to the singleton instance.
        var cfg = networkConfigAsset != null ? networkConfigAsset : NetworkConfig.Instance;
        if (cfg != null)
        {
            targetIP = cfg.ipAddress;
            targetPort = cfg.multiplayerAudioPort;
        }
    }
    InitializeNetwork();
    InitializeAudio();
    // Cache on the main thread: used later from a background save task.
    _persistentDataPath = Application.persistentDataPath;
    _cancellationTokenSource = new CancellationTokenSource();
    _ackCancellationTokenSource = new CancellationTokenSource();
    // Start ACK listener (fire-and-forget background task; exits on cancel or socket close)
    _ = ListenForAcks(_ackCancellationTokenSource.Token);
    // Setup Input System action for XR A/primary button
    if (useInputSystemXR)
    {
        if (sendVoiceActionReference != null && sendVoiceActionReference.action != null)
        {
            // An externally authored action (e.g. from an input actions asset) takes priority.
            SetupExternalTalkInputAction(sendVoiceActionReference.action);
        }
        else
        {
            SetupXRTalkInputAction();
        }
        if (enableBButtonTest)
        {
            SetupXRTestInputAction();
        }
    }
}
private void Update()
{
    // Poll keyboard/legacy-controller input each frame; Input System XR
    // callbacks are event-driven and handled in the Setup*InputAction methods.
    HandleInput();
}
private void OnDestroy()
{
    // Stop an in-flight recording first so the END control packet is sent
    // before the token sources and socket are torn down.
    StopRecording();
    _cancellationTokenSource?.Cancel();
    _cancellationTokenSource?.Dispose();
    _ackCancellationTokenSource?.Cancel();
    _ackCancellationTokenSource?.Dispose();
    // Tear down whichever talk-action variant was set up in Start().
    if (_usingExternalTalkAction)
        TeardownExternalTalkInputAction();
    else
        TeardownXRTalkInputAction();
    TeardownXRTestInputAction();
    // Closing the client also unblocks ListenForAcks via ObjectDisposedException.
    _udpClient?.Close();
}
/// <summary>
/// Creates the UDP socket and resolves the configured destination endpoint.
/// Failures are logged rather than thrown so the component still loads.
/// </summary>
private void InitializeNetwork()
{
    try
    {
        _udpClient = new UdpClient();
        IPAddress destinationAddress = IPAddress.Parse(targetIP);
        _targetEndPoint = new IPEndPoint(destinationAddress, targetPort);
        ConvaiLogger.Info($"Simple UDP Audio Sender initialized. Target: {targetIP}:{targetPort}", ConvaiLogger.LogCategory.Character);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Failed to initialize UDP client: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
private void InitializeAudio()
{
    // Resolve which microphone device to record from, then allocate the capture buffer.
    try
    {
        // Try to get selected microphone from Convai's UI system
        _selectedMicrophone = MicrophoneManager.Instance?.SelectedMicrophoneName;
    }
    catch (Exception ex)
    {
        // If UISaveLoadSystem / MicrophoneManager isn't initialized yet, fall back to first available device
        ConvaiLogger.Warn($"MicrophoneManager not available; falling back to default device. {ex.Message}", ConvaiLogger.LogCategory.Character);
        _selectedMicrophone = null;
    }
    // Fallback: pick the first available microphone if none selected or manager unavailable
    if (string.IsNullOrEmpty(_selectedMicrophone))
    {
        var devices = Microphone.devices;
        if (devices != null && devices.Length > 0)
        {
            _selectedMicrophone = devices[0];
            ConvaiLogger.Info($"Using default microphone: {_selectedMicrophone}", ConvaiLogger.LogCategory.Character);
        }
    }
    // Sized to one full looping clip (frequency * seconds).
    // NOTE(review): _audioBuffer is never read in this class — confirm before removing.
    _audioBuffer = new float[recordingFrequency * recordingLength];
    if (string.IsNullOrEmpty(_selectedMicrophone))
    {
        ConvaiLogger.Error("No microphone available or selected for UDP audio sender", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Per-frame polling for keyboard / legacy controller talk input, plus raw
/// XR button edge detection when the Input System path is disabled.
/// </summary>
private void HandleInput()
{
    bool xrPressedThisFrame = false;
    bool xrReleasedThisFrame = false;
    if (useXRControllerAButton && !useInputSystemXR)
    {
        // Edge-detect the raw XR primary button against last frame's state.
        bool pressedNow = GetXRPrimaryButtonPressed(xrControllerNode);
        xrPressedThisFrame = pressedNow && !_xrAButtonPrevPressed;
        xrReleasedThisFrame = _xrAButtonPrevPressed && !pressedNow;
        _xrAButtonPrevPressed = pressedNow;
    }
    bool talkDown = Input.GetKeyDown(talkKey) || Input.GetKeyDown(controllerTalkButton) || xrPressedThisFrame;
    bool talkUp = Input.GetKeyUp(talkKey) || Input.GetKeyUp(controllerTalkButton) || xrReleasedThisFrame;
    if (useHoldToTalk)
    {
        // Hold-to-talk: press starts, release stops.
        if (talkDown && !_isRecording)
            StartRecording();
        else if (talkUp && _isRecording)
            StopRecording();
    }
    else if (talkDown)
    {
        // Toggle mode: each press flips the recording state.
        if (_isRecording)
            StopRecording();
        else
            StartRecording();
    }
    // Connectivity probe hotkey.
    if (Input.GetKeyDown(testConnectionKey))
        TestConnection();
}
private void SetupXRTalkInputAction()
{
    // Builds a programmatic Input System button action for the XR primary
    // (A/X) button on the configured hand, with a gamepad fallback.
    try
    {
        // Create button action
        _xrTalkAction = new InputAction("XRTalk", InputActionType.Button);
        string handTag = xrControllerNode == XRNode.LeftHand ? "{LeftHand}" : "{RightHand}";
        // Bind to common XR controller primary/A button paths
        _xrTalkAction.AddBinding($"<XRController>{handTag}/primaryButton");
        _xrTalkAction.AddBinding($"<OculusTouchController>{handTag}/primaryButton");
        _xrTalkAction.AddBinding($"<MetaTouchController>{handTag}/primaryButton");
        _xrTalkAction.AddBinding($"<QuestProTouchController>{handTag}/primaryButton");
        _xrTalkAction.AddBinding($"<XRController>{handTag}/buttonSouth");
        // Gamepad A as additional fallback (useful in editor)
        _xrTalkAction.AddBinding("<Gamepad>/buttonSouth");
        if (useHoldToTalk)
        {
            // Hold-to-talk: record while the button is held.
            _xrTalkAction.started += ctx => { if (!_isRecording) StartRecording(); };
            _xrTalkAction.canceled += ctx => { if (_isRecording) StopRecording(); };
        }
        else
        {
            // Toggle mode: each press flips the recording state.
            _xrTalkAction.started += ctx => { if (_isRecording) StopRecording(); else StartRecording(); };
        }
        _xrTalkAction.Enable();
    }
    catch (Exception ex)
    {
        ConvaiLogger.Warn($"Failed to setup XR InputAction: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Disables and disposes the programmatically created talk action.
/// Safe to call when the action was never set up; teardown errors are ignored.
/// </summary>
private void TeardownXRTalkInputAction()
{
    try
    {
        if (_xrTalkAction == null)
            return;
        _xrTalkAction.Disable();
        _xrTalkAction.Dispose();
        _xrTalkAction = null;
    }
    catch (Exception)
    {
        // Swallow teardown errors (e.g. already disposed) during shutdown.
    }
}
private void SetupExternalTalkInputAction(InputAction action)
{
    // Subscribes to a caller-provided Input System action (e.g. a Quest Pro
    // primaryButton binding from an actions asset) instead of the programmatic
    // bindings created by SetupXRTalkInputAction.
    try
    {
        _externalTalkAction = action;
        _usingExternalTalkAction = true;
        // NOTE(review): these handlers are anonymous lambdas, so the `-=`
        // statements in TeardownExternalTalkInputAction cannot match them —
        // keep delegate references in fields if clean unsubscription is needed.
        if (useHoldToTalk)
        {
            _externalTalkAction.started += ctx => { if (!_isRecording) StartRecording(); };
            _externalTalkAction.canceled += ctx => { if (_isRecording) StopRecording(); };
        }
        else
        {
            _externalTalkAction.started += ctx => { if (_isRecording) StopRecording(); else StartRecording(); };
        }
        // Enable only if the owner has not already done so.
        if (!_externalTalkAction.enabled)
            _externalTalkAction.Enable();
    }
    catch (Exception ex)
    {
        ConvaiLogger.Warn($"Failed to setup external talk action: {ex.Message}", ConvaiLogger.LogCategory.Character);
        _usingExternalTalkAction = false;
        _externalTalkAction = null;
        // Fallback to programmatic setup
        SetupXRTalkInputAction();
    }
}
private void TeardownExternalTalkInputAction()
{
    // Detaches from the externally supplied talk action without disabling or
    // disposing it (it may be shared with other systems).
    try
    {
        if (_externalTalkAction != null)
        {
            // Remove handlers; do not disable/dispose external actions
            // NOTE(review): each `-=` below constructs a brand-new lambda
            // instance, which never equals the instance added in
            // SetupExternalTalkInputAction, so nothing is actually removed —
            // the original handlers remain subscribed to the external action.
            // Fix by storing the delegates in fields at subscribe time and
            // unsubscribing those same references here.
            if (useHoldToTalk)
            {
                _externalTalkAction.started -= ctx => { if (!_isRecording) StartRecording(); };
                _externalTalkAction.canceled -= ctx => { if (_isRecording) StopRecording(); };
            }
            else
            {
                _externalTalkAction.started -= ctx => { if (_isRecording) StopRecording(); else StartRecording(); };
            }
        }
    }
    catch (Exception)
    {
        // ignore
    }
    finally
    {
        // Always clear local references, even if unsubscription failed.
        _externalTalkAction = null;
        _usingExternalTalkAction = false;
    }
}
/// <summary>
/// Binds the B/secondary XR button (plus gamepad east as an editor fallback)
/// to sending a plaintext test packet.
/// </summary>
private void SetupXRTestInputAction()
{
    try
    {
        _xrTestAction = new InputAction("XRTest", InputActionType.Button);
        string handTag = xrControllerNode == XRNode.LeftHand ? "{LeftHand}" : "{RightHand}";
        // B button is typically secondaryButton or buttonEast depending on layout.
        string[] bindingPaths =
        {
            $"<XRController>{handTag}/secondaryButton",
            $"<OculusTouchController>{handTag}/secondaryButton",
            $"<MetaTouchController>{handTag}/secondaryButton",
            $"<XRController>{handTag}/buttonEast",
            "<Gamepad>/buttonEast",
        };
        foreach (string path in bindingPaths)
        {
            _xrTestAction.AddBinding(path);
        }
        _xrTestAction.started += ctx => { SendTestPacket(); };
        _xrTestAction.Enable();
    }
    catch (Exception ex)
    {
        ConvaiLogger.Warn($"Failed to setup XR Test InputAction: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Disables and disposes the programmatically created test action.
/// Safe to call when the action was never set up; teardown errors are ignored.
/// </summary>
private void TeardownXRTestInputAction()
{
    try
    {
        if (_xrTestAction == null)
            return;
        _xrTestAction.Disable();
        _xrTestAction.Dispose();
        _xrTestAction = null;
    }
    catch (Exception)
    {
        // Swallow teardown errors during shutdown.
    }
}
/// <summary>
/// Sends a small plaintext datagram to the receiver so connectivity can be
/// verified without recording audio. async void: invoked from an input callback.
/// </summary>
private async void SendTestPacket()
{
    if (_udpClient == null || _targetEndPoint == null)
    {
        ConvaiLogger.Error("UDP client not initialized for test packet", ConvaiLogger.LogCategory.Character);
        return;
    }
    try
    {
        byte[] payload = System.Text.Encoding.UTF8.GetBytes("Hello this is a Test");
        await _udpClient.SendAsync(payload, payload.Length, _targetEndPoint);
        ConvaiLogger.Info("Sent test packet: 'Hello this is a Test'", ConvaiLogger.LogCategory.Character);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Failed to send test packet: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Polls the primary (A/X) button state of the XR device on the given hand.
/// Any failure — invalid device, missing feature, exception — reads as "not pressed".
/// </summary>
private bool GetXRPrimaryButtonPressed(XRNode hand)
{
    try
    {
        // `var` avoids the InputDevice name clash between UnityEngine.XR and the Input System.
        var device = InputDevices.GetDeviceAtXRNode(hand);
        if (device.isValid && device.TryGetFeatureValue(UnityEngine.XR.CommonUsages.primaryButton, out bool pressed))
        {
            return pressed;
        }
    }
    catch (Exception)
    {
        // ignore errors and treat as not pressed
    }
    return false;
}
/// <summary>
/// Begins microphone capture and starts streaming the audio over UDP.
/// No-op when already recording or when no microphone is available.
/// Raises <see cref="OnRecordingStateChanged"/> with true on success.
/// </summary>
public void StartRecording()
{
    if (_isRecording || string.IsNullOrEmpty(_selectedMicrophone))
        return;
    try
    {
        // Use looping clip so we can handle ring-buffer wrap-around reliably
        _audioClip = Microphone.Start(_selectedMicrophone, true, recordingLength, recordingFrequency);
        if (_audioClip == null)
        {
            // Microphone.Start returns null when the device fails to open.
            // Bail out before flagging the recording as active so state and
            // the OnRecordingStateChanged event stay consistent.
            ConvaiLogger.Error($"Failed to start microphone '{_selectedMicrophone}'", ConvaiLogger.LogCategory.Character);
            return;
        }
        _isRecording = true;
        _lastMicrophonePosition = 0;
        _packetSequence = 0;
        _startAckReceived = false;
        _localSessionStart = DateTime.UtcNow;
        // Reset the local WAV buffer for the new session.
        lock (_localAudioLock)
        {
            _localSamples.Clear();
        }
        ConvaiLogger.Info("Started recording for UDP transmission (Simple)", ConvaiLogger.LogCategory.Character);
        OnRecordingStateChanged?.Invoke(true);
        // Send START control and wait briefly for ACK to ensure receiver is ready
        _ = SendStartOfRecordingSignalAndAwaitAck();
        // Start continuous audio processing
        _ = ProcessAudioContinuously(_cancellationTokenSource.Token);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Failed to start recording: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
public void StopRecording()
{
    // Ends microphone capture, notifies the receiver with an END packet, and
    // optionally persists the captured audio locally. Safe to call when idle.
    if (!_isRecording)
        return;
    try
    {
        Microphone.End(_selectedMicrophone);
        // Clearing the flag stops the ProcessAudioContinuously loop.
        // NOTE(review): up to one poll interval (~30 ms) of trailing audio
        // captured since the last poll is dropped here — confirm acceptable.
        _isRecording = false;
        ConvaiLogger.Info("Stopped recording for UDP transmission (Simple)", ConvaiLogger.LogCategory.Character);
        OnRecordingStateChanged?.Invoke(false);
        // Send end-of-recording signal
        SendEndOfRecordingSignal();
        if (saveLocalAudio)
        {
            TrySaveLocalAudioAsync();
        }
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Failed to stop recording: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
private async Task ProcessAudioContinuously(CancellationToken cancellationToken)
{
    // Polling loop that drains newly captured samples from the looping
    // microphone clip and streams them over UDP. Runs until recording stops,
    // the microphone dies, or the token is cancelled (component destruction).
    while (_isRecording && !cancellationToken.IsCancellationRequested)
    {
        try
        {
            await Task.Delay(30, cancellationToken); // Process ~33 times/sec for better capture granularity
            if (_audioClip == null || !Microphone.IsRecording(_selectedMicrophone))
                break;
            int currentMicrophonePosition = Microphone.GetPosition(_selectedMicrophone);
            int clipSamples = _audioClip.samples;
            if (clipSamples <= 0)
                continue;
            // Compute how many new samples are available, accounting for wrap-around
            int audioDataLength = currentMicrophonePosition - _lastMicrophonePosition;
            bool wrapped = false;
            if (audioDataLength < 0)
            {
                // Write head passed the end of the ring buffer since the last poll.
                audioDataLength += clipSamples;
                wrapped = true;
            }
            if (audioDataLength <= 0)
                continue;
            if (!wrapped)
            {
                // Contiguous region, read exactly the new samples
                int segmentLen = audioDataLength;
                var segment = new float[segmentLen];
                _audioClip.GetData(segment, _lastMicrophonePosition);
                await SendAudioDataInChunks(segment, segmentLen);
            }
            else
            {
                // Wrapped: send tail [lastPos .. end) then head [0 .. currentPos)
                int firstLen = clipSamples - _lastMicrophonePosition;
                if (firstLen > 0)
                {
                    var firstSeg = new float[firstLen];
                    _audioClip.GetData(firstSeg, _lastMicrophonePosition);
                    await SendAudioDataInChunks(firstSeg, firstLen);
                }
                int secondLen = currentMicrophonePosition;
                if (secondLen > 0)
                {
                    var secondSeg = new float[secondLen];
                    _audioClip.GetData(secondSeg, 0);
                    await SendAudioDataInChunks(secondSeg, secondLen);
                }
            }
            // Advance only after the whole region has been handed off;
            // CreateSimpleAudioPacket also reads this field for stream positions.
            _lastMicrophonePosition = currentMicrophonePosition;
        }
        catch (Exception ex) when (!(ex is OperationCanceledException))
        {
            // Any non-cancellation failure aborts the loop; cancellation propagates silently.
            ConvaiLogger.Error($"Error in audio processing: {ex.Message}", ConvaiLogger.LogCategory.Character);
            break;
        }
    }
}
private async Task SendAudioDataInChunks(float[] audioData, int totalSamples)
{
    // Splits a captured segment into packets of at most samplesPerPacket
    // samples and sends them sequentially, pacing the network with a short
    // delay between packets. Also mirrors each chunk into the local WAV buffer.
    int processedSamples = 0;
    while (processedSamples < totalSamples)
    {
        try
        {
            int remainingSamples = totalSamples - processedSamples;
            int currentChunkSamples = Mathf.Min(samplesPerPacket, remainingSamples);
            // Create a simple packet structure
            byte[] packet = CreateSimpleAudioPacket(audioData, processedSamples, currentChunkSamples);
            // Buffer locally for saving
            if (saveLocalAudio)
            {
                AppendLocalAudio(audioData, processedSamples, currentChunkSamples);
            }
            // Send the packet
            await _udpClient.SendAsync(packet, packet.Length, _targetEndPoint);
            if (enableDebugLogging && _packetSequence % 10 == 0) // Log every 10th packet
            {
                ConvaiLogger.DebugLog($"Sent packet {_packetSequence} with {currentChunkSamples} samples", ConvaiLogger.LogCategory.Character);
            }
            processedSamples += currentChunkSamples;
            _packetSequence++;
            // Small delay to avoid overwhelming the network
            await Task.Delay(10);
        }
        catch (Exception ex)
        {
            // Abort the remainder of this segment on any send failure.
            ConvaiLogger.Error($"Failed to send audio chunk: {ex.Message}", ConvaiLogger.LogCategory.Character);
            break;
        }
    }
}
/// <summary>
/// Builds one audio datagram from a slice of float samples.
/// Packet layout (BitConverter/native endianness, matching the header writes):
///   4 bytes: magic number (AUDIO_MAGIC)
///   4 bytes: packet sequence number
///   4 bytes: sample count in this packet
///   4 bytes: start position in the stream
///   1 byte : flags (FLAG_AUDIO for normal audio)
///   N bytes: PCM16 audio data
/// </summary>
private byte[] CreateSimpleAudioPacket(float[] audioData, int startIndex, int sampleCount)
{
    const int headerSize = 17; // 4 + 4 + 4 + 4 + 1
    int audioDataSize = sampleCount * sizeof(short);
    byte[] packet = new byte[headerSize + audioDataSize];
    int offset = 0;
    // Magic number
    BitConverter.GetBytes(AUDIO_MAGIC).CopyTo(packet, offset);
    offset += 4;
    // Packet sequence
    BitConverter.GetBytes(_packetSequence).CopyTo(packet, offset);
    offset += 4;
    // Sample count
    BitConverter.GetBytes(sampleCount).CopyTo(packet, offset);
    offset += 4;
    // Start position. NOTE(review): _lastMicrophonePosition only advances
    // once per poll cycle, so this is coarse — confirm the receiver tolerates it.
    BitConverter.GetBytes(_lastMicrophonePosition + startIndex).CopyTo(packet, offset);
    offset += 4;
    // Flags (0 for normal audio)
    packet[offset] = FLAG_AUDIO;
    offset += 1;
    // Convert float samples to PCM16. Clamp to [-1, 1] first so loud input
    // cannot overflow the short conversion and wrap around (this matches the
    // clamping already done in AppendLocalAudio).
    for (int i = 0; i < sampleCount; i++)
    {
        float sample = Mathf.Clamp(audioData[startIndex + i], -1f, 1f);
        short shortSample = (short)(sample * short.MaxValue);
        byte[] shortBytes = BitConverter.GetBytes(shortSample);
        packet[offset] = shortBytes[0];
        packet[offset + 1] = shortBytes[1];
        offset += 2;
    }
    return packet;
}
/// <summary>
/// Notifies the receiver that the stream is over by sending a header-only
/// packet carrying FLAG_END. Failures are logged, never thrown.
/// </summary>
private void SendEndOfRecordingSignal()
{
    try
    {
        // Create end packet
        byte[] packet = new byte[17]; // Header only, no audio data
        int offset = 0;
        // Magic number
        BitConverter.GetBytes(AUDIO_MAGIC).CopyTo(packet, offset);
        offset += 4;
        // Packet sequence
        BitConverter.GetBytes(_packetSequence).CopyTo(packet, offset);
        offset += 4;
        // Sample count (0 for end signal)
        BitConverter.GetBytes(0).CopyTo(packet, offset);
        offset += 4;
        // Start position
        BitConverter.GetBytes(_lastMicrophonePosition).CopyTo(packet, offset);
        offset += 4;
        // Flags (1 for end of recording)
        packet[offset] = FLAG_END;
        // Send synchronously: the original fire-and-forget SendAsync discarded
        // the task, so any send failure faulted an unobserved task and this
        // catch block never saw it. A 17-byte datagram send is effectively instant.
        _udpClient.Send(packet, packet.Length, _targetEndPoint);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Failed to send end signal: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Converts a slice of float samples to PCM16 (clamped to [-1, 1]) and
/// appends it to the local WAV buffer under the buffer lock.
/// </summary>
private void AppendLocalAudio(float[] source, int startIndex, int count)
{
    if (source == null || count <= 0)
        return;
    lock (_localAudioLock)
    {
        int end = startIndex + count;
        for (int i = startIndex; i < end; i++)
        {
            float clamped = Mathf.Clamp(source[i], -1f, 1f);
            _localSamples.Add((short)(clamped * short.MaxValue));
        }
    }
}
/// <summary>
/// Snapshots the buffered session samples and writes them to a timestamped
/// WAV file in persistentDataPath on a worker thread. At most one save runs
/// at a time; an empty buffer is a logged no-op.
/// </summary>
private void TrySaveLocalAudioAsync()
{
    short[] dataToSave;
    DateTime sessionStart;
    lock (_localAudioLock)
    {
        // Check-and-set the in-progress flag under the lock: the original
        // checked it outside, so two rapid stop events could both pass the
        // check and start overlapping saves.
        if (_localSaveInProgress)
            return;
        if (_localSamples.Count == 0)
        {
            if (enableDebugLogging)
                ConvaiLogger.Info("No local audio to save.", ConvaiLogger.LogCategory.Character);
            return;
        }
        dataToSave = _localSamples.ToArray();
        _localSamples.Clear();
        sessionStart = _localSessionStart;
        _localSaveInProgress = true;
    }
    Task.Run(async () =>
    {
        try
        {
            // Small delay to allow any final chunks to enqueue
            await Task.Delay(100);
            string timestamp = sessionStart.ToLocalTime().ToString("yyyyMMdd_HHmmss");
            string fileName = $"{localFilePrefix}_{timestamp}.wav";
            // _persistentDataPath was cached on the main thread in Start().
            string path = Path.Combine(_persistentDataPath, fileName);
            WriteWav(path, dataToSave, localSampleRate, 1);
            ConvaiLogger.Info($"Saved local audio to: {path}", ConvaiLogger.LogCategory.Character);
        }
        catch (Exception ex)
        {
            ConvaiLogger.Error($"Failed to save local audio: {ex.Message}", ConvaiLogger.LogCategory.Character);
        }
        finally
        {
            _localSaveInProgress = false;
        }
    });
}
/// <summary>
/// Writes PCM16 samples to <paramref name="path"/> as a canonical
/// 44-byte-header RIFF/WAVE file.
/// </summary>
private void WriteWav(string path, short[] samples, int sampleRate, int channels)
{
    const int bitsPerSample = 16;
    int bytesPerSample = bitsPerSample / 8;
    int dataSize = samples.Length * bytesPerSample;
    // BinaryWriter disposes the underlying stream when it is disposed.
    using (var writer = new BinaryWriter(new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.None)))
    {
        // RIFF chunk descriptor
        writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
        writer.Write(36 + dataSize); // total file size minus the 8-byte RIFF header
        writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
        // "fmt " sub-chunk
        writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
        writer.Write(16);                                     // PCM fmt chunk length
        writer.Write((short)1);                               // audio format: PCM
        writer.Write((short)channels);
        writer.Write(sampleRate);
        writer.Write(sampleRate * channels * bytesPerSample); // byte rate
        writer.Write((short)(channels * bytesPerSample));     // block align
        writer.Write((short)bitsPerSample);
        // "data" sub-chunk
        writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
        writer.Write(dataSize);
        foreach (short sample in samples)
        {
            writer.Write(sample);
        }
    }
}
private async Task SendStartOfRecordingSignalAndAwaitAck()
{
    // Sends a START control packet (sequence -1, FLAG_START) and polls for
    // the receiver's ACK, retrying up to maxAttempts. Best-effort: recording
    // proceeds even if no ACK ever arrives.
    try
    {
        const int maxAttempts = 3;
        const int ackTimeoutMs = 250;
        for (int attempt = 1; attempt <= maxAttempts && !_startAckReceived; attempt++)
        {
            // Build START control packet (no audio, special flag)
            byte[] packet = new byte[17];
            int offset = 0;
            BitConverter.GetBytes(AUDIO_MAGIC).CopyTo(packet, offset);
            offset += 4;
            // Use -1 as the special sequence for START control
            BitConverter.GetBytes(-1).CopyTo(packet, offset);
            offset += 4;
            BitConverter.GetBytes(0).CopyTo(packet, offset);
            offset += 4;
            BitConverter.GetBytes(_lastMicrophonePosition).CopyTo(packet, offset);
            offset += 4;
            packet[offset] = FLAG_START;
            await _udpClient.SendAsync(packet, packet.Length, _targetEndPoint);
            // Wait for ACK (flag is set by ListenForAcks) in 10 ms steps up to the timeout.
            int waited = 0;
            while (!_startAckReceived && waited < ackTimeoutMs)
            {
                await Task.Delay(10);
                waited += 10;
            }
            if (_startAckReceived)
            {
                if (enableDebugLogging)
                    ConvaiLogger.DebugLog("Received START ACK from receiver", ConvaiLogger.LogCategory.Character);
                break;
            }
            else if (enableDebugLogging)
            {
                ConvaiLogger.Warn($"No START ACK (attempt {attempt}/{maxAttempts}), retrying...", ConvaiLogger.LogCategory.Character);
            }
        }
    }
    catch (Exception ex)
    {
        ConvaiLogger.Warn($"Error during START ACK process: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>
/// Background loop that receives ACK datagrams on the sender's ephemeral
/// local port (same UdpClient). An ACK whose sequence field is -1
/// acknowledges the START control packet. Exits on cancellation or when the
/// socket is closed during teardown.
/// </summary>
private async Task ListenForAcks(CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        try
        {
            var result = await _udpClient.ReceiveAsync();
            var data = result.Buffer;
            // Minimum ACK size: 4-byte magic + 4-byte sequence.
            if (data == null || data.Length < 8)
                continue;
            uint magic = BitConverter.ToUInt32(data, 0);
            if (magic != ACK_MAGIC)
                continue;
            int seq = BitConverter.ToInt32(data, 4);
            if (seq == -1)
            {
                _startAckReceived = true;
            }
        }
        catch (ObjectDisposedException)
        {
            // Socket closed during shutdown — stop listening.
            break;
        }
        catch (Exception)
        {
            // Transient receive error: back off briefly instead of retrying
            // immediately (the original looped with no delay, busy-spinning a
            // thread when the error persisted, e.g. a null/failed client).
            try
            {
                await Task.Delay(50, token);
            }
            catch (OperationCanceledException)
            {
                break;
            }
        }
    }
}
// Public methods for external control
/// <summary>
/// Retargets the sender at runtime. Throws <see cref="FormatException"/> on
/// an invalid IP (same as before), but now validates before mutating state.
/// </summary>
public void SetTargetEndpoint(string ip, int port)
{
    // Parse first: the original assigned targetIP/targetPort before
    // IPAddress.Parse could throw, leaving the fields half-updated and
    // inconsistent with _targetEndPoint on bad input.
    IPAddress address = IPAddress.Parse(ip);
    targetIP = ip;
    targetPort = port;
    _targetEndPoint = new IPEndPoint(address, port);
}
/// <summary>True while microphone capture and UDP streaming are active.</summary>
public bool IsRecording => _isRecording;
// Debug and testing methods
/// <summary>
/// Sends a small plaintext probe datagram to the configured endpoint and
/// logs the outcome. async void: intended for direct hotkey/UI invocation.
/// </summary>
public async void TestConnection()
{
    if (_udpClient == null)
    {
        ConvaiLogger.Error("UDP client not initialized", ConvaiLogger.LogCategory.Character);
        return;
    }
    try
    {
        ConvaiLogger.Info($"Testing connection to {targetIP}:{targetPort}", ConvaiLogger.LogCategory.Character);
        // Send a simple test packet
        byte[] testData = System.Text.Encoding.UTF8.GetBytes("CONVAI_TEST_CONNECTION");
        await _udpClient.SendAsync(testData, testData.Length, _targetEndPoint);
        ConvaiLogger.Info("Test packet sent successfully", ConvaiLogger.LogCategory.Character);
    }
    catch (Exception ex)
    {
        ConvaiLogger.Error($"Connection test failed: {ex.Message}", ConvaiLogger.LogCategory.Character);
    }
}
/// <summary>Dumps the sender's current configuration and runtime state to the log.</summary>
public void ShowNetworkStatus()
{
    var category = ConvaiLogger.LogCategory.Character;
    ConvaiLogger.Info($"=== Network Status ===", category);
    ConvaiLogger.Info($"Target: {targetIP}:{targetPort}", category);
    ConvaiLogger.Info($"UDP Client: {(_udpClient != null ? "Initialized" : "Not initialized")}", category);
    ConvaiLogger.Info($"Recording: {_isRecording}", category);
    ConvaiLogger.Info($"Microphone: {_selectedMicrophone}", category);
    ConvaiLogger.Info($"Packets sent: {_packetSequence}", category);
    var localEndPoint = _udpClient?.Client?.LocalEndPoint;
    if (localEndPoint != null)
    {
        ConvaiLogger.Info($"Local endpoint: {localEndPoint}", category);
    }
}
}
}