Add Listen Server networking: exclusive NPC lock, Opus audio broadcast, LOD

- Replicate conversation state (bNetIsConversing, NetConversatingPlayer) for exclusive NPC locking
- Opus encode TTS audio on server, multicast to all clients for shared playback
- Replicate emotion state (OnRep) so clients compute facial expressions locally
- Multicast speaking/interrupted/text events so lip sync and posture run locally
- Route mic audio via Server RPC (client→server→ElevenLabs WebSocket)
- LOD: cull audio beyond 30m, skip lip sync beyond 15m for non-speaker clients
- Auto-detect player disconnection and release NPC on authority
- InteractionComponent: skip occupied NPCs, auto-start conversation on selection
- No changes to LipSync, Posture, FacialExpression, MicCapture or AnimNodes

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
j.foucher 2026-02-28 11:04:07 +01:00
parent 453450d7eb
commit 765da966a2
3 changed files with 541 additions and 14 deletions

View File

@ -10,6 +10,9 @@
#include "Sound/SoundAttenuation.h"
#include "Sound/SoundWaveProcedural.h"
#include "GameFramework/Actor.h"
#include "GameFramework/PlayerController.h"
#include "Net/UnrealNetwork.h"
#include "VoiceModule.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_ElevenLabs, Log, All);
@ -22,6 +25,9 @@ UPS_AI_ConvAgent_ElevenLabsComponent::UPS_AI_ConvAgent_ElevenLabsComponent()
// Tick is used only to detect silence (agent stopped speaking).
// Disable if not needed for perf.
PrimaryComponentTick.TickInterval = 1.0f / 60.0f;
// Enable network replication for this component.
SetIsReplicated(true);
}
// ─────────────────────────────────────────────────────────────────────────────
@ -31,6 +37,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::BeginPlay()
{
Super::BeginPlay();
InitAudioPlayback();
InitOpusCodec();
// Auto-register with the interaction subsystem so InteractionComponents can discover us.
if (UWorld* World = GetWorld())
@ -120,6 +127,17 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
}
}
// Network: detect if the conversating player disconnected (server only).
if (GetOwnerRole() == ROLE_Authority && bNetIsConversing && NetConversatingPlayer)
{
if (!IsValid(NetConversatingPlayer) || !NetConversatingPlayer->GetPawn())
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("Conversating player disconnected — releasing NPC."));
ServerReleaseConversation_Implementation();
}
}
// Silence detection.
// ISSUE-8: broadcast OnAgentStoppedSpeaking OUTSIDE AudioQueueLock.
// OnProceduralUnderflow (audio thread) also acquires AudioQueueLock — if we broadcast
@ -172,6 +190,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
Tht, LastClosedTurnIndex);
}
OnAgentStoppedSpeaking.Broadcast();
// Network: notify all clients.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentStoppedSpeaking();
}
}
}
@ -179,6 +203,31 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
// Control
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation()
{
if (GetOwnerRole() == ROLE_Authority)
{
// Server (or standalone): open WebSocket directly.
// In networked mode, also set replicated conversation state.
if (GetWorld() && GetWorld()->GetNetMode() != NM_Standalone)
{
APlayerController* PC = GetWorld()->GetFirstPlayerController();
bNetIsConversing = true;
NetConversatingPlayer = PC;
}
StartConversation_Internal();
}
else
{
// Client: request conversation via Server RPC.
APlayerController* PC = GetWorld() ? GetWorld()->GetFirstPlayerController() : nullptr;
if (PC)
{
ServerRequestConversation(PC);
}
}
}
void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation_Internal()
{
if (!WebSocketProxy)
{
@ -214,17 +263,29 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation()
void UPS_AI_ConvAgent_ElevenLabsComponent::EndConversation()
{
	// Tear down the conversation. The pasted revision interleaved the old
	// pre-networking body with the new one: StopListening/StopAgentAudio were
	// duplicated and a braceless `if (WebSocketProxy)` swallowed the authority
	// check, so a client (which has no WebSocketProxy) could never reach the
	// release branch. This is the reconstructed, de-duplicated flow.
	if (GetOwnerRole() == ROLE_Authority)
	{
		StopListening();
		// ISSUE-4: StopListening() may set bWaitingForAgentResponse=true (normal
		// turn end path). Cancel it immediately — there is no response coming
		// because we are ending the session. Without this, TickComponent could
		// fire OnAgentResponseTimeout after EndConversation().
		bWaitingForAgentResponse = false;
		StopAgentAudio();
		if (WebSocketProxy)
		{
			WebSocketProxy->Disconnect();
			WebSocketProxy = nullptr;
		}
		// Reset replicated state so other players can talk to this NPC.
		bNetIsConversing = false;
		NetConversatingPlayer = nullptr;
	}
	else
	{
		// Client: request release via Server RPC.
		ServerReleaseConversation();
	}
}
@ -455,7 +516,14 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::FeedExternalAudio(const TArray<float>
if (MicAccumulationBuffer.Num() >= GetMicChunkMinBytes())
{
WebSocketProxy->SendAudioChunk(MicAccumulationBuffer);
if (GetOwnerRole() == ROLE_Authority)
{
if (WebSocketProxy) WebSocketProxy->SendAudioChunk(MicAccumulationBuffer);
}
else
{
ServerSendMicAudio(MicAccumulationBuffer);
}
MicAccumulationBuffer.Reset();
}
}
@ -485,6 +553,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleConnected(const FPS_AI_ConvAgen
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+0.00s] Agent connected. ConversationID=%s"), *Info.ConversationID);
OnAgentConnected.Broadcast(Info);
// Network: notify the requesting remote client that conversation started.
if (GetOwnerRole() == ROLE_Authority && NetConversatingPlayer)
{
ClientConversationStarted(Info);
}
// In Client turn mode (push-to-talk), the user controls listening manually via
// StartListening()/StopListening(). Auto-starting would leave the mic open
// permanently and interfere with push-to-talk — the T-release StopListening()
@ -518,6 +592,13 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleDisconnected(int32 StatusCode,
FScopeLock Lock(&MicSendLock);
MicAccumulationBuffer.Reset();
}
// Reset replicated state on disconnect.
if (GetOwnerRole() == ROLE_Authority)
{
bNetIsConversing = false;
NetConversatingPlayer = nullptr;
}
OnAgentDisconnected.Broadcast(StatusCode, Reason);
}
@ -545,6 +626,22 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAudioReceived(const TArray<uint
QueueBefore, (static_cast<float>(QueueBefore) / 16000.0f) * 1000.0f);
}
// Network: Opus-compress and broadcast to all clients before local playback.
if (GetOwnerRole() == ROLE_Authority && OpusEncoder.IsValid())
{
uint32 CompressedSize = static_cast<uint32>(OpusWorkBuffer.Num());
int32 Remainder = OpusEncoder->Encode(PCMData.GetData(), PCMData.Num(),
OpusWorkBuffer.GetData(), CompressedSize);
if (CompressedSize > 0)
{
TArray<uint8> CompressedData;
CompressedData.Append(OpusWorkBuffer.GetData(), CompressedSize);
MulticastReceiveAgentAudio(CompressedData);
}
}
// Server local playback (Listen Server is also a client).
EnqueueAgentAudio(PCMData);
// Forward raw PCM to any listeners (e.g. LipSync component for spectral analysis).
OnAgentAudioData.Broadcast(PCMData);
@ -568,6 +665,11 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponse(const FString& Re
if (bEnableAgentTextResponse)
{
OnAgentTextResponse.Broadcast(ResponseText);
// Network: broadcast text to all clients for subtitles.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentTextResponse(ResponseText);
}
}
}
@ -576,6 +678,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleInterrupted()
bWaitingForAgentResponse = false; // Interrupted — no response expected from previous turn.
StopAgentAudio();
OnAgentInterrupted.Broadcast();
// Network: notify all clients.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentInterrupted();
}
}
void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponseStarted()
@ -621,6 +729,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponseStarted()
TEXT("[T+%.2fs] [Turn %d] Agent generating. (%.2fs after turn end)"),
T, LastClosedTurnIndex, LatencyFromTurnEnd);
OnAgentStartedGenerating.Broadcast();
// Network: notify all clients.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentStartedGenerating();
}
}
void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponsePart(const FString& PartialText)
@ -628,6 +742,11 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponsePart(const FString
if (bEnableAgentPartialResponse)
{
OnAgentPartialResponse.Broadcast(PartialText);
// Network: broadcast partial text to all clients.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentPartialResponse(PartialText);
}
}
}
@ -828,6 +947,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::EnqueueAgentAudio(const TArray<uint8>
OnAgentStartedSpeaking.Broadcast();
// Network: notify all clients that agent started speaking.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentStartedSpeaking();
}
if (AudioPreBufferMs > 0)
{
// Pre-buffer: accumulate audio before starting playback.
@ -936,6 +1061,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StopAgentAudio()
T, LastClosedTurnIndex, AgentSpokeDuration, TotalTurnDuration);
OnAgentStoppedSpeaking.Broadcast();
// Network: notify all clients.
if (GetOwnerRole() == ROLE_Authority)
{
MulticastAgentStoppedSpeaking();
}
}
}
@ -969,7 +1100,14 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::OnMicrophoneDataCaptured(const TArray
if (MicAccumulationBuffer.Num() >= GetMicChunkMinBytes())
{
WebSocketProxy->SendAudioChunk(MicAccumulationBuffer);
if (GetOwnerRole() == ROLE_Authority)
{
if (WebSocketProxy) WebSocketProxy->SendAudioChunk(MicAccumulationBuffer);
}
else
{
ServerSendMicAudio(MicAccumulationBuffer);
}
MicAccumulationBuffer.Reset();
}
}
@ -992,3 +1130,265 @@ TArray<uint8> UPS_AI_ConvAgent_ElevenLabsComponent::FloatPCMToInt16Bytes(const T
return Out;
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Replication
// ─────────────────────────────────────────────────────────────────────────────
// Registers the component's replicated properties with the replication system.
// bNetIsConversing / NetConversatingPlayer implement the exclusive NPC lock;
// CurrentEmotion / CurrentEmotionIntensity let clients compute facial
// expressions locally via OnRep_Emotion.
void UPS_AI_ConvAgent_ElevenLabsComponent::GetLifetimeReplicatedProps(
	TArray<FLifetimeProperty>& OutLifetimeProps) const
{
	Super::GetLifetimeReplicatedProps(OutLifetimeProps);
	// Conversation lock state (OnRep_ConversationState on clients).
	DOREPLIFETIME(UPS_AI_ConvAgent_ElevenLabsComponent, bNetIsConversing);
	DOREPLIFETIME(UPS_AI_ConvAgent_ElevenLabsComponent, NetConversatingPlayer);
	// Emotion state (OnRep_Emotion re-broadcasts to local listeners).
	DOREPLIFETIME(UPS_AI_ConvAgent_ElevenLabsComponent, CurrentEmotion);
	DOREPLIFETIME(UPS_AI_ConvAgent_ElevenLabsComponent, CurrentEmotionIntensity);
}
// Client-side handler for replicated conversation-state changes.
// When the server clears the lock, stop any agent audio still buffered locally.
void UPS_AI_ConvAgent_ElevenLabsComponent::OnRep_ConversationState()
{
	const bool bConversationEnded = !bNetIsConversing;
	if (bConversationEnded && bAgentSpeaking)
	{
		// Conversation ended on the server while this client was still playing
		// queued audio — cut playback immediately.
		StopAgentAudio();
	}
}
// Client-side handler for replicated emotion changes.
// Re-fires the existing delegate so FacialExpressionComponent (and any other
// listener) reacts on clients exactly as it does on the authority.
void UPS_AI_ConvAgent_ElevenLabsComponent::OnRep_Emotion()
{
	// Fire the existing delegate so FacialExpressionComponent picks it up on clients.
	OnAgentEmotionChanged.Broadcast(CurrentEmotion, CurrentEmotionIntensity);
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Server RPCs
// ─────────────────────────────────────────────────────────────────────────────
// Server RPC: a client asks for the exclusive conversation lock on this NPC.
// Grants the lock and opens the WebSocket if the NPC is free; denies otherwise.
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerRequestConversation_Implementation(
	APlayerController* RequestingPlayer)
{
	// Fix: validate the requester. A null/stale controller would take the lock
	// with NetConversatingPlayer == nullptr, and the disconnect watchdog in
	// TickComponent only releases the NPC when NetConversatingPlayer is set —
	// the NPC would stay locked forever.
	if (!IsValid(RequestingPlayer))
	{
		UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
			TEXT("ServerRequestConversation denied — invalid RequestingPlayer."));
		return;
	}
	if (bNetIsConversing)
	{
		UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
			TEXT("ServerRequestConversation denied — NPC is already in conversation."));
		// NOTE(review): a Client RPC on this component routes to the connection
		// that owns this NPC actor — confirm the NPC's owner is set to the
		// requesting player's connection, otherwise this may never arrive.
		ClientConversationFailed(TEXT("NPC is already in conversation with another player."));
		return;
	}
	bNetIsConversing = true;
	NetConversatingPlayer = RequestingPlayer;
	UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
		TEXT("ServerRequestConversation granted for %s."),
		*GetNameSafe(RequestingPlayer));
	StartConversation_Internal();
}
// Server RPC: release this NPC so other players can talk to it.
// Mirrors the authority branch of EndConversation(): stop mic/audio, close the
// WebSocket, and clear the replicated lock.
// NOTE(review): there is no check that the calling connection is the player
// currently holding the lock — confirm whether another client could release
// someone else's conversation.
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerReleaseConversation_Implementation()
{
	UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("ServerReleaseConversation."));
	StopListening();
	// No response is coming after release — cancel the pending-response flag so
	// TickComponent cannot fire a spurious timeout (see ISSUE-4 in EndConversation).
	bWaitingForAgentResponse = false;
	StopAgentAudio();
	if (WebSocketProxy)
	{
		WebSocketProxy->Disconnect();
		WebSocketProxy = nullptr;
	}
	// Clear the replicated lock last so OnRep fires with a consistent state.
	bNetIsConversing = false;
	NetConversatingPlayer = nullptr;
}
// Server RPC: forward a client's accumulated mic chunk to ElevenLabs over the
// server-held WebSocket. Unreliable by design — dropped chunks are acceptable.
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerSendMicAudio_Implementation(
	const TArray<uint8>& PCMBytes)
{
	// Guard clause: silently drop the chunk when no live session exists.
	if (!WebSocketProxy || !WebSocketProxy->IsConnected())
	{
		return;
	}
	// NOTE(review): the sender is not verified against NetConversatingPlayer —
	// confirm whether a non-speaking client could inject audio.
	WebSocketProxy->SendAudioChunk(PCMBytes);
}
// Server RPC: relay a client-typed text message through the server-held
// WebSocket connection.
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerSendTextMessage_Implementation(
	const FString& Text)
{
	// Guard clause: ignore the message when no live session exists.
	if (!WebSocketProxy || !WebSocketProxy->IsConnected())
	{
		return;
	}
	WebSocketProxy->SendTextMessage(Text);
}
// Server RPC: a client asks the server to interrupt the agent mid-utterance.
// Delegates to the existing local InterruptAgent() path on the authority.
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerRequestInterrupt_Implementation()
{
	InterruptAgent();
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Client RPCs
// ─────────────────────────────────────────────────────────────────────────────
// Client RPC: the server granted our conversation request and the WebSocket
// connected. Reset local per-session bookkeeping and mirror HandleConnected's
// auto-listen behaviour.
void UPS_AI_ConvAgent_ElevenLabsComponent::ClientConversationStarted_Implementation(
	const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& Info)
{
	UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
		TEXT("[Client] Conversation started. ConversationID=%s"), *Info.ConversationID);
	// Fresh session clock and turn counters for client-side timing logs.
	SessionStartTime = FPlatformTime::Seconds();
	TurnIndex = 0;
	LastClosedTurnIndex = 0;
	OnAgentConnected.Broadcast(Info);
	// Auto-start listening in Server turn mode (same logic as HandleConnected);
	// Client turn mode (push-to-talk) keeps listening under manual control.
	const bool bShouldAutoListen =
		bAutoStartListening && TurnMode == EPS_AI_ConvAgent_TurnMode_ElevenLabs::Server;
	if (bShouldAutoListen)
	{
		StartListening();
	}
}
// Client RPC: the server denied our conversation request (NPC occupied).
// Surfaces the reason through the existing error delegate so UI can react.
void UPS_AI_ConvAgent_ElevenLabsComponent::ClientConversationFailed_Implementation(
	const FString& Reason)
{
	UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
		TEXT("[Client] Conversation request failed: %s"), *Reason);
	OnAgentError.Broadcast(Reason);
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Multicast RPCs
// ─────────────────────────────────────────────────────────────────────────────
// Multicast RPC: Opus-compressed agent audio from the server. Each client
// decodes locally, applies distance-based LOD, plays the audio, and feeds the
// raw PCM to lip-sync listeners.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastReceiveAgentAudio_Implementation(
	const TArray<uint8>& OpusData)
{
	// Server already handled playback in HandleAudioReceived.
	if (GetOwnerRole() == ROLE_Authority) return;
	// Decoder may be missing if FVoiceModule was unavailable at BeginPlay.
	if (!OpusDecoder.IsValid()) return;
	// LOD: skip audio if too far (unless this client is the speaker).
	const float Dist = GetDistanceToLocalPlayer();
	const bool bIsSpeaker = IsLocalPlayerConversating();
	if (!bIsSpeaker && AudioLODCullDistance > 0.f && Dist > AudioLODCullDistance) return;
	// Decode Opus → PCM.
	const uint32 MaxDecompressedSize = 16000 * 2; // 1 second of 16kHz 16-bit mono
	TArray<uint8> PCMBuffer;
	PCMBuffer.SetNumUninitialized(MaxDecompressedSize);
	// DecompressedSize is in/out: capacity on entry, bytes written on return —
	// presumably per IVoiceDecoder::Decode; 0 is treated as failure. TODO confirm.
	uint32 DecompressedSize = MaxDecompressedSize;
	OpusDecoder->Decode(OpusData.GetData(), OpusData.Num(),
		PCMBuffer.GetData(), DecompressedSize);
	if (DecompressedSize == 0) return;
	// Shrink to the actual decoded payload before handing it on.
	PCMBuffer.SetNum(DecompressedSize);
	// Local playback.
	EnqueueAgentAudio(PCMBuffer);
	// Feed lip-sync (within LOD or speaker). Beyond LipSyncLODDistance the audio
	// still plays but facial animation is skipped.
	if (bIsSpeaker || LipSyncLODDistance <= 0.f || Dist <= LipSyncLODDistance)
	{
		OnAgentAudioData.Broadcast(PCMBuffer);
	}
}
// Multicast RPC: first audio chunk arrived on the server — mirror the speaking
// state on clients so posture/LOD logic and Blueprint listeners run locally.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentStartedSpeaking_Implementation()
{
	// The listen server already broadcast this locally in EnqueueAgentAudio.
	if (GetOwnerRole() != ROLE_Authority)
	{
		bAgentSpeaking = true;
		OnAgentStartedSpeaking.Broadcast();
	}
}
// Multicast RPC: the agent went silent on the server — clear the client-side
// speaking state and silence counter, then notify local listeners.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentStoppedSpeaking_Implementation()
{
	// The listen server already broadcast this locally.
	if (GetOwnerRole() != ROLE_Authority)
	{
		bAgentSpeaking = false;
		SilentTickCount = 0;
		OnAgentStoppedSpeaking.Broadcast();
	}
}
// Multicast RPC: the agent was interrupted on the server — cut local playback
// on clients and fire the interrupted delegate.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentInterrupted_Implementation()
{
	// The listen server already handled this in HandleInterrupted.
	if (GetOwnerRole() == ROLE_Authority) return;
	StopAgentAudio();
	OnAgentInterrupted.Broadcast();
}
// Multicast RPC: the agent's complete text response, for client-side subtitles.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentTextResponse_Implementation(
	const FString& ResponseText)
{
	// The listen server already broadcast this locally in HandleAgentResponse;
	// also respect the local opt-out flag.
	const bool bIsServer = (GetOwnerRole() == ROLE_Authority);
	if (bIsServer || !bEnableAgentTextResponse)
	{
		return;
	}
	OnAgentTextResponse.Broadcast(ResponseText);
}
// Multicast RPC: streaming partial agent text, for real-time client subtitles.
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentPartialResponse_Implementation(
	const FString& PartialText)
{
	// The listen server already broadcast this locally; also respect the
	// local opt-out flag.
	const bool bIsServer = (GetOwnerRole() == ROLE_Authority);
	if (bIsServer || !bEnableAgentPartialResponse)
	{
		return;
	}
	OnAgentPartialResponse.Broadcast(PartialText);
}
// Multicast RPC: the agent started generating ("thinking") on the server —
// forward to client-side listeners (e.g. UI indicators).
void UPS_AI_ConvAgent_ElevenLabsComponent::MulticastAgentStartedGenerating_Implementation()
{
	// The listen server already broadcast this locally in HandleAgentResponseStarted.
	if (GetOwnerRole() == ROLE_Authority) return;
	OnAgentStartedGenerating.Broadcast();
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Opus codec
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_ElevenLabsComponent::InitOpusCodec()
{
if (!FVoiceModule::IsAvailable()) return;
FVoiceModule& VoiceModule = FVoiceModule::Get();
if (GetOwnerRole() == ROLE_Authority)
{
OpusEncoder = VoiceModule.CreateVoiceEncoder(
PS_AI_ConvAgent_Audio_ElevenLabs::SampleRate,
PS_AI_ConvAgent_Audio_ElevenLabs::Channels,
EAudioEncodeHint::VoiceEncode_Voice);
}
OpusDecoder = VoiceModule.CreateVoiceDecoder(
PS_AI_ConvAgent_Audio_ElevenLabs::SampleRate,
PS_AI_ConvAgent_Audio_ElevenLabs::Channels);
OpusWorkBuffer.SetNumUninitialized(8 * 1024); // 8 KB scratch buffer for Opus encode/decode
}
// ─────────────────────────────────────────────────────────────────────────────
// Network: Helpers
// ─────────────────────────────────────────────────────────────────────────────
float UPS_AI_ConvAgent_ElevenLabsComponent::GetDistanceToLocalPlayer() const
{
if (UWorld* World = GetWorld())
{
if (APlayerController* PC = World->GetFirstPlayerController())
{
if (APawn* Pawn = PC->GetPawn())
{
return FVector::Dist(GetOwner()->GetActorLocation(),
Pawn->GetActorLocation());
}
}
}
return MAX_FLT;
}
bool UPS_AI_ConvAgent_ElevenLabsComponent::IsLocalPlayerConversating() const
{
if (UWorld* World = GetWorld())
{
if (APlayerController* PC = World->GetFirstPlayerController())
{
return NetConversatingPlayer == PC;
}
}
return false;
}

View File

@ -128,11 +128,20 @@ UPS_AI_ConvAgent_ElevenLabsComponent* UPS_AI_ConvAgent_InteractionComponent::Eva
UPS_AI_ConvAgent_ElevenLabsComponent* CurrentAgent = SelectedAgent.Get();
// Get local player controller for occupied-NPC check.
APlayerController* LocalPC = World->GetFirstPlayerController();
for (UPS_AI_ConvAgent_ElevenLabsComponent* Agent : Agents)
{
AActor* AgentActor = Agent->GetOwner();
if (!AgentActor) continue;
// Network: skip agents that are in conversation with a different player.
if (Agent->bNetIsConversing && Agent->NetConversatingPlayer != LocalPC)
{
continue;
}
const FVector AgentLocation = AgentActor->GetActorLocation() + FVector(0.0f, 0.0f, AgentEyeLevelOffset);
const FVector ToAgent = AgentLocation - ViewLocation;
const float DistSq = ToAgent.SizeSquared();
@ -243,6 +252,12 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
NewAgent->GetOwner() ? *NewAgent->GetOwner()->GetName() : TEXT("(null)"));
}
// Network: auto-start conversation if the agent isn't connected yet.
if (!NewAgent->IsConnected() && !NewAgent->bNetIsConversing)
{
NewAgent->StartConversation();
}
// Ensure mic is capturing so we can route audio to the new agent.
if (MicComponent && !MicComponent->IsCapturing())
{

View File

@ -7,12 +7,14 @@
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_WebSocket_ElevenLabsProxy.h"
#include "Sound/SoundWaveProcedural.h"
#include "Interfaces/VoiceCodec.h"
#include <atomic>
#include "PS_AI_ConvAgent_ElevenLabsComponent.generated.h"
class UAudioComponent;
class USoundAttenuation;
class UPS_AI_ConvAgent_MicrophoneCaptureComponent;
class APlayerController;
// ─────────────────────────────────────────────────────────────────────────────
// Delegates exposed to Blueprint
@ -270,11 +272,11 @@ public:
FOnAgentClientToolCall OnAgentClientToolCall;
/** The current emotion of the agent, as set by the "set_emotion" client tool. Defaults to Neutral. */
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
UPROPERTY(ReplicatedUsing = OnRep_Emotion, BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
EPS_AI_ConvAgent_Emotion CurrentEmotion = EPS_AI_ConvAgent_Emotion::Neutral;
/** The current emotion intensity. Defaults to Medium. */
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
UPROPERTY(ReplicatedUsing = OnRep_Emotion, BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
EPS_AI_ConvAgent_EmotionIntensity CurrentEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium;
// ── Raw audio data (C++ only, used by LipSync component) ────────────────
@ -282,6 +284,93 @@ public:
* Used internally by UPS_AI_ConvAgent_LipSyncComponent for spectral analysis. */
FOnAgentAudioData OnAgentAudioData;
// ── Network state (replicated) ───────────────────────────────────────────
/** True when a player is currently in conversation with this NPC.
* Replicated to all clients so InteractionComponents can skip occupied NPCs. */
UPROPERTY(ReplicatedUsing = OnRep_ConversationState, BlueprintReadOnly, Category = "PS AI ConvAgent|Network")
bool bNetIsConversing = false;
/** The player controller currently in conversation with this NPC (null if free).
* Replicated so each client knows who is speaking (used for posture target, LOD). */
UPROPERTY(ReplicatedUsing = OnRep_ConversationState, BlueprintReadOnly, Category = "PS AI ConvAgent|Network")
TObjectPtr<APlayerController> NetConversatingPlayer = nullptr;
// ── Network LOD ──────────────────────────────────────────────────────────
/** Distance (cm) beyond which remote clients stop receiving agent audio entirely.
* The speaking player always receives full audio regardless of distance. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Network|LOD",
meta = (ClampMin = "0", ToolTip = "Distance beyond which audio is culled for non-speaking players. 0 = no cull."))
float AudioLODCullDistance = 3000.f;
/** Distance (cm) beyond which remote clients skip lip-sync / emotion processing.
* Audio still plays (if within AudioLODCullDistance) but without facial animation.
* The speaking player always gets full lip-sync regardless of distance. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Network|LOD",
meta = (ClampMin = "0", ToolTip = "Distance beyond which lip-sync is skipped for non-speaking players. 0 = no LOD."))
float LipSyncLODDistance = 1500.f;
// ── Network RPCs ─────────────────────────────────────────────────────────
/** Request exclusive conversation with this NPC. Called by clients; the server
* checks availability and opens the WebSocket connection if the NPC is free. */
UFUNCTION(Server, Reliable)
void ServerRequestConversation(APlayerController* RequestingPlayer);
/** Release this NPC so other players can talk to it. */
UFUNCTION(Server, Reliable)
void ServerReleaseConversation();
/** Stream accumulated mic audio from the speaking client to the server.
* Unreliable: minor packet loss is acceptable for audio streaming. */
UFUNCTION(Server, Unreliable)
void ServerSendMicAudio(const TArray<uint8>& PCMBytes);
/** Send a text message via the server's WebSocket connection. */
UFUNCTION(Server, Reliable)
void ServerSendTextMessage(const FString& Text);
/** Request an agent interruption through the server. */
UFUNCTION(Server, Reliable)
void ServerRequestInterrupt();
/** Broadcast Opus-compressed agent audio to all clients. */
UFUNCTION(NetMulticast, Unreliable)
void MulticastReceiveAgentAudio(const TArray<uint8>& OpusData);
/** Notify all clients that the agent started speaking (first audio chunk). */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentStartedSpeaking();
/** Notify all clients that the agent stopped speaking. */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentStoppedSpeaking();
/** Notify all clients that the agent was interrupted. */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentInterrupted();
/** Broadcast the agent's complete text response (subtitles). */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentTextResponse(const FString& ResponseText);
/** Broadcast streaming partial text (real-time subtitles). */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentPartialResponse(const FString& PartialText);
/** Notify all clients that the agent started generating (thinking). */
UFUNCTION(NetMulticast, Reliable)
void MulticastAgentStartedGenerating();
/** Notify the requesting client that conversation started successfully. */
UFUNCTION(Client, Reliable)
void ClientConversationStarted(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& Info);
/** Notify the requesting client that conversation request was denied. */
UFUNCTION(Client, Reliable)
void ClientConversationFailed(const FString& Reason);
// ── Control ───────────────────────────────────────────────────────────────
/**
@ -360,8 +449,16 @@ public:
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
virtual void TickComponent(float DeltaTime, ELevelTick TickType,
FActorComponentTickFunction* ThisTickFunction) override;
virtual void GetLifetimeReplicatedProps(TArray<FLifetimeProperty>& OutLifetimeProps) const override;
private:
// ── Network OnRep handlers ───────────────────────────────────────────────
UFUNCTION()
void OnRep_ConversationState();
UFUNCTION()
void OnRep_Emotion();
// ── Internal event handlers ───────────────────────────────────────────────
UFUNCTION()
void HandleConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& Info);
@ -498,4 +595,19 @@ private:
/** Compute the minimum bytes from the user-facing MicChunkDurationMs.
* Formula: bytes = SampleRate * (ms / 1000) * BytesPerSample = 16000 * ms / 1000 * 2 = 32 * ms */
int32 GetMicChunkMinBytes() const { return MicChunkDurationMs * 32; }
// ── Opus codec (network audio compression) ───────────────────────────────
TSharedPtr<IVoiceEncoder> OpusEncoder; // Server only
TSharedPtr<IVoiceDecoder> OpusDecoder; // All clients
TArray<uint8> OpusWorkBuffer; // Reusable scratch buffer for encode/decode
void InitOpusCodec();
// ── Network helpers ──────────────────────────────────────────────────────
/** Distance from this NPC to the local player's pawn. Returns MAX_FLT if unavailable. */
float GetDistanceToLocalPlayer() const;
/** True if the local player controller is the one currently in conversation. */
bool IsLocalPlayerConversating() const;
/** Internal: performs the actual WebSocket setup (called by both local and RPC paths). */
void StartConversation_Internal();
};