Gate operational logs behind bDebug/bVerboseLogging

Only init, deinit, errors and critical warnings remain unconditional.
All turn-by-turn logs (mic open/close, agent generating/speaking/stopped,
emotion changes, audio chunks, text/viseme processing, latency, bone
resolution, selection, registration) are now gated behind component
bDebug or Settings->bVerboseLogging.

10 files, ~125 lines removed.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
j.foucher 2026-03-01 13:28:59 +01:00
parent 09ca2215a0
commit 8fcb8b6f30
10 changed files with 125 additions and 247 deletions

View File

@ -32,16 +32,6 @@ void FAnimNode_PS_AI_ConvAgent_FacialExpression::Initialize_AnyThread(const FAni
if (Comp)
{
FacialExpressionComponent = Comp;
UE_LOG(LogPS_AI_ConvAgent_FacialExprAnimNode, Log,
TEXT("PS AI ConvAgent Facial Expression AnimNode bound to component on %s."),
*Owner->GetName());
}
else
{
UE_LOG(LogPS_AI_ConvAgent_FacialExprAnimNode, Warning,
TEXT("No PS_AI_ConvAgent_FacialExpressionComponent found on %s. "
"Add the component alongside the Conversational Agent."),
*Owner->GetName());
}
}
}
@ -72,9 +62,6 @@ void FAnimNode_PS_AI_ConvAgent_FacialExpression::Update_AnyThread(const FAnimati
if (Comp)
{
FacialExpressionComponent = Comp;
UE_LOG(LogPS_AI_ConvAgent_FacialExprAnimNode, Log,
TEXT("PS AI ConvAgent Facial Expression AnimNode (late) bound to component on %s."),
*Owner->GetName());
}
}
}

View File

@ -32,16 +32,6 @@ void FAnimNode_PS_AI_ConvAgent_LipSync::Initialize_AnyThread(const FAnimationIni
if (Comp)
{
LipSyncComponent = Comp;
UE_LOG(LogPS_AI_ConvAgent_LipSyncAnimNode, Log,
TEXT("PS AI ConvAgent Lip Sync AnimNode bound to component on %s."),
*Owner->GetName());
}
else
{
UE_LOG(LogPS_AI_ConvAgent_LipSyncAnimNode, Warning,
TEXT("No PS_AI_ConvAgent_LipSyncComponent found on %s. "
"Add the component alongside the Conversational Agent."),
*Owner->GetName());
}
}
}
@ -72,9 +62,6 @@ void FAnimNode_PS_AI_ConvAgent_LipSync::Update_AnyThread(const FAnimationUpdateC
if (Comp)
{
LipSyncComponent = Comp;
UE_LOG(LogPS_AI_ConvAgent_LipSyncAnimNode, Log,
TEXT("PS AI ConvAgent Lip Sync AnimNode (late) bound to component on %s."),
*Owner->GetName());
}
}
}

View File

@ -126,22 +126,8 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Initialize_AnyThread(const FAnimationIni
}
}
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("PS AI ConvAgent Posture AnimNode: Owner=%s Mesh=%s Head=%s Eyes=%s Chain=%d HeadComp=%.1f EyeComp=%.1f DriftComp=%.1f"),
*Owner->GetName(), *SkelMesh->GetName(),
bApplyHeadRotation ? TEXT("ON") : TEXT("OFF"),
bApplyEyeCurves ? TEXT("ON") : TEXT("OFF"),
ChainBoneNames.Num(),
Comp->GetHeadAnimationCompensation(),
Comp->GetEyeAnimationCompensation(),
Comp->GetBodyDriftCompensation());
}
else
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Warning,
TEXT("No PS_AI_ConvAgent_PostureComponent found on %s."),
*Owner->GetName());
}
}
// No posture component is fine — AnimBP may not require it.
}
}
}
@ -187,10 +173,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
: FQuat::Identity;
ChainRefPoseRotations.Add(RefRot);
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT(" Chain bone [%d] '%s' → index %d (weight=%.2f)"),
i, *ChainBoneNames[i].ToString(), CompactIdx.GetInt(),
ChainBoneWeights[i]);
// Bone resolved successfully.
}
else
{
@ -217,9 +200,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
? RefPose[MeshIndex].GetRotation()
: FQuat::Identity;
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("Head bone '%s' resolved to index %d."),
*HeadBoneName.ToString(), HeadBoneIndex.GetInt());
// Head bone resolved successfully.
}
else
{
@ -258,15 +239,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
? RefPose[MeshIdx].GetRotation()
: FQuat::Identity;
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("Eye bone '%s' resolved to index %d."),
*BoneName.ToString(), OutIndex.GetInt());
}
else
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("Eye bone '%s' not found in skeleton (OK for Body AnimBP)."),
*BoneName.ToString());
// Eye bone resolved successfully.
}
};
@ -311,10 +284,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
RefAccumAboveChain = RefAccumAboveChain * RefRot;
}
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("Body drift: %d ancestor bones above chain. RefAccum=(%s)"),
AncestorBoneIndices.Num(),
*RefAccumAboveChain.Rotator().ToCompactString());
// Body drift compensation initialized.
}
}
}
@ -339,9 +309,6 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Update_AnyThread(const FAnimationUpdateC
if (Comp)
{
PostureComponent = Comp;
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Log,
TEXT("PS AI ConvAgent Posture AnimNode (late) bound to component on %s."),
*Owner->GetName());
}
}
}

View File

@ -77,10 +77,13 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
if (WaitTime >= static_cast<double>(ResponseTimeoutSeconds))
{
bWaitingForAgentResponse = false;
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Response timeout — server did not start generating after %.1fs. Firing OnAgentResponseTimeout."),
T, LastClosedTurnIndex, WaitTime);
if (bDebug)
{
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Response timeout — server did not start generating after %.1fs."),
T, LastClosedTurnIndex, WaitTime);
}
OnAgentResponseTimeout.Broadcast();
}
}
@ -96,10 +99,13 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
{
bAgentGenerating = false;
GeneratingTickCount = 0;
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Generating timeout (10s) — server generated but no audio arrived. Clearing bAgentGenerating."),
T, LastClosedTurnIndex);
if (bDebug)
{
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Generating timeout (10s) — no audio arrived. Clearing bAgentGenerating."),
T, LastClosedTurnIndex);
}
}
}
else
@ -116,10 +122,13 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
if (Elapsed >= static_cast<double>(AudioPreBufferMs))
{
bPreBuffering = false;
const double Tpb = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Pre-buffer timeout (%dms). Starting playback."),
Tpb, LastClosedTurnIndex, AudioPreBufferMs);
if (bDebug)
{
const double Tpb = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Pre-buffer timeout (%dms). Starting playback."),
Tpb, LastClosedTurnIndex, AudioPreBufferMs);
}
if (AudioPlaybackComponent && !AudioPlaybackComponent->IsPlaying())
{
AudioPlaybackComponent->Play();
@ -182,11 +191,11 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
// Broadcast OUTSIDE the lock — Blueprint handlers can execute for arbitrary time.
if (bShouldBroadcastStopped)
{
if (bHardTimeoutFired)
if (bHardTimeoutFired && bDebug)
{
const double Tht = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Agent silence hard-timeout (10s) without agent_response — declaring agent stopped."),
TEXT("[T+%.2fs] [Turn %d] Agent silence hard-timeout (10s) without agent_response."),
Tht, LastClosedTurnIndex);
}
OnAgentStoppedSpeaking.Broadcast();
@ -311,7 +320,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartListening()
{
if (bAgentSpeaking && bAllowInterruption)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("StartListening: interrupting agent (speaking) to allow user to speak."));
if (bDebug) UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("StartListening: interrupting agent to allow user to speak."));
InterruptAgent();
// InterruptAgent → StopAgentAudio clears bAgentSpeaking / bAgentGenerating,
// so we fall through and open the microphone immediately.
@ -373,9 +382,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartListening()
Mic->StartCapture();
}
const double T = TurnStartTime - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] [Turn %d] Mic opened%s — user speaking."),
T, TurnIndex, bExternalMicManagement ? TEXT(" (external)") : TEXT(""));
if (bDebug)
{
const double T = TurnStartTime - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] [Turn %d] Mic opened%s — user speaking."),
T, TurnIndex, bExternalMicManagement ? TEXT(" (external)") : TEXT(""));
}
}
void UPS_AI_ConvAgent_ElevenLabsComponent::StopListening()
@ -439,21 +451,22 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StopListening()
bWaitingForAgentResponse = true;
LastClosedTurnIndex = TurnIndex;
}
const double T = TurnEndTime - SessionStartTime;
const double TurnDuration = TurnStartTime > 0.0 ? TurnEndTime - TurnStartTime : 0.0;
if (bWaitingForAgentResponse)
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Mic closed — user spoke %.2fs. Waiting for server response (timeout %.0fs)..."),
T, TurnIndex, TurnDuration, ResponseTimeoutSeconds);
}
else
{
// Collision avoidance: StopListening was called from HandleAgentResponseStarted
// while server was already generating — no need to wait or time out.
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Mic closed (collision avoidance) — user spoke %.2fs. Server is already generating Turn %d response."),
T, TurnIndex, TurnDuration, LastClosedTurnIndex);
const double T = TurnEndTime - SessionStartTime;
const double TurnDuration = TurnStartTime > 0.0 ? TurnEndTime - TurnStartTime : 0.0;
if (bWaitingForAgentResponse)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Mic closed — user spoke %.2fs. Waiting for response (timeout %.0fs)..."),
T, TurnIndex, TurnDuration, ResponseTimeoutSeconds);
}
else
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Mic closed (collision) — user spoke %.2fs. Server generating Turn %d."),
T, TurnIndex, TurnDuration, LastClosedTurnIndex);
}
}
}
@ -726,9 +739,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponseStarted()
}
}
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent generating. (%.2fs after turn end)"),
T, LastClosedTurnIndex, LatencyFromTurnEnd);
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent generating. (%.2fs after turn end)"),
T, LastClosedTurnIndex, LatencyFromTurnEnd);
}
OnAgentStartedGenerating.Broadcast();
// Network: notify all clients.
@ -798,9 +814,12 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleClientToolCall(const FPS_AI_Con
CurrentEmotion = NewEmotion;
CurrentEmotionIntensity = NewIntensity;
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] Agent emotion changed to: %s (%s)"),
T, *UEnum::GetValueAsString(NewEmotion), *UEnum::GetValueAsString(NewIntensity));
if (bDebug)
{
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] Agent emotion changed to: %s (%s)"),
T, *UEnum::GetValueAsString(NewEmotion), *UEnum::GetValueAsString(NewIntensity));
}
OnAgentEmotionChanged.Broadcast(NewEmotion, NewIntensity);
@ -904,10 +923,6 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::OnProceduralUnderflow(
if (!bQueueWasDry)
{
bQueueWasDry = true;
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] AudioQueue DRY — waiting for next TTS chunk (requested %d samples)."),
T, LastClosedTurnIndex, SamplesRequired);
}
// Do NOT feed silence via QueueAudio! USoundWaveProcedural with
@ -940,11 +955,14 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::EnqueueAgentAudio(const TArray<uint8>
bQueueWasDry = false;
SilentTickCount = 0;
const double T = AgentSpeakStart - SessionStartTime;
const double LatencyFromTurnEnd = TurnEndTime > 0.0 ? AgentSpeakStart - TurnEndTime : 0.0;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent speaking — first audio chunk. (%.2fs after turn end)"),
T, LastClosedTurnIndex, LatencyFromTurnEnd);
if (bDebug)
{
const double T = AgentSpeakStart - SessionStartTime;
const double LatencyFromTurnEnd = TurnEndTime > 0.0 ? AgentSpeakStart - TurnEndTime : 0.0;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent speaking — first audio chunk. (%.2fs after turn end)"),
T, LastClosedTurnIndex, LatencyFromTurnEnd);
}
OnAgentStartedSpeaking.Broadcast();
@ -1000,10 +1018,6 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::EnqueueAgentAudio(const TArray<uint8>
// buffer underrun (TTS inter-batch gap). Restart it if needed.
if (AudioPlaybackComponent && !AudioPlaybackComponent->IsPlaying())
{
const double Tbr = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[T+%.2fs] [Turn %d] Audio component stopped during speech (buffer underrun). Restarting playback."),
Tbr, LastClosedTurnIndex);
AudioPlaybackComponent->Play();
}
// Reset silence counter — new audio arrived, we're not in a gap anymore
@ -1054,12 +1068,15 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StopAgentAudio()
// Broadcast outside the lock.
if (bWasSpeaking)
{
const double T = Now - SessionStartTime;
const double AgentSpokeDuration = AgentSpeakStart > 0.0 ? Now - AgentSpeakStart : 0.0;
const double TotalTurnDuration = TurnEndTime > 0.0 ? Now - TurnEndTime : 0.0;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent stopped speaking (spoke %.2fs, full turn round-trip %.2fs)."),
T, LastClosedTurnIndex, AgentSpokeDuration, TotalTurnDuration);
if (bDebug)
{
const double T = Now - SessionStartTime;
const double AgentSpokeDuration = AgentSpeakStart > 0.0 ? Now - AgentSpeakStart : 0.0;
const double TotalTurnDuration = TurnEndTime > 0.0 ? Now - TurnEndTime : 0.0;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] [Turn %d] Agent stopped speaking (spoke %.2fs, round-trip %.2fs)."),
T, LastClosedTurnIndex, AgentSpokeDuration, TotalTurnDuration);
}
OnAgentStoppedSpeaking.Broadcast();
@ -1171,8 +1188,6 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::ServerRequestConversation_Implementat
{
if (bNetIsConversing)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("ServerRequestConversation denied — NPC is already in conversation."));
ClientConversationFailed(TEXT("NPC is already in conversation with another player."));
return;
}
@ -1180,17 +1195,11 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::ServerRequestConversation_Implementat
bNetIsConversing = true;
NetConversatingPlayer = RequestingPlayer;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("ServerRequestConversation granted for %s."),
*GetNameSafe(RequestingPlayer));
StartConversation_Internal();
}
void UPS_AI_ConvAgent_ElevenLabsComponent::ServerReleaseConversation_Implementation()
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("ServerReleaseConversation."));
StopListening();
bWaitingForAgentResponse = false;
StopAgentAudio();

View File

@ -93,8 +93,6 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::ValidateEmotionPoses()
{
if (!EmotionPoseMap || EmotionPoseMap->EmotionPoses.Num() == 0)
{
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("No emotion poses assigned in EmotionPoseMap — facial expressions disabled."));
return;
}
@ -205,10 +203,13 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged(
// Begin crossfade
CrossfadeAlpha = 0.0f;
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("Emotion changed: %s (%s) — anim: %s, crossfading over %.1fs..."),
*UEnum::GetValueAsString(Emotion), *UEnum::GetValueAsString(Intensity),
NewAnim ? *NewAnim->GetName() : TEXT("(none)"), EmotionBlendDuration);
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("Emotion changed: %s (%s) — anim: %s"),
*UEnum::GetValueAsString(Emotion), *UEnum::GetValueAsString(Intensity),
NewAnim ? *NewAnim->GetName() : TEXT("(none)"));
}
}
// ─────────────────────────────────────────────────────────────────────────────
@ -232,9 +233,6 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent(
AgentComponent = Agent;
Agent->OnAgentEmotionChanged.AddDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged);
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("Facial expression (late) bound to agent component on %s."),
*Owner->GetName());
}
}
}

View File

@ -45,7 +45,6 @@ void UPS_AI_ConvAgent_InteractionComponent::BeginPlay()
MicComponent->OnAudioCaptured.AddUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::OnMicAudioCaptured);
UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("InteractionComponent initialized on %s."), *Owner->GetName());
}
void UPS_AI_ConvAgent_InteractionComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)

View File

@ -19,8 +19,6 @@ void UPS_AI_ConvAgent_InteractionSubsystem::RegisterAgent(UPS_AI_ConvAgent_Eleve
}
RegisteredAgents.Add(Agent);
UE_LOG(LogPS_AI_ConvAgent_Interaction, Log, TEXT("Agent registered: %s (%d total)"),
*Agent->GetOwner()->GetName(), RegisteredAgents.Num());
}
void UPS_AI_ConvAgent_InteractionSubsystem::UnregisterAgent(UPS_AI_ConvAgent_ElevenLabsComponent* Agent)
@ -32,9 +30,6 @@ void UPS_AI_ConvAgent_InteractionSubsystem::UnregisterAgent(UPS_AI_ConvAgent_Ele
return !Weak.IsValid() || Weak.Get() == Agent;
});
UE_LOG(LogPS_AI_ConvAgent_Interaction, Log, TEXT("Agent unregistered: %s (%d remaining)"),
Agent->GetOwner() ? *Agent->GetOwner()->GetName() : TEXT("(destroyed)"),
RegisteredAgents.Num());
}
// ─────────────────────────────────────────────────────────────────────────────

View File

@ -672,9 +672,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic
Agent->OnAgentStoppedSpeaking.AddDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentStopped);
Agent->bEnableAgentPartialResponse = true;
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Lip sync (late) bound to agent component on %s."),
*Owner->GetName());
}
}
}
@ -713,9 +710,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic
// Timeout — start playback with spectral shapes as fallback
bWaitingForText = false;
PlaybackTimer = 0.0f;
UE_LOG(LogPS_AI_ConvAgent_LipSync, Warning,
TEXT("Text wait timeout (%.0fms). Starting lip sync with spectral shapes (Queue=%d)."),
WaitElapsed * 1000.0, VisemeQueue.Num());
}
else
{
@ -1002,7 +996,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic
void UPS_AI_ConvAgent_LipSyncComponent::OnAgentInterrupted()
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, TEXT("Agent interrupted — resetting lip sync to neutral."));
ResetToNeutral();
}
@ -1011,7 +1004,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnAgentStopped()
// Don't clear text state here — it's already handled by TickComponent's
// "queue runs dry" logic which checks bFullTextReceived.
// Just clear the queues so the mouth returns to neutral immediately.
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, TEXT("Agent stopped speaking — clearing lip sync queues."));
VisemeQueue.Reset();
AmplitudeQueue.Reset();
PlaybackTimer = 0.0f;
@ -1415,9 +1407,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnAudioChunkReceived(const TArray<uint8>
bWaitingForText = true;
WaitingForTextStartTime = FPlatformTime::Seconds();
PlaybackTimer = 0.0f;
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Waiting for text before starting lip sync (%d frames queued)."),
WindowsQueued);
}
}
else if (AccumulatedText.Len() > 0 && TextVisemeSequence.Num() > 0)
@ -1429,11 +1418,14 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnAudioChunkReceived(const TArray<uint8>
ApplyTextVisemesToQueue();
}
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Audio chunk: %d samples → %d windows | Amp=[%.2f-%.2f] | Queue=%d (%.1fs) | TextVisemes=%d"),
NumSamples, WindowsQueued,
MinAmp, MaxAmp, VisemeQueue.Num(),
VisemeQueue.Num() * (512.0f / 16000.0f), TextVisemeSequence.Num());
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Audio chunk: %d samples → %d windows | Amp=[%.2f-%.2f] | Queue=%d (%.1fs) | TextVisemes=%d"),
NumSamples, WindowsQueued,
MinAmp, MaxAmp, VisemeQueue.Num(),
VisemeQueue.Num() * (512.0f / 16000.0f), TextVisemeSequence.Num());
}
}
// ─────────────────────────────────────────────────────────────────────────────
@ -1458,9 +1450,12 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnPartialTextReceived(const FString& Par
// Convert accumulated text to viseme sequence progressively
ConvertTextToVisemes(AccumulatedText);
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Partial text: \"%s\" → %d visemes (accumulated: \"%s\")"),
*PartialText, TextVisemeSequence.Num(), *AccumulatedText);
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Partial text: \"%s\" → %d visemes (accumulated: \"%s\")"),
*PartialText, TextVisemeSequence.Num(), *AccumulatedText);
}
// If we were waiting for text to arrive before starting playback,
// apply text visemes to queued frames and start consuming.
@ -1490,8 +1485,11 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnTextResponseReceived(const FString& Re
AccumulatedText = ResponseText;
ConvertTextToVisemes(ResponseText);
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Full text: \"%s\" → %d visemes"), *ResponseText, TextVisemeSequence.Num());
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Full text: \"%s\" → %d visemes"), *ResponseText, TextVisemeSequence.Num());
}
// Apply to any remaining queued frames (or extend timeline in pose mode)
{
@ -1513,7 +1511,7 @@ void UPS_AI_ConvAgent_LipSyncComponent::OnTextResponseReceived(const FString& Re
WaitElapsed * 1000.0);
}
// Log the viseme sequence for debugging (Log level so it's always visible)
if (bDebug)
{
FString VisSeq;
int32 Count = 0;
@ -1915,10 +1913,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::ConvertTextToVisemes(const FString& Text
}
}
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Syllable filter: %d raw → %d visible (removed %d invisible consonants/sil)"),
RawCount, Filtered.Num(), RawCount - Filtered.Num());
TextVisemeSequence = MoveTemp(Filtered);
}
}
@ -2057,13 +2051,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::ApplyTextVisemesToQueue()
}
bTextVisemesApplied = true;
const float FinalRatio = static_cast<float>(ActiveFrames) / static_cast<float>(SeqNum);
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Applied %d syllable-visemes to %d active frames (of %d total) — %.1f frames/viseme (%.0fms/viseme)"),
SeqNum, ActiveFrames, VisemeQueue.Num(),
FinalRatio, FinalRatio * 32.0f);
}
// ─────────────────────────────────────────────────────────────────────────────
@ -2142,12 +2129,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::BuildVisemeTimeline()
bVisemeTimelineActive = true;
bTextVisemesApplied = true;
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Built viseme timeline: %d entries (from %d, max %d), audio=%.0fms, scale=%.2f → %.0fms/viseme avg"),
FinalSequence.Num(), TextVisemeSequence.Num(), MaxVisemes,
AudioDurationSec * 1000.0f, Scale,
(FinalSequence.Num() > 0) ? (CursorSec * 1000.0f / FinalSequence.Num()) : 0.0f);
}
void UPS_AI_ConvAgent_LipSyncComponent::AnalyzeSpectrum()
@ -2442,15 +2423,7 @@ void UPS_AI_ConvAgent_LipSyncComponent::ApplyMorphTargets()
// OverrideCurveValue() does NOT work because the AnimGraph resets curves
// before the Control Rig reads them. The custom AnimNode injects curves
// from within the evaluation pipeline where they persist.
static bool bLoggedOnce = false;
if (!bLoggedOnce)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("MetaHuman curve mode: lip sync curves will be injected by the "
"PS AI ConvAgent Lip Sync AnimNode in the Face AnimBP. "
"Ensure the node is placed before the Control Rig in the AnimGraph."));
bLoggedOnce = true;
}
// Data is read by the AnimNode via GetCurrentBlendshapes().
// Data is read by the AnimNode via GetCurrentBlendshapes() — nothing to do here.
}
else

View File

@ -66,7 +66,6 @@ void UPS_AI_ConvAgent_MicrophoneCaptureComponent::StartCapture()
AudioCapture.StartStream();
bCapturing = true;
UE_LOG(LogPS_AI_ConvAgent_Mic, Log, TEXT("Audio capture started."));
}
void UPS_AI_ConvAgent_MicrophoneCaptureComponent::StopCapture()
@ -76,7 +75,6 @@ void UPS_AI_ConvAgent_MicrophoneCaptureComponent::StopCapture()
AudioCapture.StopStream();
AudioCapture.CloseStream();
bCapturing = false;
UE_LOG(LogPS_AI_ConvAgent_Mic, Log, TEXT("Audio capture stopped."));
}
// ─────────────────────────────────────────────────────────────────────────────

View File

@ -128,8 +128,7 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendUserTurnStart()
bFirstAudioResponseLogged = false;
bAgentResponseStartedFired = false;
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("[T+%.2fs] User turn started — mic open, audio chunks will follow."), T);
// No log here — turn start is implicit from audio chunks following.
}
void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendUserTurnEnd()
@ -145,8 +144,7 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendUserTurnEnd()
// in a loop: part arrives → event → StopListening → SendUserTurnEnd → flag reset → part arrives → loop.
// The flag is only reset in SendUserTurnStart() at the beginning of a new user turn.
const double T = UserTurnEndTime - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("[T+%.2fs] User turn ended — server VAD silence detection started (turn_timeout=1s)."), T);
// No log here — turn end is tracked internally for latency measurement.
}
void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendTextMessage(const FString& Text)
@ -169,8 +167,6 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendInterrupt()
{
if (!IsConnected()) return;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("Sending interrupt."));
TSharedPtr<FJsonObject> Msg = MakeShareable(new FJsonObject());
Msg->SetStringField(TEXT("type"), PS_AI_ConvAgent_MessageType_ElevenLabs::Interrupt);
SendJsonMessage(Msg);
@ -261,7 +257,13 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::OnWsConnected()
FString InitJson;
TSharedRef<TJsonWriter<>> InitWriter = TJsonWriterFactory<>::Create(&InitJson);
FJsonSerializer::Serialize(InitMsg.ToSharedRef(), InitWriter);
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("Sending initiation: %s"), *InitJson);
{
const UPS_AI_ConvAgent_Settings_ElevenLabs* S = FPS_AI_ConvAgentModule::Get().GetSettings();
if (S->bVerboseLogging)
{
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Verbose, TEXT("Sending initiation: %s"), *InitJson);
}
}
WebSocket->Send(InitJson);
}
@ -314,8 +316,14 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::OnWsMessage(const FString& Mess
return;
}
// Log every non-ping message type received from the server.
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("Received message type: %s"), *MsgType);
// Message type log gated behind bVerboseLogging to avoid output spam.
{
const UPS_AI_ConvAgent_Settings_ElevenLabs* S = FPS_AI_ConvAgentModule::Get().GetSettings();
if (S->bVerboseLogging)
{
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Verbose, TEXT("Received: %s"), *MsgType);
}
}
if (MsgType == PS_AI_ConvAgent_MessageType_ElevenLabs::ConversationInitiation)
{
@ -323,47 +331,22 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::OnWsMessage(const FString& Mess
}
else if (MsgType == PS_AI_ConvAgent_MessageType_ElevenLabs::AudioResponse)
{
// Log time-to-first-audio: latency between end of user turn and first agent audio.
if (bWaitingForResponse && !bFirstAudioResponseLogged)
{
const double Now = FPlatformTime::Seconds();
const double T = Now - SessionStartTime;
const double LatencyFromTurnEnd = (Now - UserTurnEndTime) * 1000.0;
const double LatencyFromLastChunk = (Now - LastAudioChunkSentTime) * 1000.0;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Warning,
TEXT("[T+%.2fs] [LATENCY] First audio: %.0f ms after turn end (%.0f ms after last chunk)"),
T, LatencyFromTurnEnd, LatencyFromLastChunk);
bFirstAudioResponseLogged = true;
}
HandleAudioResponse(Root);
}
else if (MsgType == PS_AI_ConvAgent_MessageType_ElevenLabs::UserTranscript)
{
// Log transcription latency.
if (bWaitingForResponse)
{
const double Now = FPlatformTime::Seconds();
const double T = Now - SessionStartTime;
const double LatencyFromTurnEnd = (Now - UserTurnEndTime) * 1000.0;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Warning,
TEXT("[T+%.2fs] [LATENCY] User transcript: %.0f ms after turn end"),
T, LatencyFromTurnEnd);
bWaitingForResponse = false;
}
HandleTranscript(Root);
}
else if (MsgType == PS_AI_ConvAgent_MessageType_ElevenLabs::AgentResponse)
{
// Log agent text response latency.
if (UserTurnEndTime > 0.0)
{
const double Now = FPlatformTime::Seconds();
const double T = Now - SessionStartTime;
const double LatencyFromTurnEnd = (Now - UserTurnEndTime) * 1000.0;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Warning,
TEXT("[T+%.2fs] [LATENCY] Agent text response: %.0f ms after turn end"),
T, LatencyFromTurnEnd);
}
HandleAgentResponse(Root);
}
else if (MsgType == PS_AI_ConvAgent_MessageType_ElevenLabs::AgentChatResponsePart)
@ -575,18 +558,9 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::HandleAgentChatResponsePart(con
// of the previous response).
if (LastInterruptEventId > 0)
{
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log,
TEXT("New generation started — resetting LastInterruptEventId (was %d)."),
LastInterruptEventId);
LastInterruptEventId = 0;
}
const double Now = FPlatformTime::Seconds();
const double T = Now - SessionStartTime;
const double LatencyFromTurnEnd = UserTurnEndTime > 0.0 ? (Now - UserTurnEndTime) * 1000.0 : 0.0;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log,
TEXT("[T+%.2fs] Agent started generating (%.0f ms after turn end — includes VAD silence timeout + LLM start)."),
T, LatencyFromTurnEnd);
OnAgentResponseStarted.Broadcast();
}
@ -642,7 +616,6 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::HandleInterruption(const TShare
}
}
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("Agent interrupted (server ack, LastInterruptEventId=%d)."), LastInterruptEventId);
OnInterrupted.Broadcast();
}
@ -681,10 +654,6 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::HandleClientToolCall(const TSha
}
}
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("[T+%.2fs] Client tool call: %s (id=%s, %d params)"),
T, *ToolCall.ToolName, *ToolCall.ToolCallId, ToolCall.Parameters.Num());
OnClientToolCall.Broadcast(ToolCall);
}
@ -698,10 +667,6 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::SendClientToolResult(const FStr
Msg->SetStringField(TEXT("result"), Result);
Msg->SetBoolField(TEXT("is_error"), bIsError);
SendJsonMessage(Msg);
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_WS_ElevenLabs, Log, TEXT("[T+%.2fs] Sent client_tool_result for %s: %s (error=%s)"),
T, *ToolCallId, *Result, bIsError ? TEXT("true") : TEXT("false"));
}
void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::HandlePing(const TSharedPtr<FJsonObject>& Root)