Add ForceDisableConversation, ActionSet data asset, passive gaze, and debug HUD improvements

- ForceDisableConversation/ForceEnableConversation: disable and re-enable agent
  conversation, with blend-out monitoring and an OnReadyForAction event (carrying
  an ActionName param)
- ActionSet data asset: configurable action list per agent with editor
  customization (Update All Agents button, custom detail panel)
- Passive gaze by proximity: nearby non-selected agents track the player
  with configurable head+eyes and body checkboxes (bAutoPassiveGaze,
  bPassiveGazeHeadEyes, bPassiveGazeBody)
- Retained gaze on conversation switch now uses the same passive gaze config
- OnPassiveGazeStarted/OnPassiveGazeStopped events on ElevenLabsComponent
- Fix debug HUD key collisions: per-actor key ranges prevent multi-agent
  HUD flickering, add actor name to all HUD titles
- Fix retained gaze bug: re-activate gaze after ExecuteLeave, whose
  ApplyConversationGaze call would otherwise leave it disabled
- Safety timeout (5s) for blend-out monitoring
- Guard on AttachGazeTarget when conversation is disabled

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
j.foucher 2026-03-11 09:59:22 +01:00
parent eaa52a5c5f
commit aea02abe89
28 changed files with 1496 additions and 71 deletions

View File

@ -0,0 +1,3 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"

View File

@ -531,15 +531,15 @@ void UPS_AI_ConvAgent_BodyExpressionComponent::DrawDebugHUD() const
? TEXT("---")
: FString::Printf(TEXT("%s (%.1fs ago)"), *LastEventName, EventAge);
// Use key offset to avoid colliding with other debug messages
// Keys 2000-2010 reserved for BodyExpression
const int32 BaseKey = 2000;
// Per-actor key range: stride 150 per actor, offset 0 for BodyExpression
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 0;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FColor WarnColor = FColor::Yellow;
const FString OwnerName = GetOwner()->GetName();
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime, MainColor,
FString::Printf(TEXT("=== BODY EXPR: %s ==="), *StateStr));
FString::Printf(TEXT("=== BODY EXPR [%s]: %s ==="), *OwnerName, *StateStr));
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime, MainColor,
FString::Printf(TEXT(" ActivationAlpha: %.3f (target: %s)"),

View File

@ -5,6 +5,7 @@
#include "PS_AI_ConvAgent_MicrophoneCaptureComponent.h"
#include "PS_AI_ConvAgent_GazeComponent.h"
#include "PS_AI_ConvAgent_FacialExpressionComponent.h"
#include "PS_AI_ConvAgent_BodyExpressionComponent.h"
#include "PS_AI_ConvAgent_LipSyncComponent.h"
#include "PS_AI_ConvAgent_InteractionSubsystem.h"
#include "PS_AI_ConvAgent_InteractionComponent.h"
@ -100,6 +101,61 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
// ── ForceDisableConversation blend-out monitoring ─────────────────────
// After ForceDisableConversation(), sub-components are blending their
// CurrentActiveAlpha to 0. Once all are at (near) zero, fire OnReadyForAction
// so the game can start the physical action.
if (bWaitingForBlendOut)
{
BlendOutElapsedTime += DeltaTime;
static constexpr float NeutralThreshold = 0.01f;
bool bAllNeutral = true;
bool bTimedOut = (BlendOutElapsedTime >= BlendOutTimeoutSeconds);
AActor* Owner = GetOwner();
if (Owner)
{
if (auto* Gaze = Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
{
if (Gaze->GetActiveAlpha() > NeutralThreshold) bAllNeutral = false;
}
if (auto* LipSync = Owner->FindComponentByClass<UPS_AI_ConvAgent_LipSyncComponent>())
{
if (LipSync->GetActiveAlpha() > NeutralThreshold) bAllNeutral = false;
}
if (auto* FacialExpr = Owner->FindComponentByClass<UPS_AI_ConvAgent_FacialExpressionComponent>())
{
if (FacialExpr->GetActiveAlpha() > NeutralThreshold) bAllNeutral = false;
}
if (auto* BodyExpr = Owner->FindComponentByClass<UPS_AI_ConvAgent_BodyExpressionComponent>())
{
if (BodyExpr->GetActiveAlpha() > NeutralThreshold) bAllNeutral = false;
}
}
if (bAllNeutral)
{
bWaitingForBlendOut = false;
BlendOutElapsedTime = 0.0f;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("ForceDisableConversation: all components blended to neutral — firing OnReadyForAction for action '%s'."),
*PendingActionName);
OnReadyForAction.Broadcast(PendingActionName);
}
else if (bTimedOut)
{
// Safety timeout — some component didn't reach alpha 0 in time.
// Fire OnReadyForAction anyway to avoid blocking the game action forever.
bWaitingForBlendOut = false;
BlendOutElapsedTime = 0.0f;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("ForceDisableConversation: blend-out timed out after %.1fs — not all components at neutral. Firing OnReadyForAction for action '%s' anyway."),
BlendOutTimeoutSeconds, *PendingActionName);
OnReadyForAction.Broadcast(PendingActionName);
}
}
// Response timeout: if the server hasn't started generating within ResponseTimeoutSeconds
// after the user stopped speaking, notify Blueprint so it can react (e.g. show "try again").
if (bWaitingForAgentResponse && ResponseTimeoutSeconds > 0.0f && TurnEndTime > 0.0)
@ -344,6 +400,13 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation()
{
if (bConversationDisabledByAction)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("StartConversation: blocked — conversation disabled by ForceDisableConversation(). Call ForceEnableConversation() first."));
return;
}
if (GetOwnerRole() == ROLE_Authority)
{
// Standalone / listen-server: join via the local player controller.
@ -405,6 +468,19 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation_Internal()
WebSocketProxy->bSpeculativeTurn = AgentConfig->bSpeculativeTurn;
}
// Merge dynamic variables: per-agent defaults + global context from Settings.
// These resolve {{variable_name}} placeholders in the agent's system prompt.
WebSocketProxy->DynamicVariables.Empty();
if (AgentConfig)
{
WebSocketProxy->DynamicVariables = AgentConfig->DefaultDynamicVariables;
}
const UPS_AI_ConvAgent_Settings_ElevenLabs* Settings = FPS_AI_ConvAgentModule::Get().GetSettings();
if (Settings && !Settings->GlobalContextPrompt.IsEmpty())
{
WebSocketProxy->DynamicVariables.Add(TEXT("global_context"), Settings->GlobalContextPrompt);
}
// Resolve AgentID by priority: AgentConfig > component string > project default.
FString ResolvedAgentID = AgentID;
if (AgentConfig && !AgentConfig->AgentID.IsEmpty())
@ -701,6 +777,104 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::InterruptAgent()
StopAgentAudio();
}
// ─────────────────────────────────────────────────────────────────────────────
// ForceDisableConversation / ForceEnableConversation
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Forcibly shuts down the conversation so a physical game action can run.
 *
 * Marks the component disabled (bConversationDisabledByAction) and starts
 * blend-out monitoring (bWaitingForBlendOut / BlendOutElapsedTime); the
 * component's TickComponent then watches the sub-components' active alphas and
 * broadcasts OnReadyForAction(ActionName) once all are near zero, or after the
 * BlendOutTimeoutSeconds safety timeout.
 *
 * Idempotent: calling again while already disabled only logs a warning.
 * Re-enable with ForceEnableConversation().
 *
 * @param ActionName  Name of the pending game action; stored in
 *                    PendingActionName and passed to OnReadyForAction.
 */
void UPS_AI_ConvAgent_ElevenLabsComponent::ForceDisableConversation(const FString& ActionName)
{
// Guard: already disabled — keep the original pending action, just warn.
if (bConversationDisabledByAction)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("ForceDisableConversation: already disabled (pending action: %s)."), *PendingActionName);
return;
}
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("ForceDisableConversation: shutting down conversation for action '%s', blending to neutral."), *ActionName);
// Arm the blend-out monitor before tearing anything down, so TickComponent
// starts tracking alphas on the very next tick.
PendingActionName = ActionName;
bConversationDisabledByAction = true;
bWaitingForBlendOut = true;
BlendOutElapsedTime = 0.0f;
// 1. End the conversation (closes WebSocket, stops audio, removes pawns).
// In persistent session mode EndConversation keeps the WebSocket open,
// but it still fires OnAgentDisconnected which deactivates sub-components.
EndConversation();
// 2. Also force-close the WebSocket in persistent mode so the agent is truly idle.
// This prevents any stale audio or events from arriving during the action.
if (bPersistentSession && WebSocketProxy)
{
// Mark intentional so reconnect logic does not treat this as a failure.
bIntentionalDisconnect = true;
WebSocketProxy->Disconnect();
WebSocketProxy = nullptr;
}
// 3. Reset emotion to neutral so FacialExpression blends back to idle.
if (CurrentEmotion != EPS_AI_ConvAgent_Emotion::Neutral)
{
CurrentEmotion = EPS_AI_ConvAgent_Emotion::Neutral;
CurrentEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium;
OnAgentEmotionChanged.Broadcast(CurrentEmotion, CurrentEmotionIntensity);
}
// 4. Explicitly force bActive = false on ALL sub-components.
// The delegate chain (OnAgentDisconnected) may not fire if no conversation
// was active, and InteractionComponent may have independently set
// Gaze->bActive = true. This guarantees the interpolation target is 0.
AActor* Owner = GetOwner();
if (Owner)
{
if (auto* Gaze = Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
{
Gaze->bActive = false;
Gaze->TargetActor = nullptr;
Gaze->bEnableBodyTracking = false;
}
if (auto* LipSync = Owner->FindComponentByClass<UPS_AI_ConvAgent_LipSyncComponent>())
{
LipSync->bActive = false;
}
if (auto* FacialExpr = Owner->FindComponentByClass<UPS_AI_ConvAgent_FacialExpressionComponent>())
{
FacialExpr->bActive = false;
}
if (auto* BodyExpr = Owner->FindComponentByClass<UPS_AI_ConvAgent_BodyExpressionComponent>())
{
BodyExpr->bActive = false;
}
}
// 5. Sub-components now have bActive = false. Their CurrentActiveAlpha
// will interpolate to 0 over ActivationBlendDuration in their TickComponent.
// We monitor completion in our own TickComponent (bWaitingForBlendOut)
// with a safety timeout in case any component stalls.
}
/**
 * Re-enables the conversation after a ForceDisableConversation() call.
 * Clears the disabled flag and any in-flight blend-out monitoring state.
 * No-op (with a warning) when the conversation is not currently disabled.
 */
void UPS_AI_ConvAgent_ElevenLabsComponent::ForceEnableConversation()
{
    // Nothing to do unless a force-disable is actually in effect.
    if (!bConversationDisabledByAction)
    {
        UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
            TEXT("ForceEnableConversation: not currently disabled."));
        return;
    }

    // Reset the force-disable and blend-out monitoring state.
    BlendOutElapsedTime = 0.0f;
    bWaitingForBlendOut = false;
    bConversationDisabledByAction = false;

    UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
        TEXT("ForceEnableConversation: conversation re-enabled."));

    // The InteractionComponent will naturally re-select this agent
    // and auto-start on the next tick (if bAutoStartConversation is true
    // and the player is still looking at it).
}
void UPS_AI_ConvAgent_ElevenLabsComponent::FeedExternalAudio(const TArray<float>& FloatPCM)
{
// Same logic as OnMicrophoneDataCaptured but called from an external source
@ -1259,6 +1433,30 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleClientToolCall(const FPS_AI_Con
false);
}
}
else if (ToolCall.ToolName == TEXT("perform_action"))
{
// Built-in handler for the "perform_action" tool: parse action name, auto-respond, broadcast.
const FString* ActionStr = ToolCall.Parameters.Find(TEXT("action"));
FString ActionName = ActionStr ? *ActionStr : TEXT("unknown");
if (bDebug)
{
const double T = FPlatformTime::Seconds() - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[T+%.2fs] Agent action requested: %s"), T, *ActionName);
}
OnAgentActionRequested.Broadcast(ActionName);
// Auto-respond so the agent can continue speaking.
if (WebSocketProxy)
{
WebSocketProxy->SendClientToolResult(
ToolCall.ToolCallId,
FString::Printf(TEXT("action '%s' executed"), *ActionName),
false);
}
}
else
{
// Unknown tool — forward to Blueprint for custom handling.
@ -1896,6 +2094,14 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::ServerJoinConversation_Implementation
APawn* Pawn = RequestingPlayer ? RequestingPlayer->GetPawn() : nullptr;
if (!Pawn) return;
// Block join while force-disabled (ForceDisableConversation active).
if (bConversationDisabledByAction)
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[NET] ServerJoinConversation: blocked — conversation disabled by ForceDisableConversation()."));
return;
}
// Already connected? No-op (idempotent).
if (NetConnectedPawns.Contains(Pawn)) return;
@ -2456,17 +2662,19 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2040;
// Per-actor key range: stride 150 per actor, offset 60 for ElevenLabs
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 60;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FColor WarnColor = FColor::Yellow;
const FColor GoodColor = FColor::Green;
const FString OwnerName = GetOwner()->GetName();
const bool bConnected = IsConnected();
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime,
bConnected ? GoodColor : FColor::Red,
FString::Printf(TEXT("=== ELEVENLABS: %s ==="),
FString::Printf(TEXT("=== ELEVENLABS [%s]: %s ==="), *OwnerName,
bConnected ? TEXT("CONNECTED") : TEXT("DISCONNECTED")));
// Session info
@ -2530,6 +2738,38 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::DrawDebugHUD() const
FString::Printf(TEXT(" Reconnect: %d/%d attempts%s"),
ReconnectAttemptCount, MaxReconnectAttempts,
bWantsReconnect ? TEXT(" (ACTIVE)") : TEXT("")));
// ForceDisable state
if (bConversationDisabledByAction || bWaitingForBlendOut)
{
const FColor DisableColor = FColor::Orange;
GEngine->AddOnScreenDebugMessage(BaseKey + 9, DisplayTime, DisableColor,
FString::Printf(TEXT(" FORCE DISABLED — action: '%s' blendOut: %s (%.1fs)"),
*PendingActionName,
bWaitingForBlendOut ? TEXT("WAITING") : TEXT("DONE"),
BlendOutElapsedTime));
// Show per-component alpha values while blending
if (bWaitingForBlendOut)
{
AActor* Owner = GetOwner();
if (Owner)
{
float GazeA = 0.f, LipA = 0.f, FaceA = 0.f, BodyA = 0.f;
if (auto* G = Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
GazeA = G->GetActiveAlpha();
if (auto* L = Owner->FindComponentByClass<UPS_AI_ConvAgent_LipSyncComponent>())
LipA = L->GetActiveAlpha();
if (auto* F = Owner->FindComponentByClass<UPS_AI_ConvAgent_FacialExpressionComponent>())
FaceA = F->GetActiveAlpha();
if (auto* B = Owner->FindComponentByClass<UPS_AI_ConvAgent_BodyExpressionComponent>())
BodyA = B->GetActiveAlpha();
GEngine->AddOnScreenDebugMessage(BaseKey + 10, DisplayTime, DisableColor,
FString::Printf(TEXT(" Alpha — Gaze:%.2f Lip:%.2f Face:%.2f Body:%.2f"),
GazeA, LipA, FaceA, BodyA));
}
}
}
}
// ─────────────────────────────────────────────────────────────────────────────
@ -2572,8 +2812,8 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::DrawLatencyHUD() const
{
if (!GEngine) return;
// Separate BaseKey range so it never collides with DrawDebugHUD
const int32 BaseKey = 93700;
// Per-actor key range: stride 150 per actor, offset 105 for Latency
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 105;
const float DisplayTime = 1.0f; // long enough to avoid flicker between ticks
const FColor TitleColor = FColor::Cyan;

View File

@ -426,16 +426,18 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2010;
// Per-actor key range: stride 150 per actor, offset 15 for FacialExpression
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 15;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FColor WarnColor = FColor::Yellow;
const FString OwnerName = GetOwner()->GetName();
// State label
FString StateStr = bActive ? TEXT("ACTIVE") : TEXT("INACTIVE");
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime, MainColor,
FString::Printf(TEXT("=== FACIAL EXPR: %s ==="), *StateStr));
FString::Printf(TEXT("=== FACIAL EXPR [%s]: %s ==="), *OwnerName, *StateStr));
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime, MainColor,
FString::Printf(TEXT(" ActivationAlpha: %.3f (target: %s)"),

View File

@ -685,14 +685,16 @@ void UPS_AI_ConvAgent_GazeComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2020;
// Per-actor key range: stride 150 per actor, offset 30 for Gaze
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 30;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FString OwnerName = GetOwner()->GetName();
FString StateStr = bActive ? TEXT("ACTIVE") : TEXT("INACTIVE");
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime, MainColor,
FString::Printf(TEXT("=== GAZE: %s ==="), *StateStr));
FString::Printf(TEXT("=== GAZE [%s]: %s ==="), *OwnerName, *StateStr));
FString TargetName = TargetActor ? TargetActor->GetName() : TEXT("(none)");
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime, MainColor,

View File

@ -150,6 +150,98 @@ void UPS_AI_ConvAgent_InteractionComponent::TickComponent(float DeltaTime, ELeve
SetSelectedAgent(BestAgent);
}
// ── Deferred leave: wait for old agent to finish speaking ────────────
// After a mid-conversation switch, the old agent keeps speaking and
// looking at the player. Once audio finishes, we leave the conversation
// but retain gaze until the player walks out of range.
if (PendingLeaveAgent.IsValid())
{
UPS_AI_ConvAgent_ElevenLabsComponent* Pending = PendingLeaveAgent.Get();
if (!Pending)
{
// Agent was destroyed — clean up.
PendingLeaveAgent.Reset();
}
else
{
bool bForceLeave = false;
// Check distance — if the player walked away, force-leave immediately.
if (AActor* AgentActor = Pending->GetOwner())
{
FVector ViewLoc, ViewDir;
GetPawnViewPoint(ViewLoc, ViewDir);
const float DistSq = FVector::DistSquared(ViewLoc, AgentActor->GetActorLocation());
bForceLeave = DistSq > FMath::Square(MaxInteractionDistance);
}
if (bForceLeave)
{
// Player out of range — force stop everything.
ExecuteLeave(Pending);
CleanupRetainedGaze(Pending);
PendingLeaveAgent.Reset();
}
else if (!Pending->IsAgentSpeaking())
{
// Agent finished speaking — leave conversation, retain gaze.
ExecuteLeave(Pending);
GazeRetainedAgent = Pending;
PendingLeaveAgent.Reset();
// Re-activate passive gaze using configured checkboxes.
// ExecuteLeave killed the gaze — re-enable passively.
if (bAutoManageGaze)
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(Pending))
{
Gaze->TargetActor = GetOwner();
Gaze->bActive = bPassiveGazeHeadEyes;
Gaze->bEnableBodyTracking = bPassiveGazeBody;
}
}
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Select, Log,
TEXT("Deferred leave completed (agent stopped speaking): %s → retaining gaze (head+eyes)"),
Pending->GetOwner() ? *Pending->GetOwner()->GetName() : TEXT("(null)"));
}
}
}
}
// ── Gaze retention: clear gaze when player walks out of range ────────
if (GazeRetainedAgent.IsValid())
{
UPS_AI_ConvAgent_ElevenLabsComponent* Retained = GazeRetainedAgent.Get();
if (!Retained)
{
GazeRetainedAgent.Reset();
}
else if (AActor* AgentActor = Retained->GetOwner())
{
FVector ViewLoc, ViewDir;
GetPawnViewPoint(ViewLoc, ViewDir);
const float DistSq = FVector::DistSquared(ViewLoc, AgentActor->GetActorLocation());
if (DistSq > FMath::Square(MaxInteractionDistance))
{
CleanupRetainedGaze(Retained);
GazeRetainedAgent.Reset();
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Select, Log,
TEXT("Retained gaze cleared (player out of range): %s"),
*AgentActor->GetName());
}
}
}
}
// ── Passive gaze: nearby agents track the player ─────────────────────
UpdatePassiveGaze();
// ── On-screen debug HUD ───────────────────────────────────────────────
{
const int32 CVarVal = CVarDebugInteraction.GetValueOnGameThread();
@ -162,6 +254,103 @@ void UPS_AI_ConvAgent_InteractionComponent::TickComponent(float DeltaTime, ELeve
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Passive gaze (proximity awareness)
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_InteractionComponent::UpdatePassiveGaze()
{
// Passive gaze disabled or gaze management off — clean up any active passives.
if (!bAutoPassiveGaze || !bAutoManageGaze)
{
for (auto It = PassiveGazeAgents.CreateIterator(); It; ++It)
{
if (UPS_AI_ConvAgent_ElevenLabsComponent* Agent = It->Get())
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(Agent))
{
if (Gaze->TargetActor == GetOwner())
{
Gaze->TargetActor = nullptr;
}
}
}
}
PassiveGazeAgents.Empty();
return;
}
UWorld* World = GetWorld();
if (!World) return;
UPS_AI_ConvAgent_InteractionSubsystem* Subsystem = World->GetSubsystem<UPS_AI_ConvAgent_InteractionSubsystem>();
if (!Subsystem) return;
TArray<UPS_AI_ConvAgent_ElevenLabsComponent*> Agents = Subsystem->GetRegisteredAgents();
FVector ViewLocation, ViewDirection;
GetPawnViewPoint(ViewLocation, ViewDirection);
const float MaxDistSq = MaxInteractionDistance * MaxInteractionDistance;
// Build the desired set of passive agents.
TSet<TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent>> DesiredPassive;
for (UPS_AI_ConvAgent_ElevenLabsComponent* Agent : Agents)
{
// Skip agents already managed by other systems.
if (Agent == SelectedAgent.Get()) continue;
if (Agent == GazeRetainedAgent.Get()) continue;
if (Agent == PendingLeaveAgent.Get()) continue;
if (Agent->IsConversationDisabled()) continue;
AActor* AgentActor = Agent->GetOwner();
if (!AgentActor) continue;
const float DistSq = FVector::DistSquared(ViewLocation, AgentActor->GetActorLocation());
if (DistSq > MaxDistSq) continue;
DesiredPassive.Add(Agent);
}
// Activate gaze on newly passive agents.
for (const TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent>& Weak : DesiredPassive)
{
if (!PassiveGazeAgents.Contains(Weak))
{
if (UPS_AI_ConvAgent_ElevenLabsComponent* Agent = Weak.Get())
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(Agent))
{
Gaze->TargetActor = GetOwner();
Gaze->bActive = bPassiveGazeHeadEyes;
Gaze->bEnableBodyTracking = bPassiveGazeBody;
}
Agent->OnPassiveGazeStarted.Broadcast();
}
}
}
// Deactivate gaze on agents that are no longer passive.
for (auto It = PassiveGazeAgents.CreateIterator(); It; ++It)
{
if (!DesiredPassive.Contains(*It))
{
if (UPS_AI_ConvAgent_ElevenLabsComponent* Agent = It->Get())
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(Agent))
{
if (Gaze->TargetActor == GetOwner())
{
Gaze->TargetActor = nullptr;
}
}
Agent->OnPassiveGazeStopped.Broadcast();
}
}
}
PassiveGazeAgents = MoveTemp(DesiredPassive);
}
// ─────────────────────────────────────────────────────────────────────────────
// Selection evaluation
// ─────────────────────────────────────────────────────────────────────────────
@ -229,9 +418,27 @@ UPS_AI_ConvAgent_ElevenLabsComponent* UPS_AI_ConvAgent_InteractionComponent::Eva
float AngleDeg = 0.0f;
if (bRequireLookAt)
{
const FVector DirToAgent = ToAgent.GetSafeNormal();
const float Dot = FVector::DotProduct(ViewDirection, DirToAgent);
AngleDeg = FMath::RadiansToDegrees(FMath::Acos(FMath::Clamp(Dot, -1.0f, 1.0f)));
// Project both vectors onto the horizontal (XY) plane so that
// vertical height difference between the player and the agent
// does not inflate the selection angle.
FVector ViewDirXY = ViewDirection;
ViewDirXY.Z = 0.0f;
ViewDirXY = ViewDirXY.GetSafeNormal();
FVector DirToAgentXY = ToAgent;
DirToAgentXY.Z = 0.0f;
DirToAgentXY = DirToAgentXY.GetSafeNormal();
if (ViewDirXY.IsNearlyZero() || DirToAgentXY.IsNearlyZero())
{
// Degenerate case: looking straight up/down or agent exactly above/below.
AngleDeg = 180.0f;
}
else
{
const float Dot = FVector::DotProduct(ViewDirXY, DirToAgentXY);
AngleDeg = FMath::RadiansToDegrees(FMath::Acos(FMath::Clamp(Dot, -1.0f, 1.0f)));
}
// Use wider sticky angle for the currently selected agent.
const float ConeThreshold = (Agent == CurrentAgent) ? SelectionStickyAngle : ViewConeHalfAngle;
@ -327,62 +534,91 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
// ── Conversation: leave shared conversation if auto-started ─────
// Use Leave instead of End so other players can keep talking to the agent.
// When switching agents mid-conversation, we defer the Leave and retain
// the gaze so the old agent can finish speaking and keep looking at the
// player until they walk out of interaction range.
if (bAutoStartConversation && (OldAgent->IsConnected() || OldAgent->bNetIsConversing))
{
if (GetOwnerRole() == ROLE_Authority || (GetWorld() && GetWorld()->GetNetMode() == NM_Standalone))
// If a previous pending leave exists, force-complete it now.
if (UPS_AI_ConvAgent_ElevenLabsComponent* PrevPending = PendingLeaveAgent.Get())
{
APlayerController* PC = nullptr;
if (APawn* Pawn = Cast<APawn>(GetOwner()))
ExecuteLeave(PrevPending);
CleanupRetainedGaze(PrevPending);
PendingLeaveAgent.Reset();
}
// Similarly, clean up any existing retained gaze.
if (UPS_AI_ConvAgent_ElevenLabsComponent* PrevRetained = GazeRetainedAgent.Get())
{
CleanupRetainedGaze(PrevRetained);
GazeRetainedAgent.Reset();
}
if (OldAgent->IsAgentSpeaking())
{
// Agent is still speaking — defer the Leave.
// Gaze and body tracking stay active so the agent keeps
// looking at the player while finishing its sentence.
PendingLeaveAgent = OldAgent;
if (bDebug)
{
PC = Cast<APlayerController>(Pawn->GetController());
}
if (PC)
{
OldAgent->ServerLeaveConversation_Implementation(PC);
UE_LOG(LogPS_AI_ConvAgent_Select, Log,
TEXT(" Deferred leave (agent still speaking): %s"),
OldAgent->GetOwner() ? *OldAgent->GetOwner()->GetName() : TEXT("(null)"));
}
}
else
{
ServerRelayLeaveConversation(OldAgent->GetOwner());
// Agent is silent — leave immediately, but retain gaze
// until the player walks out of interaction range.
ExecuteLeave(OldAgent);
GazeRetainedAgent = OldAgent;
// Re-activate passive gaze using configured checkboxes.
// ExecuteLeave → ServerLeaveConversation → ApplyConversationGaze()
// already killed bActive and cleared TargetActor. Override that:
// the agent should keep looking at the player passively.
if (bAutoManageGaze)
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(OldAgent))
{
Gaze->TargetActor = GetOwner();
Gaze->bActive = bPassiveGazeHeadEyes;
Gaze->bEnableBodyTracking = bPassiveGazeBody;
}
}
}
}
else if (bAutoManageListening)
{
OldAgent->StopListening();
}
// Disable body tracking on deselection — but only if we were the
// one who set the TargetActor. The conversation system (OnRep or
// server ApplyConversationGaze) may have set TargetActor to a
// different player; don't overwrite that.
if (bAutoManageGaze)
{
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(OldAgent))
// No conversation was active — still detach gaze normally.
if (bAutoManageGaze)
{
if (Gaze->TargetActor == GetOwner())
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(OldAgent))
{
Gaze->bEnableBodyTracking = false;
if (Gaze->TargetActor == GetOwner())
{
Gaze->bEnableBodyTracking = false;
}
}
}
}
// ── Gaze: detach ────────────────────────────────────────────────
if (bAutoManageGaze && World)
{
// Cancel any pending gaze attach — agent left before attach fired.
World->GetTimerManager().ClearTimer(GazeAttachTimerHandle);
if (GazeDetachDelay > 0.0f)
if (bAutoManageGaze && World)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakOld = OldAgent;
World->GetTimerManager().SetTimer(GazeDetachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget, WeakOld),
GazeDetachDelay, false);
}
else
{
DetachGazeTarget(OldAgent);
World->GetTimerManager().ClearTimer(GazeAttachTimerHandle);
if (GazeDetachDelay > 0.0f)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakOld = OldAgent;
World->GetTimerManager().SetTimer(GazeDetachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget, WeakOld),
GazeDetachDelay, false);
}
else
{
DetachGazeTarget(OldAgent);
}
}
}
@ -392,6 +628,26 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
// Select new agent.
if (NewAgent)
{
// If the player re-selects an agent that was pending leave or had retained
// gaze, cancel the deferred state — the player is coming back to it.
if (PendingLeaveAgent.Get() == NewAgent)
{
PendingLeaveAgent.Reset();
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Select, Log,
TEXT(" Cancelled pending leave (player re-selected): %s"),
NewAgent->GetOwner() ? *NewAgent->GetOwner()->GetName() : TEXT("(null)"));
}
}
if (GazeRetainedAgent.Get() == NewAgent)
{
GazeRetainedAgent.Reset();
}
// Remove from passive gaze set — now actively managed.
PassiveGazeAgents.Remove(NewAgent);
SelectedAgent = NewAgent;
if (bDebug)
@ -635,6 +891,12 @@ void UPS_AI_ConvAgent_InteractionComponent::AttachGazeTarget(
UPS_AI_ConvAgent_ElevenLabsComponent* AgentPtr = Agent.Get();
if (!AgentPtr) return;
// Don't re-activate gaze while conversation is force-disabled.
// ForceDisableConversation() explicitly sets bActive = false on sub-components
// and monitors blend-out. Re-activating gaze here would prevent OnReadyForAction
// from ever firing.
if (AgentPtr->IsConversationDisabled()) return;
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(AgentPtr))
{
Gaze->TargetActor = GetOwner();
@ -688,6 +950,81 @@ void UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget(
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Deferred leave & gaze retention
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Performs the actual conversation Leave for the given agent, routing through
 * the correct network path: on authority (or standalone) the server-side leave
 * is invoked directly with our player controller; on a remote client the
 * request is relayed via our own server RPC. Logs the leave when bDebug is set.
 *
 * @param Agent  Agent component to leave; null is a safe no-op.
 */
void UPS_AI_ConvAgent_InteractionComponent::ExecuteLeave(UPS_AI_ConvAgent_ElevenLabsComponent* Agent)
{
    if (!Agent) return;

    const bool bHasAuthority =
        GetOwnerRole() == ROLE_Authority || (GetWorld() && GetWorld()->GetNetMode() == NM_Standalone);

    if (bHasAuthority)
    {
        // Authority / standalone: resolve our player controller and call the
        // server implementation directly.
        APlayerController* OwnerPC = nullptr;
        if (APawn* OwnerPawn = Cast<APawn>(GetOwner()))
        {
            OwnerPC = Cast<APlayerController>(OwnerPawn->GetController());
        }
        if (OwnerPC)
        {
            Agent->ServerLeaveConversation_Implementation(OwnerPC);
        }
    }
    else
    {
        // Remote client: relay through the server.
        ServerRelayLeaveConversation(Agent->GetOwner());
    }

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("ExecuteLeave: %s"),
            Agent->GetOwner() ? *Agent->GetOwner()->GetName() : TEXT("(null)"));
    }
}
/**
 * Tears down a retained gaze on the given agent: stops body tracking (only if
 * this component owns the gaze target) and detaches the gaze target, honouring
 * GazeDetachDelay. Logs the cleanup when bDebug is set.
 *
 * @param Agent  Agent whose retained gaze should be cleaned up; null is a no-op.
 */
void UPS_AI_ConvAgent_InteractionComponent::CleanupRetainedGaze(UPS_AI_ConvAgent_ElevenLabsComponent* Agent)
{
    if (!Agent) return;

    if (bAutoManageGaze)
    {
        // Stop body tracking — but only if we were the one who set the target;
        // another player/system may own the gaze now.
        if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(Agent))
        {
            if (Gaze->TargetActor == GetOwner())
            {
                Gaze->bEnableBodyTracking = false;
            }
        }

        // Detach the gaze target, deferred by GazeDetachDelay when configured.
        if (UWorld* World = GetWorld())
        {
            if (GazeDetachDelay > 0.0f)
            {
                TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakAgent = Agent;
                World->GetTimerManager().SetTimer(GazeDetachTimerHandle,
                    FTimerDelegate::CreateUObject(this,
                        &UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget, WeakAgent),
                    GazeDetachDelay, false);
            }
            else
            {
                DetachGazeTarget(Agent);
            }
        }
    }

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("CleanupRetainedGaze: %s"),
            Agent->GetOwner() ? *Agent->GetOwner()->GetName() : TEXT("(null)"));
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// Mic routing
// ─────────────────────────────────────────────────────────────────────────────
@ -707,12 +1044,14 @@ void UPS_AI_ConvAgent_InteractionComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2060;
// Per-actor key range: stride 150 per actor, offset 90 for Interaction
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 90;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FString OwnerName = GetOwner()->GetName();
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime, MainColor,
TEXT("=== INTERACTION ==="));
FString::Printf(TEXT("=== INTERACTION [%s] ==="), *OwnerName));
UPS_AI_ConvAgent_ElevenLabsComponent* Agent = SelectedAgent.Get();
if (Agent)
@ -732,9 +1071,21 @@ void UPS_AI_ConvAgent_InteractionComponent::DrawDebugHUD() const
+ FVector(0.0f, 0.0f, AgentEyeLevelOffset);
FVector ToAgent = AgentLoc - ViewLoc;
Dist = ToAgent.Size();
FVector DirToAgent = ToAgent.GetSafeNormal();
Angle = FMath::RadiansToDegrees(
FMath::Acos(FMath::Clamp(FVector::DotProduct(ViewDir, DirToAgent), -1.0f, 1.0f)));
// Use horizontal-only (XY) angle — consistent with EvaluateBestAgent().
FVector ViewDirXY = ViewDir;
ViewDirXY.Z = 0.0f;
ViewDirXY = ViewDirXY.GetSafeNormal();
FVector DirToAgentXY = ToAgent;
DirToAgentXY.Z = 0.0f;
DirToAgentXY = DirToAgentXY.GetSafeNormal();
if (!ViewDirXY.IsNearlyZero() && !DirToAgentXY.IsNearlyZero())
{
Angle = FMath::RadiansToDegrees(
FMath::Acos(FMath::Clamp(FVector::DotProduct(ViewDirXY, DirToAgentXY), -1.0f, 1.0f)));
}
}
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime, MainColor,

View File

@ -2610,15 +2610,17 @@ void UPS_AI_ConvAgent_LipSyncComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2030;
// Per-actor key range: stride 150 per actor, offset 45 for LipSync
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 45;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FColor WarnColor = FColor::Yellow;
const FString OwnerName = GetOwner()->GetName();
FString StateStr = bActive ? TEXT("ACTIVE") : TEXT("INACTIVE");
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime, MainColor,
FString::Printf(TEXT("=== LIP SYNC: %s ==="), *StateStr));
FString::Printf(TEXT("=== LIP SYNC [%s]: %s ==="), *OwnerName, *StateStr));
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime,
bIsSpeaking ? FColor::Green : MainColor,

View File

@ -58,16 +58,18 @@ void UPS_AI_ConvAgent_MicrophoneCaptureComponent::DrawDebugHUD() const
{
if (!GEngine) return;
const int32 BaseKey = 2050;
// Per-actor key range: stride 150 per actor, offset 75 for MicrophoneCapture
const int32 BaseKey = 10000 + (GetOwner()->GetUniqueID() % 500) * 150 + 75;
const float DisplayTime = 1.0f;
const FColor MainColor = FColor::Cyan;
const FString OwnerName = GetOwner()->GetName();
const bool bCapt = bCapturing.load();
const bool bEchoSuppressed = EchoSuppressFlag && EchoSuppressFlag->load(std::memory_order_relaxed);
GEngine->AddOnScreenDebugMessage(BaseKey, DisplayTime,
bCapt ? FColor::Green : FColor::Red,
FString::Printf(TEXT("=== MIC: %s ==="),
FString::Printf(TEXT("=== MIC [%s]: %s ==="), *OwnerName,
bCapt ? TEXT("CAPTURING") : TEXT("STOPPED")));
GEngine->AddOnScreenDebugMessage(BaseKey + 1, DisplayTime, MainColor,

View File

@ -267,6 +267,18 @@ void UPS_AI_ConvAgent_WebSocket_ElevenLabsProxy::OnWsConnected()
InitMsg->SetStringField(TEXT("type"), PS_AI_ConvAgent_MessageType_ElevenLabs::ConversationClientData);
InitMsg->SetObjectField(TEXT("conversation_config_override"), ConversationConfigOverride);
// Send dynamic variables (global context + per-agent variables).
// These resolve {{variable_name}} placeholders in the agent's system prompt.
if (DynamicVariables.Num() > 0)
{
TSharedPtr<FJsonObject> DynVarsObj = MakeShareable(new FJsonObject());
for (const auto& Pair : DynamicVariables)
{
DynVarsObj->SetStringField(Pair.Key, Pair.Value);
}
InitMsg->SetObjectField(TEXT("dynamic_variables"), DynVarsObj);
}
// NOTE: We bypass SendJsonMessage() here intentionally.
// SendJsonMessage() guards on WebSocket->IsConnected(), but OnWsConnected fires
// during the handshake before IsConnected() returns true in some UE WS backends.

View File

@ -59,6 +59,17 @@ public:
UPROPERTY(Config, EditAnywhere, AdvancedDisplay, Category = "PS AI ConvAgent|ElevenLabs API")
bool bVerboseLogging = false;
/**
* Global context prompt shared by ALL agents.
* Use this for world-building, era, common rules that every character should know.
* Injected at conversation start via dynamic variables — changes take effect
* on the next conversation without re-syncing agents.
*/
UPROPERTY(Config, EditAnywhere, Category = "PS AI ConvAgent|Global Prompt",
meta = (MultiLine = "true",
ToolTip = "Context prompt shared by all agents.\nDescribe the world, era, common rules.\nChanges take effect on next conversation start (no re-sync needed)."))
FString GlobalContextPrompt;
/** Return the API base URL (https) for the selected region. */
FString GetAPIBaseURL() const
{

View File

@ -0,0 +1,53 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Engine/DataAsset.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.generated.h"
/**
 * Reusable set of physical actions that an agent can trigger during conversation.
 *
 * Create this as a Data Asset in the Content Browser
 * (right-click > Miscellaneous > Data Asset > PS AI ConvAgent Action Set),
 * define your actions once, then drag it into any Agent Config that should
 * use these actions.
 *
 * The same ActionSet can be shared by multiple agents — modify it once
 * and re-sync all agents to update them.
 */
UCLASS(BlueprintType, Blueprintable,
	DisplayName = "PS AI ConvAgent Action Set (ElevenLabs)")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_ActionSet_ElevenLabs : public UPrimaryDataAsset
{
	GENERATED_BODY()

public:
	/** List of actions available to agents using this set.
	 * Each action has a Name (identifier sent by the LLM) and a Description
	 * (guidance for when the LLM should use it).
	 * TitleProperty = "Name" makes each array entry show its Name in the editor. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Actions",
		meta = (TitleProperty = "Name",
			ToolTip = "Available actions.\nName = identifier sent by LLM (snake_case).\nDescription = guidance for the LLM."))
	TArray<FPS_AI_ConvAgent_AgentAction_ElevenLabs> Actions;

	/** System prompt fragment appended to CharacterPrompt when this action set is active.
	 * Describes when and how the agent should use the perform_action tool.
	 * Customize this for your scenario. The default below is a generic instruction
	 * telling the LLM to only trigger actions that are clearly motivated. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Actions",
		meta = (MultiLine = "true",
			ToolTip = "Prompt instructions for the action tool.\nAppended to CharacterPrompt when creating/updating the agent."))
	FString ActionToolPromptFragment = TEXT(
		"## Physical Actions\n"
		"You have a perform_action tool to trigger physical actions.\n"
		"Use it when the conversation naturally leads to a physical reaction.\n"
		"Only call it when the action is clearly motivated by the situation.");

	// UPrimaryDataAsset interface
	/** All ActionSet assets share the "ActionSet_ElevenLabs" primary asset type;
	 * the asset's own FName is the unique id within that type.
	 * NOTE(review): two ActionSet assets with the same name in different folders
	 * would produce colliding primary asset ids — confirm asset names stay unique. */
	virtual FPrimaryAssetId GetPrimaryAssetId() const override
	{
		return FPrimaryAssetId(TEXT("ActionSet_ElevenLabs"), GetFName());
	}
};

View File

@ -5,6 +5,7 @@
#include "CoreMinimal.h"
#include "Engine/DataAsset.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.generated.h"
/**
@ -236,6 +237,24 @@ public:
"- \"high\" for strong reactions (big laugh, deep sadness, shock)\n\n"
"Always return to neutral when the emotional moment passes.");
// ── Action Tool ─────────────────────────────────────────────────────────
/** Include a configurable "perform_action" client tool in the agent configuration.
* Allows the LLM to trigger physical actions defined in the referenced ActionSet.
* Actions are handled via the OnAgentActionRequested event in Blueprint. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Action Tool",
meta = (ToolTip = "Include the perform_action client tool.\nRequires an ActionSet Data Asset with at least one action."))
bool bIncludeActionTool = false;
/** Reference to a reusable Action Set Data Asset.
* Create one in Content Browser (Miscellaneous > Data Asset > PS AI ConvAgent Action Set),
* define your actions there, then drag it here.
* The same ActionSet can be shared by multiple agents. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Action Tool",
meta = (EditCondition = "bIncludeActionTool",
ToolTip = "Drag an ActionSet Data Asset here.\nDefines which actions the agent can trigger."))
TObjectPtr<UPS_AI_ConvAgent_ActionSet_ElevenLabs> ActionSet;
// ── Dynamic Variables ────────────────────────────────────────────────────
/** Key-value pairs sent as dynamic_variables at conversation start.

View File

@ -135,6 +135,10 @@ public:
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|BodyExpression|Events")
FOnBodyExpressionChanged OnBodyExpressionChanged;
/** Current activation blend alpha (0 = fully inactive, 1 = fully active).
* Used by ForceDisableConversation to monitor blend-out completion. */
float GetActiveAlpha() const { return CurrentActiveAlpha; }
// ── UActorComponent overrides ─────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -166,3 +166,21 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_ClientToolCall_ElevenLabs
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
TMap<FString, FString> Parameters;
};
// ─────────────────────────────────────────────────────────────────────────────
// Agent action definition (used by ActionSet Data Asset)
// ─────────────────────────────────────────────────────────────────────────────
/** Defines a single action that an agent can perform during conversation.
 * Plain data pair (Name + Description); stored in the ActionSet Data Asset's
 * Actions array. Both fields default to the empty string. */
USTRUCT(BlueprintType)
struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_AgentAction_ElevenLabs
{
	GENERATED_BODY()

	/** Action identifier sent by the LLM (e.g. "flee", "draw_weapon"). Keep short, snake_case. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs")
	FString Name;

	/** Human-readable description — helps the LLM understand when to use this action. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs")
	FString Description;
};

View File

@ -84,13 +84,31 @@ DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FOnAgentEmotionChanged,
EPS_AI_ConvAgent_EmotionIntensity, Intensity);
/**
* Fired for any client tool call that is NOT automatically handled (i.e. not "set_emotion").
* Fired when the agent triggers a "perform_action" client tool call.
* ActionName is the identifier from the ActionSet (e.g. "flee", "draw_weapon").
* Handle this in Blueprint to play animations, trigger AI behavior, etc.
* The tool call is auto-acknowledged — no need to call SendClientToolResult.
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FOnAgentActionRequested,
const FString&, ActionName);
/**
* Fired for any client tool call that is NOT automatically handled (i.e. not "set_emotion", not "perform_action").
* Use this to implement custom client tools in Blueprint.
* You MUST call SendClientToolResult on the WebSocketProxy to acknowledge the call.
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FOnAgentClientToolCall,
const FPS_AI_ConvAgent_ClientToolCall_ElevenLabs&, ToolCall);
/**
* Fired when ForceDisableConversation() has finished blending all visual
* components (gaze, lip sync, facial expression, body expression) back to
* their neutral state. At this point the NPC is fully idle and the game
* can safely start the physical action (flee, draw weapon, etc.).
*/
DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FOnReadyForAction,
const FString&, ActionName);
/**
* Fired when the active speaker changes in a multi-player shared conversation.
* Use this for UI indicators showing who is talking, or to drive camera focus.
@ -98,6 +116,12 @@ DECLARE_DYNAMIC_MULTICAST_DELEGATE_OneParam(FOnAgentClientToolCall,
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FOnActiveSpeakerChanged,
APawn*, NewSpeaker, APawn*, PreviousSpeaker);
/** Fired when this agent starts passive gaze (proximity awareness — not selected). */
DECLARE_DYNAMIC_MULTICAST_DELEGATE(FOnAgentPassiveGazeStarted);
/** Fired when this agent stops passive gaze (left range or became selected). */
DECLARE_DYNAMIC_MULTICAST_DELEGATE(FOnAgentPassiveGazeStopped);
// Non-dynamic delegate for raw agent audio (high-frequency, C++ consumers only).
// Delivers PCM chunks as int16, 16kHz mono, little-endian.
DECLARE_MULTICAST_DELEGATE_OneParam(FOnAgentAudioData, const TArray<uint8>& /*PCMData*/);
@ -315,9 +339,16 @@ public:
meta = (ToolTip = "Fires when the agent sets an emotion (joy, sadness, surprise, fear, anger, disgust).\nDriven by the 'set_emotion' client tool. Arrives before the audio."))
FOnAgentEmotionChanged OnAgentEmotionChanged;
/** Fired for client tool calls that are NOT automatically handled (i.e. not "set_emotion"). You must call GetWebSocketProxy()->SendClientToolResult() to respond. */
/** Fired when the LLM triggers a perform_action tool call.
* ActionName is the identifier from the ActionSet (e.g. "flee", "draw_weapon").
* Handle this in Blueprint to play animations, trigger AI behavior, etc. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|ElevenLabs|Events",
meta = (ToolTip = "Fires for custom client tool calls (not set_emotion).\nYou must respond via GetWebSocketProxy()->SendClientToolResult()."))
meta = (ToolTip = "Fires when the agent triggers a physical action.\nActionName matches the Name field from the ActionSet Data Asset."))
FOnAgentActionRequested OnAgentActionRequested;
/** Fired for client tool calls that are NOT automatically handled (i.e. not "set_emotion", not "perform_action"). You must call GetWebSocketProxy()->SendClientToolResult() to respond. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|ElevenLabs|Events",
meta = (ToolTip = "Fires for custom client tool calls (not set_emotion, not perform_action).\nYou must respond via GetWebSocketProxy()->SendClientToolResult()."))
FOnAgentClientToolCall OnAgentClientToolCall;
/** Fired when the active speaker changes in a multi-player shared conversation.
@ -326,6 +357,24 @@ public:
meta = (ToolTip = "Fires when the speaking player changes.\nNewSpeaker is null when no one is speaking."))
FOnActiveSpeakerChanged OnActiveSpeakerChanged;
/** Fired when ForceDisableConversation() has finished blending all visual
* components back to neutral. The NPC is fully idle and the game can start
* the physical action (flee, draw weapon, etc.).
* Call ForceEnableConversation() later to allow conversation again. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|ElevenLabs|Events",
meta = (ToolTip = "Fires when all animation components have returned to neutral after ForceDisableConversation().\nThe agent is ready for a game action."))
FOnReadyForAction OnReadyForAction;
/** Fired when this agent starts passively tracking a nearby player (head/eyes gaze). */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|ElevenLabs|Events",
meta = (ToolTip = "Fires when this agent starts passively tracking a nearby player.\nTriggered by proximity awareness — the agent is not selected for conversation."))
FOnAgentPassiveGazeStarted OnPassiveGazeStarted;
/** Fired when this agent stops passively tracking a player (left range or selected). */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|ElevenLabs|Events",
meta = (ToolTip = "Fires when this agent stops passively tracking a player.\nThe player left range or selected this agent for active conversation."))
FOnAgentPassiveGazeStopped OnPassiveGazeStopped;
/** The current emotion of the agent, as set by the "set_emotion" client tool. Defaults to Neutral. */
UPROPERTY(ReplicatedUsing = OnRep_Emotion, BlueprintReadOnly, Category = "PS AI ConvAgent|ElevenLabs")
EPS_AI_ConvAgent_Emotion CurrentEmotion = EPS_AI_ConvAgent_Emotion::Neutral;
@ -498,6 +547,35 @@ public:
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|ElevenLabs")
void InterruptAgent();
/**
* Cleanly disable conversation on this agent so the NPC can switch to game AI.
* Ends the WebSocket connection, stops all audio, and begins blending all
* visual components (gaze, lip sync, facial expression, body expression)
* back to their neutral state. While disabled, the InteractionComponent
* cannot auto-restart a conversation with this agent.
*
* When all components have blended to neutral, fires OnReadyForAction.
* Call ForceEnableConversation() later to allow conversation again.
*
* Typical usage from Blueprint (OnAgentActionRequested):
* 1. ForceDisableConversation("flee")
* 2. Wait for OnReadyForAction receives the same ActionName
* 3. Play action montage based on ActionName
* 4. When done, call ForceEnableConversation()
*
* @param ActionName The action to perform once neutral (passed through to OnReadyForAction).
*/
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|ElevenLabs")
void ForceDisableConversation(const FString& ActionName);
/**
* Re-enable conversation after a ForceDisableConversation() call.
* Clears the disabled flag so the InteractionComponent can auto-start
* a new conversation on the next tick (if the player is still looking).
*/
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|ElevenLabs")
void ForceEnableConversation();
/**
* Feed microphone audio from an external source (e.g. InteractionComponent on the pawn).
* When an InteractionComponent exists on the player pawn, or in a network scenario,
@ -526,6 +604,11 @@ public:
UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|ElevenLabs")
const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& GetConversationInfo() const;
/** True while conversation is force-disabled (ForceDisableConversation was called).
* While disabled, StartConversation and ServerJoinConversation are blocked. */
UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|ElevenLabs")
bool IsConversationDisabled() const { return bConversationDisabledByAction; }
/** True while audio is being pre-buffered (playback hasn't started yet).
* Used by the LipSync component to pause viseme queue consumption. */
UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|ElevenLabs")
@ -618,6 +701,20 @@ private:
UPROPERTY()
USoundWaveProcedural* ProceduralSoundWave = nullptr;
// ── ForceDisableConversation state ───────────────────────────────────────
// Set by ForceDisableConversation(), cleared by ForceEnableConversation().
// While true, StartConversation() and ServerJoinConversation() are blocked.
bool bConversationDisabledByAction = false;
// True while waiting for all visual components to blend back to neutral.
// Monitored in TickComponent — fires OnReadyForAction when complete.
bool bWaitingForBlendOut = false;
// The action name passed to ForceDisableConversation(), forwarded to OnReadyForAction.
FString PendingActionName;
// Accumulated time waiting for blend-out. Safety timeout fires OnReadyForAction
// if components haven't reached neutral within this limit.
float BlendOutElapsedTime = 0.0f;
static constexpr float BlendOutTimeoutSeconds = 5.0f;
// ── State ─────────────────────────────────────────────────────────────────
// Atomic: read from WASAPI background thread (OnMicrophoneDataCaptured), written from game thread.
std::atomic<bool> bIsListening{false};

View File

@ -114,6 +114,10 @@ public:
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|FacialExpression|Events")
FOnExpressionChanged OnExpressionChanged;
/** Current activation blend alpha (0 = fully inactive, 1 = fully active).
* Used by ForceDisableConversation to monitor blend-out completion. */
float GetActiveAlpha() const { return CurrentActiveAlpha; }
// ── UActorComponent overrides ─────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -315,6 +315,10 @@ public:
* fresh instead of chasing a stale yaw from the previous interaction. */
void ResetBodyTarget();
/** Current activation blend alpha (0 = fully inactive, 1 = fully active).
* Used by ForceDisableConversation to monitor blend-out completion. */
float GetActiveAlpha() const { return CurrentActiveAlpha; }
// ── UActorComponent overrides ────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -115,6 +115,28 @@ public:
ToolTip = "Seconds to wait before the agent stops looking at the pawn.\n0 = immediate."))
float GazeDetachDelay = 0.0f;
/** Agents within range but NOT selected will passively track the player
* with head+eyes (and optionally body). This creates natural "awareness"
* without requiring an active conversation.
* Also controls the passive gaze retained after switching conversations. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (EditCondition = "bAutoManageGaze",
ToolTip = "Agents in range but not selected will passively track the player.\nAlso applies to retained gaze after switching conversations."))
bool bAutoPassiveGaze = true;
/** In passive gaze mode, the agent's head and eyes follow the player. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (EditCondition = "bAutoManageGaze && bAutoPassiveGaze",
ToolTip = "Enable head+eyes tracking in passive gaze mode."))
bool bPassiveGazeHeadEyes = true;
/** In passive gaze mode, the agent's body also turns toward the player.
* When false, only head and eyes track the body stays in its default animation. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (EditCondition = "bAutoManageGaze && bAutoPassiveGaze",
ToolTip = "Enable body rotation in passive gaze mode.\nWhen false, only head and eyes track."))
bool bPassiveGazeBody = false;
// ── Conversation management ──────────────────────────────────────────────
/** How long (seconds) the player must look at a different agent before switching
@ -310,4 +332,32 @@ private:
// while in an active conversation. Switch only happens after the delay.
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> PendingSwitchAgent;
double PendingSwitchStartTime = 0.0;
// ── Deferred leave & gaze retention ──────────────────────────────────
// When switching agents mid-conversation:
// 1. If the old agent is still speaking, the Leave is deferred until
// audio finishes → PendingLeaveAgent.
// 2. Once the Leave executes (or if the old agent was already silent),
// the gaze stays on the player until they walk out of range →
// GazeRetainedAgent.
/** Agent whose Leave is deferred because it is still speaking. */
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> PendingLeaveAgent;
/** Agent that already left the conversation but retains gaze
* on the player until MaxInteractionDistance is exceeded. */
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> GazeRetainedAgent;
/** Execute the actual Leave + stop listening on an agent. Does NOT touch gaze. */
void ExecuteLeave(UPS_AI_ConvAgent_ElevenLabsComponent* Agent);
/** Detach gaze from a retained agent and clean up. */
void CleanupRetainedGaze(UPS_AI_ConvAgent_ElevenLabsComponent* Agent);
// ── Passive gaze (proximity awareness) ──────────────────────────────
/** Agents currently tracked passively (in range, not selected). */
TSet<TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent>> PassiveGazeAgents;
/** Update passive gaze on all nearby agents. Called each tick. */
void UpdatePassiveGaze();
};

View File

@ -188,6 +188,10 @@ public:
* Smooth transition between silence and speech states. */
float GetSpeechBlendAlpha() const { return SpeechBlendAlpha; }
/** Current activation blend alpha (0 = fully inactive, 1 = fully active).
* Used by ForceDisableConversation to monitor blend-out completion. */
float GetActiveAlpha() const { return CurrentActiveAlpha; }
// ── UActorComponent overrides ─────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -288,4 +288,9 @@ public:
// Start generating before confirming end-of-speech (reduces latency, may cause false starts).
bool bSpeculativeTurn = false;
// Dynamic variables sent in conversation_initiation_client_data.
// Populated by UPS_AI_ConvAgent_ElevenLabsComponent before calling Connect().
// Includes global_context (from Settings) + per-agent DefaultDynamicVariables.
TMap<FString, FString> DynamicVariables;
};

View File

@ -33,6 +33,8 @@ public class PS_AI_ConvAgentEditor : ModuleRules
"HTTP",
"Json",
"JsonUtilities",
// Asset Registry for scanning AgentConfig assets (batch update)
"AssetRegistry",
});
}
}

View File

@ -4,6 +4,8 @@
#include "PropertyEditorModule.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h"
/**
* Editor module for PS_AI_ConvAgent plugin.
@ -21,6 +23,11 @@ public:
UPS_AI_ConvAgent_AgentConfig_ElevenLabs::StaticClass()->GetFName(),
FOnGetDetailCustomizationInstance::CreateStatic(
&FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::MakeInstance));
PropertyModule.RegisterCustomClassLayout(
UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName(),
FOnGetDetailCustomizationInstance::CreateStatic(
&FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::MakeInstance));
}
virtual void ShutdownModule() override
@ -32,6 +39,9 @@ public:
PropertyModule.UnregisterCustomClassLayout(
UPS_AI_ConvAgent_AgentConfig_ElevenLabs::StaticClass()->GetFName());
PropertyModule.UnregisterCustomClassLayout(
UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName());
}
}
};

View File

@ -0,0 +1,293 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent.h"
#include "DetailLayoutBuilder.h"
#include "DetailCategoryBuilder.h"
#include "DetailWidgetRow.h"
#include "Widgets/Input/SButton.h"
#include "Widgets/Text/STextBlock.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "HttpModule.h"
#include "Interfaces/IHttpRequest.h"
#include "Interfaces/IHttpResponse.h"
#include "Dom/JsonObject.h"
#include "Serialization/JsonWriter.h"
#include "Serialization/JsonSerializer.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ActionSetEditor, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// Factory
// ─────────────────────────────────────────────────────────────────────────────
TSharedRef<IDetailCustomization> FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::MakeInstance()
{
	// Factory used by FPropertyEditorModule::RegisterCustomClassLayout.
	FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs* NewCustomization =
		new FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs();
	return MakeShareable(NewCustomization);
}
// ─────────────────────────────────────────────────────────────────────────────
// CustomizeDetails
// ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::CustomizeDetails(
	IDetailLayoutBuilder& DetailBuilder)
{
	// Remember which objects this panel is editing (read back by GetEditedAsset()).
	DetailBuilder.GetObjectsBeingCustomized(SelectedObjects);

	// ── Agent Sync category ─────────────────────────────────────────────────
	IDetailCategoryBuilder& SyncCat = DetailBuilder.EditCategory(
		TEXT("Agent Sync"),
		FText::FromString(TEXT("Agent Sync")),
		ECategoryPriority::Important);

	// Capture a weak self-reference instead of raw `this`: the Slate widget
	// (and its OnClicked delegate) can outlive this customization instance
	// when the details panel refreshes, and a raw `this` capture would then
	// dangle. Same weak-capture idiom as the HTTP completion callbacks in
	// OnUpdateAllAgentsClicked().
	TWeakPtr<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs> WeakSelf =
		StaticCastSharedRef<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs>(this->AsShared());

	// "Update All Agents" button + status line underneath it.
	SyncCat.AddCustomRow(FText::FromString(TEXT("Update All Agents")))
	.WholeRowContent()
	[
		SNew(SVerticalBox)
		+ SVerticalBox::Slot()
		.AutoHeight()
		.Padding(0, 4)
		[
			SNew(SButton)
			.Text(FText::FromString(TEXT("Update All Agents")))
			.ToolTipText(FText::FromString(
				TEXT("PATCH all AgentConfig assets that reference this ActionSet.")))
			.OnClicked_Lambda([WeakSelf]()
			{
				// Pin before use — no-op if the customization is gone.
				if (TSharedPtr<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs> Pinned = WeakSelf.Pin())
				{
					Pinned->OnUpdateAllAgentsClicked();
				}
				return FReply::Handled();
			})
		]
		+ SVerticalBox::Slot()
		.AutoHeight()
		.Padding(0, 2)
		[
			// Status text updated by SetStatusText / SetStatusError / SetStatusSuccess.
			SAssignNew(StatusTextBlock, STextBlock)
			.Text(FText::GetEmpty())
			.Font(IDetailLayoutBuilder::GetDetailFont())
			.ColorAndOpacity(FSlateColor(FLinearColor(0.3f, 0.7f, 1.0f))) // cyan/info
		]
	];
}
// ─────────────────────────────────────────────────────────────────────────────
// Update All Agents
// ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::OnUpdateAllAgentsClicked()
{
	// Preconditions: need an edited asset and an API key. Failures are surfaced
	// on the status line rather than silently returning.
	const UPS_AI_ConvAgent_ActionSet_ElevenLabs* ActionSetAsset = GetEditedAsset();
	if (!ActionSetAsset)
	{
		SetStatusError(TEXT("No ActionSet asset selected."));
		return;
	}

	const FString APIKey = GetAPIKey();
	if (APIKey.IsEmpty())
	{
		SetStatusError(TEXT("API Key not set in Project Settings > PS AI ConvAgent - ElevenLabs."));
		return;
	}

	// ── Scan all AgentConfig assets via Asset Registry ───────────────────────
	FAssetRegistryModule& ARModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
	IAssetRegistry& AssetRegistry = ARModule.Get();

	// Third argument (true) = also search subclasses of the config class.
	TArray<FAssetData> AllAgentConfigs;
	AssetRegistry.GetAssetsByClass(
		UPS_AI_ConvAgent_AgentConfig_ElevenLabs::StaticClass()->GetClassPathName(),
		AllAgentConfigs, true);

	// ── Filter: bIncludeActionTool && ActionSet == this asset && AgentID not empty ─
	// NOTE: AD.GetAsset() synchronously loads each candidate — acceptable for an
	// editor-only button handler.
	TArray<UPS_AI_ConvAgent_AgentConfig_ElevenLabs*> MatchingConfigs;
	for (const FAssetData& AD : AllAgentConfigs)
	{
		UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config =
			Cast<UPS_AI_ConvAgent_AgentConfig_ElevenLabs>(AD.GetAsset());
		if (!Config) continue;
		if (!Config->bIncludeActionTool) continue;
		if (Config->ActionSet != ActionSetAsset) continue;
		if (Config->AgentID.IsEmpty()) continue;
		MatchingConfigs.Add(Config);
	}

	if (MatchingConfigs.Num() == 0)
	{
		SetStatusError(TEXT("No AgentConfig assets reference this ActionSet (with AgentID set)."));
		return;
	}

	SetStatusText(FString::Printf(TEXT("Updating %d agent(s)..."), MatchingConfigs.Num()));

	// ── Shared counter for async completion tracking ─────────────────────────
	// One PATCH request per matching config goes out in parallel; this shared
	// state survives until the last completion callback fires (kept alive by
	// the TSharedPtr captured in every lambda).
	// The counters/lock are defensive — presumably the HTTP completion
	// delegates are dispatched on the game thread (TODO confirm for all
	// platform HTTP backends).
	struct FBatchState
	{
		int32 Total = 0;
		FThreadSafeCounter Succeeded;
		FThreadSafeCounter Failed;
		TArray<FString> Errors;
		FCriticalSection ErrorLock;
	};
	TSharedPtr<FBatchState> State = MakeShareable(new FBatchState());
	State->Total = MatchingConfigs.Num();

	// Weak self: the details customization may be destroyed (panel closed)
	// before the HTTP responses arrive.
	TWeakPtr<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs> WeakSelf =
		StaticCastSharedRef<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs>(this->AsShared());

	for (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config : MatchingConfigs)
	{
		// Build the agent PATCH payload for this config (shared with the
		// per-asset sync in the AgentConfig customization).
		TSharedPtr<FJsonObject> Payload =
			FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildAgentPayloadForAsset(Config);

		FString PayloadStr;
		TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
		FJsonSerializer::Serialize(Payload.ToSharedRef(), Writer);

		const FString URL = FString::Printf(
			TEXT("https://api.elevenlabs.io/v1/convai/agents/%s"), *Config->AgentID);

		TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
		Request->SetURL(URL);
		Request->SetVerb(TEXT("PATCH"));
		Request->SetHeader(TEXT("xi-api-key"), APIKey);
		Request->SetHeader(TEXT("Content-Type"), TEXT("application/json"));
		Request->SetContentAsString(PayloadStr);

		// Capture Config as weak pointer for safety.
		// AgentName is copied by value — it must outlive this loop iteration.
		TWeakObjectPtr<UPS_AI_ConvAgent_AgentConfig_ElevenLabs> WeakConfig(Config);
		FString AgentName = Config->AgentName.IsEmpty() ? Config->AgentID : Config->AgentName;

		Request->OnProcessRequestComplete().BindLambda(
			[WeakSelf, State, WeakConfig, AgentName]
			(FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
			{
				// Classify the outcome: connection failure, non-200, or success.
				bool bSuccess = false;
				FString ErrorMsg;

				if (!bConnected || !Resp.IsValid())
				{
					ErrorMsg = FString::Printf(TEXT("%s: connection failed"), *AgentName);
				}
				else if (Resp->GetResponseCode() != 200)
				{
					ErrorMsg = FString::Printf(TEXT("%s: HTTP %d"),
						*AgentName, Resp->GetResponseCode());
				}
				else
				{
					bSuccess = true;
					// Update LastSyncTimestamp on the asset.
					// Modify() records the change for undo / marks the package dirty.
					if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Cfg = WeakConfig.Get())
					{
						Cfg->Modify();
						Cfg->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
					}
				}

				if (bSuccess)
				{
					State->Succeeded.Increment();
				}
				else
				{
					State->Failed.Increment();
					FScopeLock Lock(&State->ErrorLock);
					State->Errors.Add(ErrorMsg);
				}

				// Check if all requests are done.
				const int32 Done = State->Succeeded.GetValue() + State->Failed.GetValue();
				if (Done >= State->Total)
				{
					// Last response in: publish the aggregate result to the
					// status line — unless the panel is already gone.
					auto Pinned = WeakSelf.Pin();
					if (!Pinned.IsValid()) return;

					if (State->Failed.GetValue() == 0)
					{
						Pinned->SetStatusSuccess(FString::Printf(
							TEXT("Updated %d/%d agents successfully."),
							State->Succeeded.GetValue(), State->Total));
					}
					else
					{
						FString AllErrors;
						{
							FScopeLock Lock(&State->ErrorLock);
							AllErrors = FString::Join(State->Errors, TEXT(", "));
						}
						Pinned->SetStatusError(FString::Printf(
							TEXT("Updated %d/%d agents. Failures: %s"),
							State->Succeeded.GetValue(), State->Total, *AllErrors));
					}
				}
			});

		Request->ProcessRequest();
		UE_LOG(LogPS_AI_ActionSetEditor, Log,
			TEXT(" → PATCH agent '%s' (ID: %s)"), *AgentName, *Config->AgentID);
	}
}
// ─────────────────────────────────────────────────────────────────────────────
// Helpers
// ─────────────────────────────────────────────────────────────────────────────
FString FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::GetAPIKey() const
{
	// Read the API key from the runtime module's settings object.
	// Empty string when the module is unavailable or settings are missing.
	if (!FPS_AI_ConvAgentModule::IsAvailable())
	{
		return FString();
	}
	const UPS_AI_ConvAgent_Settings_ElevenLabs* Settings =
		FPS_AI_ConvAgentModule::Get().GetSettings();
	return Settings ? Settings->API_Key : FString();
}
UPS_AI_ConvAgent_ActionSet_ElevenLabs* FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::GetEditedAsset() const
{
	// Return the first selected object that is an ActionSet asset (nullptr if none).
	for (int32 Index = 0; Index < SelectedObjects.Num(); ++Index)
	{
		UObject* RawObject = SelectedObjects[Index].Get();
		UPS_AI_ConvAgent_ActionSet_ElevenLabs* AsActionSet =
			Cast<UPS_AI_ConvAgent_ActionSet_ElevenLabs>(RawObject);
		if (AsActionSet != nullptr)
		{
			return AsActionSet;
		}
	}
	return nullptr;
}
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::SetStatusText(const FString& Text)
{
UE_LOG(LogPS_AI_ActionSetEditor, Log, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(0.3f, 0.7f, 1.0f))); // cyan/info
}
}
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::SetStatusError(const FString& Text)
{
UE_LOG(LogPS_AI_ActionSetEditor, Error, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(1.0f, 0.25f, 0.25f))); // red
}
}
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::SetStatusSuccess(const FString& Text)
{
UE_LOG(LogPS_AI_ActionSetEditor, Log, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(0.2f, 0.9f, 0.3f))); // green
}
}

View File

@ -0,0 +1,43 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "IDetailCustomization.h"
class IDetailLayoutBuilder;
/**
* Detail Customization for UPS_AI_ConvAgent_ActionSet_ElevenLabs data assets.
*
* Provides:
* - Agent Sync category: "Update All Agents" button + STextBlock status
* Scans all UPS_AI_ConvAgent_AgentConfig_ElevenLabs assets that reference
* this ActionSet, then sends HTTP PATCH to ElevenLabs for each one.
*/
class FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs : public IDetailCustomization
{
public:
	/** Factory method used when registering this customization with the PropertyEditor module. */
	static TSharedRef<IDetailCustomization> MakeInstance();
	/** Injects the custom "Agent Sync" UI (button + status text) into the details panel. */
	virtual void CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) override;
private:
	// ── Update All Agents ────────────────────────────────────────────────────
	/** Click handler for the "Update All Agents" button: PATCHes every agent
	 *  config that references this ActionSet (see class comment). */
	void OnUpdateAllAgentsClicked();
	// ── Helpers ──────────────────────────────────────────────────────────────
	/** Reads the ElevenLabs API key from the plugin's project settings; empty if unavailable. */
	FString GetAPIKey() const;
	/** Retrieve the ActionSet data asset being edited (first selected object). */
	class UPS_AI_ConvAgent_ActionSet_ElevenLabs* GetEditedAsset() const;
	/** Display a status message. Color: red for errors, green for success, cyan for info. */
	void SetStatusText(const FString& Text);
	void SetStatusError(const FString& Text);
	void SetStatusSuccess(const FString& Text);
	// ── Cached state ─────────────────────────────────────────────────────────
	// Objects selected when CustomizeDetails ran; the first ActionSet among them wins.
	TArray<TWeakObjectPtr<UObject>> SelectedObjects;
	// Status line widget; may be invalid after the panel is rebuilt — always check IsValid().
	TSharedPtr<class STextBlock> StatusTextBlock;
};

View File

@ -0,0 +1,29 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_ActionSetFactory_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "AssetTypeCategories.h"
UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs::UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs()
{
	// Allow creating brand-new assets from the Content Browser, and open the
	// editor on the freshly created asset right away.
	bCreateNew = true;
	bEditAfterNew = true;
	// The asset class this factory produces.
	SupportedClass = UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass();
}
UObject* UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs::FactoryCreateNew(
	UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags,
	UObject* Context, FFeedbackContext* Warn)
{
	// Instantiate the ActionSet data asset under the requested outer/name/flags.
	UPS_AI_ConvAgent_ActionSet_ElevenLabs* NewAsset =
		NewObject<UPS_AI_ConvAgent_ActionSet_ElevenLabs>(InParent, Class, Name, Flags);
	return NewAsset;
}
FText UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs::GetDisplayName() const
{
	// Label shown in the Content Browser "create asset" menu.
	const FString MenuLabel = TEXT("PS AI ConvAgent Action Set (ElevenLabs)");
	return FText::FromString(MenuLabel);
}
uint32 UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs::GetMenuCategories() const
{
	// File the new-asset entry under "Miscellaneous" in the Content Browser menu.
	const uint32 Categories = EAssetTypeCategories::Misc;
	return Categories;
}

View File

@ -0,0 +1,27 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Factories/Factory.h"
#include "PS_AI_ConvAgent_ActionSetFactory_ElevenLabs.generated.h"
/**
* Factory that lets users create PS_AI_ConvAgent_ActionSet_ElevenLabs assets
* directly from the Content Browser (right-click > Miscellaneous).
*/
UCLASS()
class UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs : public UFactory
{
	GENERATED_BODY()
public:
	UPS_AI_ConvAgent_ActionSetFactory_ElevenLabs();
	/** Creates a new ActionSet data asset instance; called by the asset tools framework. */
	virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent,
		FName Name, EObjectFlags Flags, UObject* Context,
		FFeedbackContext* Warn) override;
	/** Human-readable name shown in the "create asset" menu. */
	virtual FText GetDisplayName() const override;
	/** Content Browser category flags for this asset type (Miscellaneous). */
	virtual uint32 GetMenuCategories() const override;
};

View File

@ -1182,6 +1182,25 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
// to avoid doubling them on next Update.
// Order matters: strip from earliest marker to preserve CharacterPrompt.
// 0. Strip global context placeholder prepended by BuildAgentPayload.
// Must be removed so it never appears in the CharacterPrompt UI field.
{
const FString GlobalContextPrefix = TEXT("{{global_context}}\n\n");
if (Prompt.StartsWith(GlobalContextPrefix))
{
Prompt.RightChopInline(GlobalContextPrefix.Len());
}
else
{
// Fallback: ElevenLabs may collapse double-newline to single
const FString GlobalContextPrefixAlt = TEXT("{{global_context}}\n");
if (Prompt.StartsWith(GlobalContextPrefixAlt))
{
Prompt.RightChopInline(GlobalContextPrefixAlt.Len());
}
}
}
// 1. Language instruction — try exact fragment first, then marker fallback.
// Mirrors the emotion-tool approach: ElevenLabs may normalise
// double-newlines, so the "\n\n## …" marker alone can fail.
@ -1277,6 +1296,28 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
}
}
// 4. Action tool fragment (from ActionSet)
if (Asset->ActionSet && !Asset->ActionSet->ActionToolPromptFragment.IsEmpty())
{
int32 Idx = Prompt.Find(Asset->ActionSet->ActionToolPromptFragment,
ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
}
else
{
// Fallback: strip by marker
const FString ActionMarker = TEXT("\n\n## Physical Actions");
int32 MarkerIdx = Prompt.Find(ActionMarker,
ESearchCase::CaseSensitive);
if (MarkerIdx != INDEX_NONE)
{
Prompt.LeftInline(MarkerIdx);
}
}
}
Asset->CharacterPrompt = Prompt;
}
@ -1499,12 +1540,26 @@ FString FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::ParseAPIError(
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildAgentPayload() const
{
	// Build the ElevenLabs API payload for the asset currently being edited.
	// Delegates to the static builder so the logic can be shared with other
	// customizations (e.g. the ActionSet batch update).
	// Fix: the previous version declared an unused local (`Asset`) and called
	// GetEditedAsset() twice; one lookup is sufficient.
	return BuildAgentPayloadForAsset(GetEditedAsset());
}
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildAgentPayloadForAsset(
const UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset)
{
if (!Asset) return MakeShareable(new FJsonObject());
// Build the full system prompt by appending automated fragments.
// Order: CharacterPrompt + Language/Multilingual instruction + Emotion tool
FString FullPrompt = Asset->CharacterPrompt;
// Order: [GlobalContext placeholder] + CharacterPrompt + Language/Multilingual instruction + Emotion tool
// Prepend global context placeholder — resolved at runtime via dynamic_variables.
// Only added when the user has set a GlobalContextPrompt in project settings.
FString FullPrompt;
const UPS_AI_ConvAgent_Settings_ElevenLabs* Settings = FPS_AI_ConvAgentModule::Get().GetSettings();
if (Settings && !Settings->GlobalContextPrompt.IsEmpty())
{
FullPrompt = TEXT("{{global_context}}\n\n");
}
FullPrompt += Asset->CharacterPrompt;
UE_LOG(LogPS_AI_AgentConfigEditor, Log,
TEXT("BuildAgentPayload: CharacterPrompt=%d chars, bMultilingual=%d, bAutoLangInstr=%d, Language='%s', "
@ -1553,6 +1608,16 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended EmotionToolPromptFragment"));
}
// Append action tool instructions from ActionSet.
if (Asset->bIncludeActionTool && Asset->ActionSet
&& Asset->ActionSet->Actions.Num() > 0
&& !Asset->ActionSet->ActionToolPromptFragment.IsEmpty())
{
FullPrompt += TEXT("\n\n");
FullPrompt += Asset->ActionSet->ActionToolPromptFragment;
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended ActionToolPromptFragment from ActionSet"));
}
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT("BuildAgentPayload: FullPrompt = %d chars"), FullPrompt.Len());
// prompt object (includes LLM selection + tools)
@ -1563,12 +1628,24 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
PromptObj->SetStringField(TEXT("llm"), Asset->LLMModel);
}
// If emotion tool is enabled, add to prompt.tools[] (API path: conversation_config.agent.prompt.tools)
// Build tools array: emotion tool + action tool (API path: conversation_config.agent.prompt.tools)
TArray<TSharedPtr<FJsonValue>> Tools;
if (Asset->bIncludeEmotionTool)
{
TSharedPtr<FJsonObject> EmotionTool = BuildEmotionToolDefinition();
TArray<TSharedPtr<FJsonValue>> Tools;
Tools.Add(MakeShareable(new FJsonValueObject(EmotionTool)));
}
if (Asset->bIncludeActionTool && Asset->ActionSet
&& Asset->ActionSet->Actions.Num() > 0)
{
TSharedPtr<FJsonObject> ActionTool = BuildActionToolDefinition(Asset);
if (ActionTool)
{
Tools.Add(MakeShareable(new FJsonValueObject(ActionTool)));
}
}
if (Tools.Num() > 0)
{
PromptObj->SetArrayField(TEXT("tools"), Tools);
}
@ -1670,7 +1747,7 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
return Root;
}
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildEmotionToolDefinition() const
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildEmotionToolDefinition()
{
// Build the set_emotion client tool definition.
// Parameters: emotion (enum), intensity (enum).
@ -1724,3 +1801,54 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
return Tool;
}
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildActionToolDefinition(
	const UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset)
{
	// Build the "perform_action" client tool definition from the asset's ActionSet.
	// Returns nullptr when there is no ActionSet or it contains no named actions,
	// so callers can skip adding the tool entirely.
	if (!Asset || !Asset->ActionSet || Asset->ActionSet->Actions.Num() == 0)
		return nullptr;
	// Collect the enum of action names plus a human-readable description listing them.
	TArray<TSharedPtr<FJsonValue>> AllowedActions;
	FString ActionListDesc = TEXT("The action to perform. Available actions:");
	for (const auto& Entry : Asset->ActionSet->Actions)
	{
		if (Entry.Name.IsEmpty())
		{
			continue; // unnamed actions cannot be addressed by the agent
		}
		AllowedActions.Add(MakeShareable(new FJsonValueString(Entry.Name)));
		if (!Entry.Description.IsEmpty())
		{
			ActionListDesc += FString::Printf(TEXT("\n- %s: %s"),
				*Entry.Name, *Entry.Description);
		}
	}
	if (AllowedActions.Num() == 0) return nullptr;
	// "action" parameter: a string constrained to the collected enum.
	TSharedPtr<FJsonObject> ActionField = MakeShareable(new FJsonObject());
	ActionField->SetStringField(TEXT("type"), TEXT("string"));
	ActionField->SetStringField(TEXT("description"), ActionListDesc);
	ActionField->SetArrayField(TEXT("enum"), AllowedActions);
	// JSON-schema style "parameters" object: properties + required list.
	TSharedPtr<FJsonObject> PropsObj = MakeShareable(new FJsonObject());
	PropsObj->SetObjectField(TEXT("action"), ActionField);
	TArray<TSharedPtr<FJsonValue>> RequiredFields;
	RequiredFields.Add(MakeShareable(new FJsonValueString(TEXT("action"))));
	TSharedPtr<FJsonObject> ParamsObj = MakeShareable(new FJsonObject());
	ParamsObj->SetStringField(TEXT("type"), TEXT("object"));
	ParamsObj->SetObjectField(TEXT("properties"), PropsObj);
	ParamsObj->SetArrayField(TEXT("required"), RequiredFields);
	// Final client-tool definition object.
	TSharedPtr<FJsonObject> ToolDef = MakeShareable(new FJsonObject());
	ToolDef->SetStringField(TEXT("type"), TEXT("client"));
	ToolDef->SetStringField(TEXT("name"), TEXT("perform_action"));
	ToolDef->SetStringField(TEXT("description"),
		TEXT("Trigger a physical action or reaction for the character."));
	ToolDef->SetObjectField(TEXT("parameters"), ParamsObj);
	return ToolDef;
}

View File

@ -50,7 +50,17 @@ private:
// ── Helpers ──────────────────────────────────────────────────────────────
FString GetAPIKey() const;
TSharedPtr<FJsonObject> BuildAgentPayload() const;
TSharedPtr<FJsonObject> BuildEmotionToolDefinition() const;
public:
/** Build the full ElevenLabs API payload for any AgentConfig asset.
* Static so it can be reused from other customizations (e.g. ActionSet batch update). */
static TSharedPtr<FJsonObject> BuildAgentPayloadForAsset(
const class UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset);
static TSharedPtr<FJsonObject> BuildEmotionToolDefinition();
static TSharedPtr<FJsonObject> BuildActionToolDefinition(
const class UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset);
private:
/** Display a status message in the Identity category.
* Color: red for errors, green for success, blue/cyan for info. */