Compare commits

...

6 Commits

Author SHA1 Message Date
453450d7eb Commit Scene + BP 2026-02-27 18:38:16 +01:00
301efee982 Enable body tracking on both voice and text input
Body tracking is now activated by ElevenLabsComponent directly
in StartListening() and SendTextMessage(), instead of being
managed by InteractionComponent. This ensures the agent turns
its body toward the player on any form of conversation input.

InteractionComponent still disables body tracking on deselection.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 18:33:08 +01:00
92a8b70a7f Split posture: eyes+head on selection, body on conversation start
PostureComponent gains bEnableBodyTracking flag. When false, only
head and eyes track the target — body stays frozen.

InteractionComponent now:
- On posture attach: sets TargetActor but disables body tracking
  (agent notices player with eyes+head only)
- On StartListening: enables body tracking (agent fully engages)
- On StopListening: disables body tracking

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 18:26:27 +01:00
23e216b211 Add bAutoManageListening to InteractionComponent
Automatically calls StartListening/StopListening on the agent's
ElevenLabsComponent on selection/deselection. Enabled by default.
Disable for manual control (e.g. push-to-talk).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 18:08:16 +01:00
6c081e1207 Fix: body keeps orientation on target loss, fix close-range selection
PostureComponent: body no longer returns to original yaw when
TargetActor is cleared — only head and eyes return to neutral.

InteractionComponent: add AgentEyeLevelOffset (default 150cm) so
the view cone check targets chest height instead of feet, preventing
selection loss at close range.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 17:49:03 +01:00
c6aed472f9 Add auto posture management to InteractionComponent
InteractionComponent now automatically sets/clears the agent's
PostureComponent TargetActor on selection/deselection, with
configurable attach/detach delays and a master toggle for manual control.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-27 17:39:05 +01:00
10 changed files with 235 additions and 18 deletions

View File

@ -1,8 +1,8 @@
[/Script/EngineSettings.GameMapsSettings]
GameDefaultMap=/PS_AI_ConvAgent/Demo_VoiceOnly.Demo_VoiceOnly
EditorStartupMap=/PS_AI_ConvAgent/Demo_VoiceOnly.Demo_VoiceOnly
GameDefaultMap=/PS_AI_ConvAgent/Demo_Metahuman.Demo_Metahuman
EditorStartupMap=/PS_AI_ConvAgent/Demo_Metahuman.Demo_Metahuman
[/Script/Engine.RendererSettings]
r.AllowStaticLighting=False

View File

@ -2,6 +2,7 @@
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "PS_AI_ConvAgent_MicrophoneCaptureComponent.h"
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "PS_AI_ConvAgent_InteractionSubsystem.h"
#include "PS_AI_ConvAgent.h"
@ -312,6 +313,17 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartListening()
Mic->StartCapture();
}
// Enable body tracking on the sibling PostureComponent (if present).
// Voice input counts as conversation engagement, same as text.
if (AActor* OwnerActor = GetOwner())
{
if (UPS_AI_ConvAgent_PostureComponent* Posture =
OwnerActor->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>())
{
Posture->bEnableBodyTracking = true;
}
}
const double T = TurnStartTime - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] [Turn %d] Mic opened%s — user speaking."),
T, TurnIndex, bExternalMicManagement ? TEXT(" (external)") : TEXT(""));
@ -404,6 +416,17 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::SendTextMessage(const FString& Text)
return;
}
WebSocketProxy->SendTextMessage(Text);
// Enable body tracking on the sibling PostureComponent (if present).
// Text input counts as conversation engagement, same as voice.
if (AActor* Owner = GetOwner())
{
if (UPS_AI_ConvAgent_PostureComponent* Posture =
Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>())
{
Posture->bEnableBodyTracking = true;
}
}
}
void UPS_AI_ConvAgent_ElevenLabsComponent::InterruptAgent()

View File

@ -4,10 +4,12 @@
#include "PS_AI_ConvAgent_InteractionSubsystem.h"
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "PS_AI_ConvAgent_MicrophoneCaptureComponent.h"
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "GameFramework/Pawn.h"
#include "GameFramework/PlayerController.h"
#include "Camera/PlayerCameraManager.h"
#include "TimerManager.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_Select, Log, All);
@ -48,6 +50,13 @@ void UPS_AI_ConvAgent_InteractionComponent::BeginPlay()
void UPS_AI_ConvAgent_InteractionComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
// Cancel any pending posture timers.
if (UWorld* World = GetWorld())
{
World->GetTimerManager().ClearTimer(PostureAttachTimerHandle);
World->GetTimerManager().ClearTimer(PostureDetachTimerHandle);
}
if (MicComponent)
{
MicComponent->StopCapture();
@ -57,6 +66,16 @@ void UPS_AI_ConvAgent_InteractionComponent::EndPlay(const EEndPlayReason::Type E
// Fire deselection event for cleanup.
if (UPS_AI_ConvAgent_ElevenLabsComponent* Agent = SelectedAgent.Get())
{
// Stop listening and clear posture immediately on shutdown — no delay.
if (bAutoManageListening)
{
Agent->StopListening();
}
if (bAutoManagePosture)
{
DetachPostureTarget(Agent);
}
SelectedAgent.Reset();
OnAgentDeselected.Broadcast(Agent);
}
@ -114,7 +133,7 @@ UPS_AI_ConvAgent_ElevenLabsComponent* UPS_AI_ConvAgent_InteractionComponent::Eva
AActor* AgentActor = Agent->GetOwner();
if (!AgentActor) continue;
const FVector AgentLocation = AgentActor->GetActorLocation();
const FVector AgentLocation = AgentActor->GetActorLocation() + FVector(0.0f, 0.0f, AgentEyeLevelOffset);
const FVector ToAgent = AgentLocation - ViewLocation;
const float DistSq = ToAgent.SizeSquared();
@ -162,6 +181,7 @@ UPS_AI_ConvAgent_ElevenLabsComponent* UPS_AI_ConvAgent_InteractionComponent::Eva
void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_ElevenLabsComponent* NewAgent)
{
UPS_AI_ConvAgent_ElevenLabsComponent* OldAgent = SelectedAgent.Get();
UWorld* World = GetWorld();
// Deselect old agent.
if (OldAgent)
@ -174,6 +194,41 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
OldAgent->GetOwner() ? *OldAgent->GetOwner()->GetName() : TEXT("(null)"));
}
// ── Listening: stop ──────────────────────────────────────────────
if (bAutoManageListening)
{
OldAgent->StopListening();
}
// Disable body tracking on deselection.
if (bAutoManagePosture)
{
if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(OldAgent))
{
Posture->bEnableBodyTracking = false;
}
}
// ── Posture: detach ──────────────────────────────────────────────
if (bAutoManagePosture && World)
{
// Cancel any pending attach — agent left before attach fired.
World->GetTimerManager().ClearTimer(PostureAttachTimerHandle);
if (PostureDetachDelay > 0.0f)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakOld = OldAgent;
World->GetTimerManager().SetTimer(PostureDetachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::DetachPostureTarget, WeakOld),
PostureDetachDelay, false);
}
else
{
DetachPostureTarget(OldAgent);
}
}
OnAgentDeselected.Broadcast(OldAgent);
}
@ -194,6 +249,34 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
MicComponent->StartCapture();
}
// ── Listening: start ─────────────────────────────────────────────
// Body tracking is enabled by ElevenLabsComponent itself (in StartListening
// and SendTextMessage) so it works for both voice and text input.
if (bAutoManageListening)
{
NewAgent->StartListening();
}
// ── Posture: attach ──────────────────────────────────────────────
if (bAutoManagePosture && World)
{
// Cancel any pending detach — agent came back before detach fired.
World->GetTimerManager().ClearTimer(PostureDetachTimerHandle);
if (PostureAttachDelay > 0.0f)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakNew = NewAgent;
World->GetTimerManager().SetTimer(PostureAttachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget, WeakNew),
PostureAttachDelay, false);
}
else
{
AttachPostureTarget(NewAgent);
}
}
OnAgentSelected.Broadcast(NewAgent);
}
else
@ -252,6 +335,57 @@ void UPS_AI_ConvAgent_InteractionComponent::ClearSelection()
SetSelectedAgent(nullptr);
}
// ─────────────────────────────────────────────────────────────────────────────
// Posture helpers
// ─────────────────────────────────────────────────────────────────────────────
UPS_AI_ConvAgent_PostureComponent* UPS_AI_ConvAgent_InteractionComponent::FindPostureOnAgent(
	UPS_AI_ConvAgent_ElevenLabsComponent* Agent)
{
	// Resolve the PostureComponent that lives on the same actor as the given
	// agent component. Returns nullptr if the agent, its owner, or the
	// posture component is absent.
	if (Agent != nullptr)
	{
		if (AActor* OwningActor = Agent->GetOwner())
		{
			return OwningActor->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>();
		}
	}
	return nullptr;
}
void UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget(
	TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent)
{
	// Point the agent's posture at this component's owner (the pawn).
	// Called either directly or from a delayed timer, so the agent is held
	// weakly — it may have been destroyed while the attach timer was pending.
	UPS_AI_ConvAgent_ElevenLabsComponent* ResolvedAgent = Agent.Get();
	if (ResolvedAgent == nullptr)
	{
		return;
	}

	UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(ResolvedAgent);
	if (Posture == nullptr)
	{
		return;
	}

	// Set the look target but keep the body frozen: on selection the agent
	// only "notices" the player with eyes and head. Body tracking is turned
	// on elsewhere once a conversation actually starts.
	Posture->TargetActor = GetOwner();
	Posture->bEnableBodyTracking = false;

	if (bDebug)
	{
		const AActor* AgentOwner = ResolvedAgent->GetOwner();
		UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Posture attached (eyes+head only): %s -> %s"),
			AgentOwner ? *AgentOwner->GetName() : TEXT("(null)"),
			GetOwner() ? *GetOwner()->GetName() : TEXT("(null)"));
	}
}
void UPS_AI_ConvAgent_InteractionComponent::DetachPostureTarget(
	TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent)
{
	// Clear the agent's posture look target. Reached directly or via a
	// delayed timer, so the agent is held weakly — a stale pointer means the
	// agent was destroyed and there is nothing left to clear.
	UPS_AI_ConvAgent_ElevenLabsComponent* ResolvedAgent = Agent.Get();
	if (ResolvedAgent == nullptr)
	{
		return;
	}

	if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(ResolvedAgent))
	{
		// Null target sends head and eyes back to neutral; the posture
		// component leaves the body wherever tracking last put it.
		Posture->TargetActor = nullptr;

		if (bDebug)
		{
			const AActor* AgentOwner = ResolvedAgent->GetOwner();
			UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Posture detached: %s"),
				AgentOwner ? *AgentOwner->GetName() : TEXT("(null)"));
		}
	}
}
// ─────────────────────────────────────────────────────────────────────────────
// Mic routing
// ─────────────────────────────────────────────────────────────────────────────

View File

@ -240,12 +240,16 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
}
// Body smoothly interpolates toward its persistent target
const float BodyDelta = FMath::FindDeltaAngleDegrees(
Owner->GetActorRotation().Yaw, TargetBodyWorldYaw);
if (FMath::Abs(BodyDelta) > 0.1f)
// (only when body tracking is enabled — otherwise only head+eyes move).
if (bEnableBodyTracking)
{
const float BodyStep = FMath::FInterpTo(0.0f, BodyDelta, SafeDeltaTime, BodyInterpSpeed);
Owner->AddActorWorldRotation(FRotator(0.0f, BodyStep, 0.0f));
const float BodyDelta = FMath::FindDeltaAngleDegrees(
Owner->GetActorRotation().Yaw, TargetBodyWorldYaw);
if (FMath::Abs(BodyDelta) > 0.1f)
{
const float BodyStep = FMath::FInterpTo(0.0f, BodyDelta, SafeDeltaTime, BodyInterpSpeed);
Owner->AddActorWorldRotation(FRotator(0.0f, BodyStep, 0.0f));
}
}
// ── 3. Compute DeltaYaw after body interp ──────────────────────────
@ -274,7 +278,7 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
BodyTargetFacing, TargetWorldYaw);
bool bBodyOverflowed = false;
if (FMath::Abs(DeltaFromBodyTarget) > MaxHeadYaw + MaxEyeHorizontal)
if (bEnableBodyTracking && FMath::Abs(DeltaFromBodyTarget) > MaxHeadYaw + MaxEyeHorizontal)
{
// Body realigns to face target
TargetBodyWorldYaw = TargetWorldYaw - MeshForwardYawOffset;
@ -337,15 +341,8 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
{
// ── No target: smoothly return to neutral ──────────────────────────
// Body: return to original facing via sticky target
TargetBodyWorldYaw = OriginalActorYaw - MeshForwardYawOffset;
const float NeutralDelta = FMath::FindDeltaAngleDegrees(
Owner->GetActorRotation().Yaw, TargetBodyWorldYaw);
if (FMath::Abs(NeutralDelta) > 0.1f)
{
const float NeutralStep = FMath::FInterpTo(0.0f, NeutralDelta, SafeDeltaTime, ReturnToNeutralSpeed);
Owner->AddActorWorldRotation(FRotator(0.0f, NeutralStep, 0.0f));
}
// Body: keep current orientation — don't rotate back to original facing.
// The body stays wherever the last tracking left it; only head and eyes reset.
// Head + Eyes: return to center
TargetHeadYaw = 0.0f;

View File

@ -8,6 +8,7 @@
class UPS_AI_ConvAgent_ElevenLabsComponent;
class UPS_AI_ConvAgent_MicrophoneCaptureComponent;
class UPS_AI_ConvAgent_PostureComponent;
// ─────────────────────────────────────────────────────────────────────────────
// Delegates
@ -76,12 +77,52 @@ public:
ToolTip = "Sticky cone half-angle for the current agent (degrees).\nMust be >= ViewConeHalfAngle for hysteresis to work.\n60 = agent stays selected until 120-degree total cone is exceeded."))
float SelectionStickyAngle = 60.0f;
/** Height offset (cm) added to the agent's origin for the view cone check.
* Agent origins are typically at foot level; this shifts the target point
* up to chest/head height so close-range look-at checks don't fail. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction",
meta = (ClampMin = "0",
ToolTip = "Height offset (cm) above the agent origin for the view cone check.\nPrevents losing selection at close range because the camera looks above the agent's feet.\n150 = roughly chest height for a MetaHuman."))
float AgentEyeLevelOffset = 150.0f;
/** When false, only distance matters for selection (no view cone check).
* The closest agent within MaxInteractionDistance is always selected. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction",
meta = (ToolTip = "Require the player to look at an agent to select it.\nWhen false, the closest agent within range is always selected."))
bool bRequireLookAt = true;
// ── Posture management ───────────────────────────────────────────────────
/** Automatically set/clear the agent's PostureComponent TargetActor
* when the agent is selected/deselected. When false, posture must
* be managed from Blueprint (e.g. on conversation start). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (ToolTip = "Automatically point the agent's posture at the pawn on selection.\nDisable for manual control (e.g. set target only when conversation starts)."))
bool bAutoManagePosture = true;
/** Delay (seconds) before setting the agent's posture target after selection.
* 0 = immediate. Useful to let the agent "notice" the player with a beat. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (EditCondition = "bAutoManagePosture", ClampMin = "0",
ToolTip = "Seconds to wait before the agent looks at the pawn.\n0 = immediate."))
float PostureAttachDelay = 0.0f;
/** Delay (seconds) before clearing the agent's posture target after deselection.
* 0 = immediate. Useful to have the agent keep looking briefly as the player leaves. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (EditCondition = "bAutoManagePosture", ClampMin = "0",
ToolTip = "Seconds to wait before the agent stops looking at the pawn.\n0 = immediate."))
float PostureDetachDelay = 0.0f;
// ── Listening management ─────────────────────────────────────────────────
/** Automatically call StartListening/StopListening on the agent's
* ElevenLabsComponent when selected/deselected. When false, listening
* must be managed from Blueprint. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Listening",
meta = (ToolTip = "Automatically open/close the agent's mic turn on selection.\nDisable for manual control (e.g. push-to-talk)."))
bool bAutoManageListening = true;
// ── Debug ────────────────────────────────────────────────────────────────
/** Enable debug logging for this component. */
@ -144,6 +185,17 @@ private:
/** Get the pawn's view location and direction (uses camera or control rotation). */
void GetPawnViewPoint(FVector& OutLocation, FVector& OutDirection) const;
// ── Posture helpers ──────────────────────────────────────────────────────
/** Find the PostureComponent on an agent's owner actor (null if absent). */
static UPS_AI_ConvAgent_PostureComponent* FindPostureOnAgent(UPS_AI_ConvAgent_ElevenLabsComponent* Agent);
/** Set the agent's PostureComponent target to the pawn (attach). */
void AttachPostureTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
/** Clear the agent's PostureComponent target (detach). */
void DetachPostureTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
// ── Mic routing ──────────────────────────────────────────────────────────
/** Forward captured mic audio to the currently selected agent. */
@ -160,4 +212,9 @@ private:
/** True while ForceSelectAgent is active (suppresses automatic re-evaluation for one frame). */
bool bForceSelectionActive = false;
// ── Posture timers ───────────────────────────────────────────────────────
FTimerHandle PostureAttachTimerHandle;
FTimerHandle PostureDetachTimerHandle;
};

View File

@ -73,6 +73,12 @@ public:
meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral."))
TObjectPtr<AActor> TargetActor;
/** When false, body rotation is frozen — only head and eyes track the target.
* Useful to have the agent notice the player (eyes+head) before fully engaging (body). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
meta = (ToolTip = "Enable body rotation toward the target.\nWhen false, only head and eyes track."))
bool bEnableBodyTracking = true;
/** Offset from the target actor's origin to aim at.
* Useful for actors without a skeleton (e.g. (0,0,160) for eye-level). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",