Compare commits

...

2 Commits

Author SHA1 Message Date
8bb4371a74 Metahuman 2026-03-04 18:27:02 +01:00
acfae96420 Body Expression system, auto external mic, gaze auto-target eyes, cleanup bActive
- Add BodyExpression system: emotion-driven body animations with per-emotion
  anim lists (Idle/Normal/Medium/Extreme), random selection, auto-cycle on
  loop complete, crossfade transitions, upper-body-only or full-body mode
- Replace bExternalMicManagement with auto-detecting ShouldUseExternalMic()
- Add bAutoTargetEyes to GazeComponent: auto-aim at target's eye bones
  (MetaHuman), with fallback chain (eyes > head > FallbackEyeHeight)
- Hide bActive from Details panel on all 4 anim components (read-only,
  code-managed): FacialExpression, Gaze, LipSync, BodyExpression
- Remove misleading mh_arkit_mapping_pose warning from LipSync
- Add bUpperBodyOnly toggle to BodyExpression AnimNode

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-04 18:26:39 +01:00
30 changed files with 1779 additions and 354 deletions

View File

@ -3,7 +3,7 @@
"Version": 1,
"VersionName": "1.0.0",
"FriendlyName": "PS AI Conversational Agent",
"Description": "Conversational AI Agent framework for Unreal Engine 5.5. Supports lip sync, facial expressions, posture/look-at, and pluggable backends (ElevenLabs, etc.).",
"Description": "Conversational AI Agent framework for Unreal Engine 5.5. Supports lip sync, facial expressions, gaze/look-at, and pluggable backends (ElevenLabs, etc.).",
"Category": "AI",
"CreatedBy": "ASTERION",
"CreatedByURL": "",

View File

@ -0,0 +1,250 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimNode_PS_AI_ConvAgent_BodyExpression.h"
#include "PS_AI_ConvAgent_BodyExpressionComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "Animation/AnimInstanceProxy.h"
#include "Animation/AnimSequence.h"
#include "GameFramework/Actor.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_BodyExprAnimNode, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// FAnimNode_Base interface
// ─────────────────────────────────────────────────────────────────────────────
void FAnimNode_PS_AI_ConvAgent_BodyExpression::Initialize_AnyThread(const FAnimationInitializeContext& Context)
{
BasePose.Initialize(Context);
BodyExpressionComponent.Reset();
CachedSnapshot = FPS_AI_ConvAgent_BodyExpressionSnapshot();
BoneMask.Reset();
bBoneMaskValid = false;
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const USkeletalMeshComponent* SkelMesh = Proxy->GetSkelMeshComponent())
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UPS_AI_ConvAgent_BodyExpressionComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_BodyExpressionComponent>();
if (Comp)
{
BodyExpressionComponent = Comp;
}
}
}
}
}
void FAnimNode_PS_AI_ConvAgent_BodyExpression::CacheBones_AnyThread(const FAnimationCacheBonesContext& Context)
{
BasePose.CacheBones(Context);
// Build per-bone weight mask from BlendRootBone
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
const FBoneContainer& RequiredBones = Proxy->GetRequiredBones();
BuildBoneMask(RequiredBones);
}
}
void FAnimNode_PS_AI_ConvAgent_BodyExpression::Update_AnyThread(const FAnimationUpdateContext& Context)
{
BasePose.Update(Context);
// Lazy lookup: in packaged builds, Initialize_AnyThread may run before
// components are created. Retry discovery until found.
if (!BodyExpressionComponent.IsValid())
{
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const USkeletalMeshComponent* SkelMesh = Proxy->GetSkelMeshComponent())
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UPS_AI_ConvAgent_BodyExpressionComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_BodyExpressionComponent>();
if (Comp)
{
BodyExpressionComponent = Comp;
}
}
}
}
}
// Cache snapshot from the component
CachedSnapshot = FPS_AI_ConvAgent_BodyExpressionSnapshot();
if (BodyExpressionComponent.IsValid())
{
CachedSnapshot = BodyExpressionComponent->GetSnapshot();
}
}
// Blends the active emotion AnimSequence on top of the upstream pose,
// per bone, weighted by BoneMask × (ActivationAlpha × BlendWeight).
// Supports a crossfade from the previous emotion anim while
// CachedSnapshot.CrossfadeAlpha < 1.
void FAnimNode_PS_AI_ConvAgent_BodyExpression::Evaluate_AnyThread(FPoseContext& Output)
{
	// Evaluate the upstream pose (pass-through)
	BasePose.Evaluate(Output);

	// ── Early-out checks ────────────────────────────────────────────────────
	// No valid mask → BuildBoneMask failed or hasn't run; leave pose untouched.
	if (!bBoneMaskValid)
		return;
	// Combined weight below threshold → effectively inactive this frame.
	const float FinalWeight = CachedSnapshot.ActivationAlpha * CachedSnapshot.BlendWeight;
	if (FinalWeight < 0.001f)
		return;
	// Weak pointer to the active sequence may have gone stale (GC).
	UAnimSequence* ActiveSeq = CachedSnapshot.ActiveAnim.Get();
	if (!ActiveSeq)
		return;
	const FBoneContainer& BoneContainer = Output.Pose.GetBoneContainer();
	const int32 NumBones = Output.Pose.GetNumBones();
	if (BoneMask.Num() != NumBones)
		return; // Stale mask — CacheBones will rebuild it

	// ── Evaluate active emotion AnimSequence ────────────────────────────────
	// Sampled into a local pose so the upstream Output pose stays intact
	// until the per-bone blend below.
	FCompactPose ActivePose;
	if (!EvaluateAnimPose(ActiveSeq, CachedSnapshot.ActiveTime, BoneContainer, ActivePose))
		return;

	// ── Handle crossfade if previous anim is still blending out ──────────
	const bool bCrossfading = CachedSnapshot.PrevAnim.IsValid() && CachedSnapshot.CrossfadeAlpha < 1.0f;
	FCompactPose PrevPose;
	if (bCrossfading)
	{
		UAnimSequence* PrevSeq = CachedSnapshot.PrevAnim.Get();
		if (PrevSeq)
		{
			EvaluateAnimPose(PrevSeq, CachedSnapshot.PrevTime, BoneContainer, PrevPose);
		}
	}
	// Only use the previous pose if it actually evaluated to something.
	const bool bHavePrevPose = bCrossfading && PrevPose.GetNumBones() > 0;

	// ── Per-bone blend ──────────────────────────────────────────────────────
	for (int32 CompactIdx = 0; CompactIdx < NumBones; ++CompactIdx)
	{
		// Bones outside the mask (e.g. lower body in upper-body-only mode)
		// keep the upstream pose untouched.
		if (BoneMask[CompactIdx] <= 0.0f)
			continue;
		const FCompactPoseBoneIndex BoneIdx(CompactIdx);
		// Effective blend weight for this bone this frame.
		const float W = BoneMask[CompactIdx] * FinalWeight;
		if (bHavePrevPose)
		{
			// Crossfade between previous and active emotion poses
			// (CrossfadeAlpha=0 → all previous, 1 → all active), then blend
			// that result onto the upstream pose.
			FTransform BlendedEmotion;
			BlendedEmotion.Blend(PrevPose[BoneIdx], ActivePose[BoneIdx], CachedSnapshot.CrossfadeAlpha);
			Output.Pose[BoneIdx].BlendWith(BlendedEmotion, W);
		}
		else
		{
			// Direct blend with active emotion pose
			Output.Pose[BoneIdx].BlendWith(ActivePose[BoneIdx], W);
		}
	}
}
void FAnimNode_PS_AI_ConvAgent_BodyExpression::GatherDebugData(FNodeDebugData& DebugData)
{
	// Report current activation alpha and mask size in the anim-graph debugger.
	DebugData.AddDebugItem(FString::Printf(
		TEXT("PS AI ConvAgent Body Expression (alpha=%.2f, bones=%d)"),
		CachedSnapshot.ActivationAlpha, BoneMask.Num()));
	BasePose.GatherDebugData(DebugData);
}
// ─────────────────────────────────────────────────────────────────────────────
// Build per-bone weight mask
// ─────────────────────────────────────────────────────────────────────────────
void FAnimNode_PS_AI_ConvAgent_BodyExpression::BuildBoneMask(const FBoneContainer& RequiredBones)
{
const int32 NumBones = RequiredBones.GetCompactPoseNumBones();
BoneMask.SetNumZeroed(NumBones);
bBoneMaskValid = false;
// Full body mode: all bones get weight 1.0
if (!bUpperBodyOnly)
{
for (int32 i = 0; i < NumBones; ++i)
{
BoneMask[i] = 1.0f;
}
bBoneMaskValid = (NumBones > 0);
UE_LOG(LogPS_AI_ConvAgent_BodyExprAnimNode, Log,
TEXT("Bone mask built: FULL BODY (%d bones)."), NumBones);
return;
}
// Upper body mode: only BlendRootBone descendants
const FReferenceSkeleton& RefSkel = RequiredBones.GetReferenceSkeleton();
const int32 RootMeshIdx = RefSkel.FindBoneIndex(BlendRootBone);
if (RootMeshIdx == INDEX_NONE)
{
UE_LOG(LogPS_AI_ConvAgent_BodyExprAnimNode, Warning,
TEXT("BlendRootBone '%s' not found in skeleton. Body expression disabled."),
*BlendRootBone.ToString());
return;
}
const TArray<FBoneIndexType>& BoneIndices = RequiredBones.GetBoneIndicesArray();
int32 MaskedCount = 0;
for (int32 CompactIdx = 0; CompactIdx < NumBones; ++CompactIdx)
{
const int32 MeshIdx = static_cast<int32>(BoneIndices[CompactIdx]);
// Walk up the parent chain: if BlendRootBone is an ancestor
// (or is this bone itself), mark it for blending.
int32 Current = MeshIdx;
while (Current != INDEX_NONE)
{
if (Current == RootMeshIdx)
{
BoneMask[CompactIdx] = 1.0f;
++MaskedCount;
break;
}
Current = RefSkel.GetParentIndex(Current);
}
}
bBoneMaskValid = (MaskedCount > 0);
UE_LOG(LogPS_AI_ConvAgent_BodyExprAnimNode, Log,
TEXT("Bone mask built: %d/%d bones from root '%s'."),
MaskedCount, NumBones, *BlendRootBone.ToString());
}
// ─────────────────────────────────────────────────────────────────────────────
// Evaluate AnimSequence into a compact pose
// ─────────────────────────────────────────────────────────────────────────────
bool FAnimNode_PS_AI_ConvAgent_BodyExpression::EvaluateAnimPose(
	UAnimSequence* AnimSeq, float Time,
	const FBoneContainer& BoneContainer, FCompactPose& OutPose) const
{
	if (AnimSeq == nullptr)
	{
		return false;
	}

	// Prepare the output pose for this bone container before sampling.
	OutPose.SetBoneContainer(&BoneContainer);
	OutPose.ResetToRefPose();

	// Curves and attributes are required by the pose-data API but are
	// discarded here — this node only consumes bone transforms.
	FBlendedCurve ScratchCurve;
	UE::Anim::FStackAttributeContainer ScratchAttributes;
	FAnimationPoseData PoseData(OutPose, ScratchCurve, ScratchAttributes);

	const FAnimExtractContext ExtractionCtx(static_cast<double>(Time), false);
	AnimSeq->GetAnimationPose(PoseData, ExtractionCtx);
	return OutPose.GetNumBones() > 0;
}

View File

@ -20,6 +20,8 @@ void FAnimNode_PS_AI_ConvAgent_FacialExpression::Initialize_AnyThread(const FAni
// This runs during initialization (game thread) so actor access is safe.
FacialExpressionComponent.Reset();
CachedEmotionCurves.Reset();
KnownCurveNames.Reset();
FramesSinceLastActive = 0;
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
@ -84,15 +86,60 @@ void FAnimNode_PS_AI_ConvAgent_FacialExpression::Evaluate_AnyThread(FPoseContext
// Evaluate the upstream pose (pass-through)
BasePose.Evaluate(Output);
// Inject emotion expression curves into the pose output.
// These are CTRL_expressions_* curves (MetaHuman native format)
// covering eyes, eyebrows, cheeks, nose, and mouth mood.
// The downstream Lip Sync node will override mouth-area curves
// during speech, while non-mouth emotion curves pass through.
// ── Inject emotion expression curves into the pose output ────────────
//
// IMPORTANT: Always write ALL curves that this node has ever touched,
// including at 0.0. If we only write active curves, upstream animation
// values (idle, breathing, etc.) leak through on curves that the emotion
// anim doesn't cover or that momentarily cross zero — causing visible
// pops on the upper face (brows, eyes, cheeks).
//
// Same strategy as the LipSync AnimNode:
// - While active: write all curves, track names in KnownCurveNames
// - After going silent: keep zeroing for a grace period (30 frames)
// - Then release so upstream anims take over smoothly
if (CachedEmotionCurves.Num() > 0)
{
FramesSinceLastActive = 0;
// Register all curve names and write their values
for (const auto& Pair : CachedEmotionCurves)
{
KnownCurveNames.Add(Pair.Key);
Output.Curve.Set(Pair.Key, Pair.Value);
}
// Zero any known curves NOT in the current frame
// (e.g. a curve that was active last frame but decayed away)
for (const FName& Name : KnownCurveNames)
{
if (!CachedEmotionCurves.Contains(Name))
{
Output.Curve.Set(Name, 0.0f);
}
}
}
else if (KnownCurveNames.Num() > 0)
{
// Emotion went inactive — keep zeroing known curves for a grace
// period so upstream values don't pop in abruptly.
++FramesSinceLastActive;
if (FramesSinceLastActive < 30)
{
for (const FName& Name : KnownCurveNames)
{
Output.Curve.Set(Name, 0.0f);
}
}
else
{
// Grace period over — release curves, let upstream through
KnownCurveNames.Reset();
FramesSinceLastActive = 0;
}
}
}
void FAnimNode_PS_AI_ConvAgent_FacialExpression::GatherDebugData(FNodeDebugData& DebugData)

View File

@ -1,12 +1,12 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimNode_PS_AI_ConvAgent_Posture.h"
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "AnimNode_PS_AI_ConvAgent_Gaze.h"
#include "PS_AI_ConvAgent_GazeComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "Animation/AnimInstanceProxy.h"
#include "GameFramework/Actor.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_PostureAnimNode, Log, All);
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_GazeAnimNode, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// ARKit → MetaHuman CTRL eye curve mapping.
@ -77,12 +77,12 @@ static const TMap<FName, FName>& GetARKitToCTRLEyeMap()
// FAnimNode_Base interface
// ─────────────────────────────────────────────────────────────────────────────
void FAnimNode_PS_AI_ConvAgent_Posture::Initialize_AnyThread(const FAnimationInitializeContext& Context)
void FAnimNode_PS_AI_ConvAgent_Gaze::Initialize_AnyThread(const FAnimationInitializeContext& Context)
{
BasePose.Initialize(Context);
// Reset all cached state.
PostureComponent.Reset();
GazeComponent.Reset();
CachedEyeCurves.Reset();
CachedHeadRotation = FQuat::Identity;
CachedHeadCompensation = 1.0f;
@ -103,11 +103,11 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Initialize_AnyThread(const FAnimationIni
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UPS_AI_ConvAgent_PostureComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>();
UPS_AI_ConvAgent_GazeComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>();
if (Comp)
{
PostureComponent = Comp;
GazeComponent = Comp;
HeadBoneName = Comp->GetHeadBoneName();
// Cache neck bone chain configuration
@ -127,13 +127,13 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Initialize_AnyThread(const FAnimationIni
}
}
// No posture component is fine — AnimBP may not require it.
// No gaze component is fine — AnimBP may not require it.
}
}
}
}
void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCacheBonesContext& Context)
void FAnimNode_PS_AI_ConvAgent_Gaze::CacheBones_AnyThread(const FAnimationCacheBonesContext& Context)
{
BasePose.CacheBones(Context);
@ -179,7 +179,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
{
ChainBoneIndices.Add(FCompactPoseBoneIndex(INDEX_NONE));
ChainRefPoseRotations.Add(FQuat::Identity);
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT(" Chain bone [%d] '%s' NOT FOUND in skeleton!"),
i, *ChainBoneNames[i].ToString());
}
@ -204,14 +204,14 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
}
else
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("Head bone '%s' NOT FOUND in skeleton. Available bones:"),
*HeadBoneName.ToString());
const int32 NumBones = FMath::Min(RefSkeleton.GetNum(), 10);
for (int32 i = 0; i < NumBones; ++i)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT(" [%d] %s"), i, *RefSkeleton.GetBoneName(i).ToString());
}
}
@ -254,7 +254,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
AncestorBoneIndices.Reset();
RefAccumAboveChain = FQuat::Identity;
{
// Find the first bone of the posture chain (or fallback head bone)
// Find the first bone of the gaze chain (or fallback head bone)
FCompactPoseBoneIndex FirstBone =
(ChainBoneIndices.Num() > 0 && ChainBoneIndices[0].GetInt() != INDEX_NONE)
? ChainBoneIndices[0]
@ -290,13 +290,13 @@ void FAnimNode_PS_AI_ConvAgent_Posture::CacheBones_AnyThread(const FAnimationCac
}
}
void FAnimNode_PS_AI_ConvAgent_Posture::Update_AnyThread(const FAnimationUpdateContext& Context)
void FAnimNode_PS_AI_ConvAgent_Gaze::Update_AnyThread(const FAnimationUpdateContext& Context)
{
BasePose.Update(Context);
// Lazy lookup: in packaged builds, Initialize_AnyThread may run before
// components are created. Retry discovery until found.
if (!PostureComponent.IsValid())
if (!GazeComponent.IsValid())
{
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
@ -304,30 +304,30 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Update_AnyThread(const FAnimationUpdateC
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UPS_AI_ConvAgent_PostureComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>();
UPS_AI_ConvAgent_GazeComponent* Comp =
Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>();
if (Comp)
{
PostureComponent = Comp;
GazeComponent = Comp;
}
}
}
}
}
// Cache posture data from the component (game thread safe copy).
// Cache gaze data from the component (game thread safe copy).
// IMPORTANT: Do NOT reset CachedHeadRotation to Identity when the
// component is momentarily invalid (GC pause, re-registration, etc.).
// Resetting would cause a 1-2 frame flash where posture is skipped
// Resetting would cause a 1-2 frame flash where gaze is skipped
// and the head snaps to the raw animation pose then back.
// Instead, keep the last valid cached values as a hold-over.
if (PostureComponent.IsValid())
if (GazeComponent.IsValid())
{
PostureComponent->FillCurrentEyeCurves(CachedEyeCurves);
CachedHeadRotation = PostureComponent->GetCurrentHeadRotation();
CachedHeadCompensation = PostureComponent->GetHeadAnimationCompensation();
CachedEyeCompensation = PostureComponent->GetEyeAnimationCompensation();
CachedBodyDriftCompensation = PostureComponent->GetBodyDriftCompensation();
GazeComponent->FillCurrentEyeCurves(CachedEyeCurves);
CachedHeadRotation = GazeComponent->GetCurrentHeadRotation();
CachedHeadCompensation = GazeComponent->GetHeadAnimationCompensation();
CachedEyeCompensation = GazeComponent->GetEyeAnimationCompensation();
CachedBodyDriftCompensation = GazeComponent->GetBodyDriftCompensation();
}
}
@ -374,7 +374,7 @@ static FQuat ComputeCompensatedBoneRot(
return (CompensatedContrib * RefPoseRot).GetNormalized();
}
void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
void FAnimNode_PS_AI_ConvAgent_Gaze::Evaluate_AnyThread(FPoseContext& Output)
{
// Evaluate the upstream pose (pass-through)
BasePose.Evaluate(Output);
@ -389,13 +389,13 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
const bool bHasEyeBones = (LeftEyeBoneIndex.GetInt() != INDEX_NONE);
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
TEXT("[%s] Posture Evaluate: HeadComp=%.2f EyeComp=%.2f DriftComp=%.2f Valid=%s HeadRot=(%s) Eyes=%d Chain=%d Ancestors=%d"),
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("[%s] Gaze Evaluate: HeadComp=%.2f EyeComp=%.2f DriftComp=%.2f Valid=%s HeadRot=(%s) Eyes=%d Chain=%d Ancestors=%d"),
NodeRole,
CachedHeadCompensation,
CachedEyeCompensation,
CachedBodyDriftCompensation,
PostureComponent.IsValid() ? TEXT("YES") : TEXT("NO"),
GazeComponent.IsValid() ? TEXT("YES") : TEXT("NO"),
*CachedHeadRotation.Rotator().ToCompactString(),
CachedEyeCurves.Num(),
ChainBoneIndices.Num(),
@ -411,7 +411,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
const FQuat Delta = AnimRot * RefRot.Inverse();
const FRotator DeltaRot = Delta.Rotator();
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT(" Chain[0] '%s' AnimDelta from RefPose: Y=%.2f P=%.2f R=%.2f (this gets removed at Comp=1)"),
*ChainBoneNames[0].ToString(),
DeltaRot.Yaw, DeltaRot.Pitch, DeltaRot.Roll);
@ -447,7 +447,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
}
if (++EyeDiagLogCounter % 300 == 1)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("[EYE DIAG MODE 1] Forcing CTRL_expressions_eyeLookUpL=1.0 | Left eye should look UP if Control Rig reads CTRL curves"));
}
@ -468,7 +468,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
}
if (++EyeDiagLogCounter % 300 == 1)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("[EYE DIAG MODE 2] Forcing ARKit eyeLookUpLeft=1.0 | Left eye should look UP if mh_arkit_mapping_pose drives eyes"));
}
@ -491,7 +491,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
}
if (++EyeDiagLogCounter % 300 == 1)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("[EYE DIAG MODE 3] Forcing FACIAL_L_Eye bone -25° pitch | Left eye should look UP if bone rotation drives eyes"));
}
#endif
@ -500,18 +500,18 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
#else // ELEVENLABS_EYE_DIAGNOSTIC == 0 — PRODUCTION
// ── Production eye gaze injection ────────────────────────────────
//
// Smooth blend between animation and posture eye direction using
// Smooth blend between animation and gaze eye direction using
// AnimationCompensation (Comp):
//
// Comp=0.0 → 100% animation (pure passthrough, nothing touched)
// Comp=0.5 → 50% animation + 50% posture
// Comp=1.0 → 100% posture (eyes frozen on target)
// Comp=0.5 → 50% animation + 50% gaze
// Comp=1.0 → 100% gaze (eyes frozen on target)
//
// How it works:
// (a) Eye bone compensation: Slerp FACIAL_L/R_Eye bone rotation
// toward ref pose proportional to Comp. (non-Control-Rig path)
// (b) CTRL curve blend: Read animation's CTRL_expressions_eyeLook*
// via Curve.Get(), Lerp with posture value, Curve.Set() result.
// via Curve.Get(), Lerp with gaze value, Curve.Set() result.
// (c) Zero ARKit eye curves to prevent mh_arkit_mapping_pose from
// overwriting the blended CTRL values.
//
@ -542,7 +542,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
Comp).GetNormalized());
}
// (b) Blend CTRL eye curves: read animation's value, lerp with posture
// (b) Blend CTRL eye curves: read animation's value, lerp with gaze
{
const auto& CTRLMap = GetARKitToCTRLEyeMap();
for (const auto& Pair : CachedEyeCurves)
@ -551,9 +551,9 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
{
// Read the animation's current CTRL value (0.0 if not set)
const float AnimValue = Output.Curve.Get(*CTRLName);
const float PostureValue = Pair.Value;
// Comp=0 → AnimValue, Comp=1 → PostureValue
const float BlendedValue = FMath::Lerp(AnimValue, PostureValue, Comp);
const float GazeValue = Pair.Value;
// Comp=0 → AnimValue, Comp=1 → GazeValue
const float BlendedValue = FMath::Lerp(AnimValue, GazeValue, Comp);
Output.Curve.Set(*CTRLName, BlendedValue);
}
}
@ -573,8 +573,8 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
#if !UE_BUILD_SHIPPING
if (EvalDebugFrameCounter % 300 == 1 && CachedEyeCurves.Num() > 0)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
TEXT(" Eyes: Comp=%.2f → %s (anim weight=%.0f%%, posture weight=%.0f%%)"),
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT(" Eyes: Comp=%.2f → %s (anim weight=%.0f%%, gaze weight=%.0f%%)"),
Comp,
Comp > 0.001f ? TEXT("BLEND") : TEXT("PASSTHROUGH"),
(1.0f - Comp) * 100.0f,
@ -591,14 +591,14 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
return;
}
// IMPORTANT: Even when posture is near-zero (head looking straight at target),
// IMPORTANT: Even when gaze is near-zero (head looking straight at target),
// we still need to run compensation to REMOVE the animation's head contribution.
// Only skip if BOTH posture is identity AND compensation is inactive (pure additive).
// Only skip if BOTH gaze is identity AND compensation is inactive (pure additive).
// Bug fix: the old check `CachedHeadRotation.Equals(Identity)` would early-return
// even at Comp=1.0, letting the animation's head movement play through unchecked.
const bool bHasPosture = !CachedHeadRotation.Equals(FQuat::Identity, 0.001f);
const bool bHasGaze = !CachedHeadRotation.Equals(FQuat::Identity, 0.001f);
const bool bHasCompensation = CachedHeadCompensation > 0.001f;
if (!bHasPosture && !bHasCompensation)
if (!bHasGaze && !bHasCompensation)
{
return;
}
@ -630,7 +630,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
if (++DiagLogCounter % 90 == 0)
{
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT("DIAG Phase %d: %s | Timer=%.1f"), Phase, PhaseName, DiagTimer);
}
}
@ -640,15 +640,15 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
// ── Body drift compensation ─────────────────────────────────────────
//
// When the animation bends the torso (bow, lean, etc.), all bones above
// the spine shift in world space. The posture rotation (CachedHeadRotation)
// the spine shift in world space. The gaze rotation (CachedHeadRotation)
// was computed relative to the character standing upright, so the head
// drifts away from the target.
//
// Fix: measure how much the ancestor bones (root→parent-of-chain) have
// rotated compared to their ref pose, and counter-rotate the posture.
// rotated compared to their ref pose, and counter-rotate the gaze.
//
// BodyDrift = AnimAccum * RefAccum⁻¹
// AdjustedPosture = BodyDrift⁻¹ * Posture
// AdjustedGaze = BodyDrift⁻¹ * Gaze
//
// The BodyDriftCompensation parameter (0→1) controls how much of the
// drift is cancelled. At 0 the head follows body movement naturally;
@ -662,7 +662,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
// pitch/roll components don't get stripped by the tilt decomposition.
//
// DriftCorrection = AnimAccum⁻¹ * RefAccum
// BoneWorld = AnimAccum * DriftCorrection * CleanPosture = RefAccum * CleanPosture ✓
// BoneWorld = AnimAccum * DriftCorrection * CleanGaze = RefAccum * CleanGaze ✓
FQuat DriftCorrection = FQuat::Identity;
@ -692,7 +692,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
if (EvalDebugFrameCounter % 300 == 1)
{
const FRotator CorrRot = FullCorrection.Rotator();
UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
UE_LOG(LogPS_AI_ConvAgent_GazeAnimNode, Verbose,
TEXT(" DriftCorrection: Y=%.1f P=%.1f R=%.1f | Comp=%.2f"),
CorrRot.Yaw, CorrRot.Pitch, CorrRot.Roll,
CachedBodyDriftCompensation);
@ -705,7 +705,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
{
// ── Multi-bone neck chain: per-bone swing-twist ──────────────────
//
// Posture (CachedHeadRotation) is distributed fractionally across bones.
// Gaze (CachedHeadRotation) is distributed fractionally across bones.
// Swing-twist removes parasitic roll per bone.
// Drift correction is applied AFTER swing-twist on the FIRST bone only,
// so it doesn't get stripped and doesn't compound through the chain.
@ -726,7 +726,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
const FQuat CompensatedRot = ComputeCompensatedBoneRot(
AnimBoneRot, ChainRefPoseRotations[i], CachedHeadCompensation);
// Fractional posture rotation (NO drift — drift applied separately)
// Fractional gaze rotation (NO drift — drift applied separately)
const FQuat SafeHeadRot = EnforceShortestPath(CachedHeadRotation, FQuat::Identity);
const FQuat FractionalRot = FQuat::Slerp(
FQuat::Identity, SafeHeadRot, ChainBoneWeights[i]);
@ -763,7 +763,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
const FQuat CompensatedRot = ComputeCompensatedBoneRot(
AnimBoneRot, HeadRefPoseRotation, CachedHeadCompensation);
// Apply posture (NO drift)
// Apply gaze (NO drift)
const FQuat SafeHeadRot = EnforceShortestPath(CachedHeadRotation, CompensatedRot);
const FQuat Combined = SafeHeadRot * CompensatedRot;
@ -781,11 +781,11 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
#endif
}
void FAnimNode_PS_AI_ConvAgent_Posture::GatherDebugData(FNodeDebugData& DebugData)
void FAnimNode_PS_AI_ConvAgent_Gaze::GatherDebugData(FNodeDebugData& DebugData)
{
const FRotator DebugRot = CachedHeadRotation.Rotator();
FString DebugLine = FString::Printf(
TEXT("PS AI ConvAgent Posture (eyes: %d, head: Y=%.1f P=%.1f, chain: %d, headComp: %.1f, eyeComp: %.1f, driftComp: %.1f)"),
TEXT("PS AI ConvAgent Gaze (eyes: %d, head: Y=%.1f P=%.1f, chain: %d, headComp: %.1f, eyeComp: %.1f, driftComp: %.1f)"),
CachedEyeCurves.Num(),
DebugRot.Yaw, DebugRot.Pitch,
ChainBoneIndices.Num(),

View File

@ -20,6 +20,8 @@ void FAnimNode_PS_AI_ConvAgent_LipSync::Initialize_AnyThread(const FAnimationIni
// This runs during initialization (game thread) so actor access is safe.
LipSyncComponent.Reset();
CachedCurves.Reset();
KnownCurveNames.Reset();
FramesSinceLastActive = 0;
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
@ -85,16 +87,60 @@ void FAnimNode_PS_AI_ConvAgent_LipSync::Evaluate_AnyThread(FPoseContext& Output)
// Evaluate the upstream pose (pass-through)
BasePose.Evaluate(Output);
// Inject lip sync curves into the pose output.
// Skip near-zero values so that the upstream Facial Expression node's
// emotion curves (eyes, brows, mouth mood) pass through during silence.
// During speech, active lip sync curves override emotion's mouth curves.
// ── Inject lip sync curves into the pose output ──────────────────────
//
// IMPORTANT: Always write ALL curves that lip sync has ever touched,
// including at 0.0. If we skip near-zero curves, upstream animation
// values (idle expressions, breathing, etc.) leak through and cause
// visible pops when lip sync curves cross the threshold.
//
// Strategy:
// - While lip sync is producing curves: write them all (including 0s)
// and track every curve name in KnownCurveNames.
// - After lip sync goes silent: keep writing 0s for a grace period
// (30 frames ≈ 0.5s) so the upstream can blend back in smoothly
// via the component's activation alpha, then release.
if (CachedCurves.Num() > 0)
{
FramesSinceLastActive = 0;
// Register all curve names and write their values
for (const auto& Pair : CachedCurves)
{
if (FMath::Abs(Pair.Value) > 0.01f)
{
KnownCurveNames.Add(Pair.Key);
Output.Curve.Set(Pair.Key, Pair.Value);
}
// Zero any known curves NOT in the current frame
// (e.g. a blendshape that was active last frame but decayed away)
for (const FName& Name : KnownCurveNames)
{
if (!CachedCurves.Contains(Name))
{
Output.Curve.Set(Name, 0.0f);
}
}
}
else if (KnownCurveNames.Num() > 0)
{
// Lip sync went silent — keep zeroing known curves for a grace
// period so upstream values don't pop in abruptly.
++FramesSinceLastActive;
if (FramesSinceLastActive < 30)
{
for (const FName& Name : KnownCurveNames)
{
Output.Curve.Set(Name, 0.0f);
}
}
else
{
// Grace period over — release curves, let upstream through
KnownCurveNames.Reset();
FramesSinceLastActive = 0;
}
}
}

View File

@ -0,0 +1,456 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_BodyExpressionComponent.h"
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "PS_AI_ConvAgent_BodyPoseMap.h"
#include "Animation/AnimSequence.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ConvAgent_BodyExpr, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// Construction
// ─────────────────────────────────────────────────────────────────────────────
// Default constructor: enables per-frame ticking so the component can
// advance its body-expression state every frame.
UPS_AI_ConvAgent_BodyExpressionComponent::UPS_AI_ConvAgent_BodyExpressionComponent()
{
	PrimaryComponentTick.bCanEverTick = true;
	// Tick in the pre-physics group — presumably so state is updated before
	// skeletal mesh animation work runs this frame (TODO confirm ordering).
	PrimaryComponentTick.TickGroup = TG_PrePhysics;
}
// ─────────────────────────────────────────────────────────────────────────────
// BeginPlay / EndPlay
// ─────────────────────────────────────────────────────────────────────────────
// Bind this component's state machine to the sibling ElevenLabs agent.
// On success the component starts INACTIVE and waits for the
// OnAgentConnected delegate; without an agent it logs a warning and stays
// inert (TickComponent retries discovery each frame as a fallback).
void UPS_AI_ConvAgent_BodyExpressionComponent::BeginPlay()
{
    Super::BeginPlay();

    AActor* Owner = GetOwner();
    if (!Owner)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Warning, TEXT("No owner actor — body expressions disabled."));
        return;
    }

    // Find and bind to agent component. All six delegates feed this
    // component: emotion changes, conversation connect/disconnect,
    // speaking start/stop, and interruption.
    auto* Agent = Owner->FindComponentByClass<UPS_AI_ConvAgent_ElevenLabsComponent>();
    if (Agent)
    {
        AgentComponent = Agent;
        Agent->OnAgentEmotionChanged.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnEmotionChanged);
        Agent->OnAgentConnected.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationConnected);
        Agent->OnAgentDisconnected.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationDisconnected);
        Agent->OnAgentStartedSpeaking.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStarted);
        Agent->OnAgentStoppedSpeaking.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStopped);
        Agent->OnAgentInterrupted.AddDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnInterrupted);

        // Start inactive — activate on conversation connect
        bActive = false;
        bIsSpeaking = false;
        CurrentActiveAlpha = 0.0f;

        if (bDebug)
        {
            UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
                TEXT("Body expression bound to agent on %s. Waiting for conversation."),
                *Owner->GetName());
        }
    }
    else
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Warning,
            TEXT("No PS_AI_ConvAgent_ElevenLabsComponent found on %s — "
                 "body expressions will not respond to speech/emotion."),
            *Owner->GetName());
    }

    // Debug summary: count every anim referenced by the configured pose map
    // across all four intensity lists, per emotion.
    if (bDebug && BodyPoseMap)
    {
        int32 AnimCount = 0;
        for (const auto& Pair : BodyPoseMap->BodyPoses)
        {
            const FPS_AI_ConvAgent_BodyAnimList& AnimList = Pair.Value;
            AnimCount += AnimList.Idle.Num() + AnimList.Normal.Num()
                + AnimList.Medium.Num() + AnimList.Extreme.Num();
        }
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("=== Body poses: %d emotions, %d total anims ==="),
            BodyPoseMap->BodyPoses.Num(), AnimCount);
    }
}
// Unbind every delegate bound in BeginPlay (and in TickComponent's lazy
// binding path) so the dying component is never called back. The six
// RemoveDynamic calls mirror the six AddDynamic calls exactly.
void UPS_AI_ConvAgent_BodyExpressionComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
    if (AgentComponent.IsValid())
    {
        AgentComponent->OnAgentEmotionChanged.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnEmotionChanged);
        AgentComponent->OnAgentConnected.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationConnected);
        AgentComponent->OnAgentDisconnected.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationDisconnected);
        AgentComponent->OnAgentStartedSpeaking.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStarted);
        AgentComponent->OnAgentStoppedSpeaking.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStopped);
        AgentComponent->OnAgentInterrupted.RemoveDynamic(
            this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnInterrupted);
    }
    Super::EndPlay(EndPlayReason);
}
// ─────────────────────────────────────────────────────────────────────────────
// Animation selection helpers
// ─────────────────────────────────────────────────────────────────────────────
// Resolve the anim list for a given (emotion, intensity, speaking-state)
// triple from the configured pose map.
//
// Idle (not speaking): only the Idle list applies.
// Speaking: prefer the list matching the requested intensity, then fall
// back through Medium → Normal → Extreme → Idle. Returns nullptr when no
// non-empty list exists (or no pose map / emotion entry is configured).
const TArray<TObjectPtr<UAnimSequence>>* UPS_AI_ConvAgent_BodyExpressionComponent::FindAnimList(
    EPS_AI_ConvAgent_Emotion Emotion,
    EPS_AI_ConvAgent_EmotionIntensity Intensity,
    bool bSpeaking) const
{
    const FPS_AI_ConvAgent_BodyAnimList* Lists =
        BodyPoseMap ? BodyPoseMap->BodyPoses.Find(Emotion) : nullptr;
    if (!Lists)
    {
        return nullptr;
    }

    // A list is only usable when it actually contains anims.
    const auto NonEmpty = [](const TArray<TObjectPtr<UAnimSequence>>& List)
        -> const TArray<TObjectPtr<UAnimSequence>>*
    {
        return List.Num() > 0 ? &List : nullptr;
    };

    if (!bSpeaking)
    {
        return NonEmpty(Lists->Idle);
    }

    // Speaking: try the list matching the requested intensity first.
    const TArray<TObjectPtr<UAnimSequence>>* Preferred = nullptr;
    switch (Intensity)
    {
    case EPS_AI_ConvAgent_EmotionIntensity::Low:    Preferred = NonEmpty(Lists->Normal);  break;
    case EPS_AI_ConvAgent_EmotionIntensity::Medium: Preferred = NonEmpty(Lists->Medium);  break;
    case EPS_AI_ConvAgent_EmotionIntensity::High:   Preferred = NonEmpty(Lists->Extreme); break;
    }
    if (Preferred)
    {
        return Preferred;
    }

    // Fallback chain when the preferred list is empty.
    if (const auto* Fallback = NonEmpty(Lists->Medium))  return Fallback;
    if (const auto* Fallback = NonEmpty(Lists->Normal))  return Fallback;
    if (const auto* Fallback = NonEmpty(Lists->Extreme)) return Fallback;
    return NonEmpty(Lists->Idle);
}
// Convenience forwarder: resolve the anim list for the component's CURRENT
// emotion, intensity, and speaking state. May return nullptr (see FindAnimList).
const TArray<TObjectPtr<UAnimSequence>>* UPS_AI_ConvAgent_BodyExpressionComponent::GetCurrentAnimList() const
{
    return FindAnimList(ActiveEmotion, ActiveEmotionIntensity, bIsSpeaking);
}
// Pick a random anim from the list, trying (up to 4 draws total) not to
// repeat the one currently playing so consecutive cycles look varied.
// Returns nullptr for an empty list.
UAnimSequence* UPS_AI_ConvAgent_BodyExpressionComponent::PickRandomAnim(
    const TArray<TObjectPtr<UAnimSequence>>& List) const
{
    const int32 Count = List.Num();
    if (Count == 0)
    {
        return nullptr;
    }
    if (Count == 1)
    {
        return List[0];
    }

    // Redraw up to three times while we keep hitting the active anim.
    int32 Choice = FMath::RandRange(0, Count - 1);
    for (int32 Draw = 1; Draw < 4 && List[Choice] == ActiveAnim; ++Draw)
    {
        Choice = FMath::RandRange(0, Count - 1);
    }
    return List[Choice];
}
// Begin a crossfade to NewAnim. The currently playing anim (and its cursor)
// is demoted to the "previous" slot so the AnimNode can blend out of it.
// bForce allows re-selecting the same anim (used by the auto-cycle so a
// looping anim can restart from t=0).
void UPS_AI_ConvAgent_BodyExpressionComponent::SwitchToNewAnim(UAnimSequence* NewAnim, bool bForce)
{
    // Unforced no-op switches are ignored.
    if (!bForce && NewAnim == ActiveAnim)
    {
        return;
    }
    if (!NewAnim)
    {
        return;
    }

    // Demote current → previous for the crossfade, start the new anim at 0.
    PrevAnim = ActiveAnim;
    PrevPlaybackTime = ActivePlaybackTime;
    ActiveAnim = NewAnim;
    ActivePlaybackTime = 0.0f;
    CrossfadeAlpha = 0.0f; // restart the blend

    if (bDebug && DebugVerbosity >= 1)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Body anim switch: %s → %s (%s, %s)"),
            PrevAnim ? *PrevAnim->GetName() : TEXT("(none)"),
            *NewAnim->GetName(),
            bIsSpeaking ? TEXT("speaking") : TEXT("idle"),
            *UEnum::GetValueAsString(ActiveEmotion));
    }
}
void UPS_AI_ConvAgent_BodyExpressionComponent::PickAndSwitchAnim()
{
const TArray<TObjectPtr<UAnimSequence>>* List = GetCurrentAnimList();
if (!List || List->Num() == 0) return;
UAnimSequence* NewAnim = PickRandomAnim(*List);
if (NewAnim)
{
SwitchToNewAnim(NewAnim, true); // bForce=true: allow same anim restart for loop cycle
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Conversation connect / disconnect handlers
// ─────────────────────────────────────────────────────────────────────────────
// A conversation just opened: activate in the idle state and immediately
// pick an idle anim so the body starts moving as the blend fades in.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationConnected(
    const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& Info)
{
    bIsSpeaking = false;
    bActive = true;
    PickAndSwitchAnim();

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Conversation connected — body expression activating (idle)."));
    }
}
// Conversation over: deactivate. TickComponent's activation alpha blends
// the body expression out smoothly rather than snapping off.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationDisconnected(
    int32 StatusCode, const FString& Reason)
{
    bIsSpeaking = false;
    bActive = false;

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Conversation disconnected (code=%d) — body expression deactivating."), StatusCode);
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// Speaking / interruption handlers
// ─────────────────────────────────────────────────────────────────────────────
// Agent audio started — crossfade from the idle anim to a speaking anim
// for the current emotion/intensity.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStarted()
{
    bIsSpeaking = true;
    PickAndSwitchAnim();

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Agent started speaking — switching to speaking body anim."));
    }
}
// Agent audio finished — crossfade from the speaking anim back to idle.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStopped()
{
    bIsSpeaking = false;
    PickAndSwitchAnim();

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Agent stopped speaking — switching to idle body anim."));
    }
}
// User barged in mid-utterance — treat like speech ending: back to idle.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnInterrupted()
{
    bIsSpeaking = false;
    PickAndSwitchAnim();

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Agent interrupted — switching to idle body anim."));
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// Emotion change handler
// ─────────────────────────────────────────────────────────────────────────────
// The agent's detected emotion (or its intensity) changed — crossfade to an
// anim from the new emotion's list and notify Blueprint/C++ listeners.
void UPS_AI_ConvAgent_BodyExpressionComponent::OnEmotionChanged(
    EPS_AI_ConvAgent_Emotion Emotion, EPS_AI_ConvAgent_EmotionIntensity Intensity)
{
    // Early-out when nothing actually changed.
    const bool bUnchanged =
        (Emotion == ActiveEmotion) && (Intensity == ActiveEmotionIntensity);
    if (bUnchanged)
    {
        return;
    }

    ActiveEmotion = Emotion;
    ActiveEmotionIntensity = Intensity;

    PickAndSwitchAnim();
    OnBodyExpressionChanged.Broadcast(Emotion, Intensity);

    if (bDebug)
    {
        UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
            TEXT("Body emotion changed: %s (%s) — %s"),
            *UEnum::GetValueAsString(Emotion), *UEnum::GetValueAsString(Intensity),
            bIsSpeaking ? TEXT("speaking") : TEXT("idle"));
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// Tick — advance playback, detect loop end, crossfade, update snapshot
// ─────────────────────────────────────────────────────────────────────────────
// Per-frame update (game thread). In order:
//  1. lazily (re)discover and bind the agent if BeginPlay ran too early,
//  2. blend the activation alpha toward bActive,
//  3. advance the active/previous playback cursors (looping via Fmod),
//  4. auto-cycle to a fresh random anim when the current loop completes,
//  5. advance the crossfade and release the previous anim when done,
//  6. publish state to CurrentSnapshot under SnapshotLock for the AnimNode.
void UPS_AI_ConvAgent_BodyExpressionComponent::TickComponent(
    float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
    Super::TickComponent(DeltaTime, TickType, ThisTickFunction);

    // ── Lazy binding: in packaged builds, BeginPlay may run before the
    // ElevenLabsComponent is fully initialized. Retry discovery until bound.
    // The IsValid() guard ensures the six delegates are never double-bound.
    if (!AgentComponent.IsValid())
    {
        if (AActor* Owner = GetOwner())
        {
            auto* Agent = Owner->FindComponentByClass<UPS_AI_ConvAgent_ElevenLabsComponent>();
            if (Agent)
            {
                AgentComponent = Agent;
                // Same six bindings as BeginPlay.
                Agent->OnAgentEmotionChanged.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnEmotionChanged);
                Agent->OnAgentConnected.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationConnected);
                Agent->OnAgentDisconnected.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnConversationDisconnected);
                Agent->OnAgentStartedSpeaking.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStarted);
                Agent->OnAgentStoppedSpeaking.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnSpeakingStopped);
                Agent->OnAgentInterrupted.AddDynamic(
                    this, &UPS_AI_ConvAgent_BodyExpressionComponent::OnInterrupted);
            }
        }
    }

    // ── Smooth activation blend ──────────────────────────────────────────
    // Linear ramp: FInterpConstantTo at speed 1/duration reaches the target
    // in ActivationBlendDuration seconds (clamped to >= 0.01 to avoid /0).
    {
        const float TargetAlpha = bActive ? 1.0f : 0.0f;
        if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f))
        {
            const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f);
            CurrentActiveAlpha = FMath::FInterpConstantTo(
                CurrentActiveAlpha, TargetAlpha, DeltaTime, BlendSpeed);
        }
        else
        {
            CurrentActiveAlpha = TargetAlpha;
        }
    }

    // Nothing to play — still update snapshot to zero alpha.
    // (Time/crossfade fields are left stale here; presumably ignored
    // downstream since both anim pointers are null — confirm in AnimNode.)
    if (!ActiveAnim && !PrevAnim)
    {
        FScopeLock Lock(&SnapshotLock);
        CurrentSnapshot.ActiveAnim = nullptr;
        CurrentSnapshot.PrevAnim = nullptr;
        CurrentSnapshot.ActivationAlpha = CurrentActiveAlpha;
        CurrentSnapshot.BlendWeight = BlendWeight;
        return;
    }

    // ── Advance playback cursors ─────────────────────────────────────────
    // The active cursor wraps via Fmod and flags the wrap so the auto-cycle
    // below can react; the previous cursor only advances mid-crossfade.
    bool bLoopCompleted = false;
    if (ActiveAnim)
    {
        const float Duration = ActiveAnim->GetPlayLength();
        ActivePlaybackTime += DeltaTime;
        if (Duration > 0.0f && ActivePlaybackTime >= Duration)
        {
            bLoopCompleted = true;
            ActivePlaybackTime = FMath::Fmod(ActivePlaybackTime, Duration);
        }
    }
    if (PrevAnim && CrossfadeAlpha < 1.0f)
    {
        PrevPlaybackTime += DeltaTime;
        const float Duration = PrevAnim->GetPlayLength();
        if (Duration > 0.0f)
        {
            PrevPlaybackTime = FMath::Fmod(PrevPlaybackTime, Duration);
        }
    }

    // ── Auto-cycle: pick a new random anim when the current one finishes ─
    // Only once the previous crossfade has fully completed, and only when
    // the list offers alternatives (a single anim just keeps looping).
    if (bLoopCompleted && CrossfadeAlpha >= 1.0f)
    {
        const TArray<TObjectPtr<UAnimSequence>>* List = GetCurrentAnimList();
        if (List && List->Num() > 1)
        {
            // Only cycle if there are multiple anims in the list
            UAnimSequence* NewAnim = PickRandomAnim(*List);
            if (NewAnim)
            {
                SwitchToNewAnim(NewAnim, true);
                if (bDebug && DebugVerbosity >= 2)
                {
                    UE_LOG(LogPS_AI_ConvAgent_BodyExpr, Log,
                        TEXT("Auto-cycle: loop complete → new anim %s"),
                        *NewAnim->GetName());
                }
            }
        }
    }

    // ── Advance crossfade ───────────────────────────────────────────────────
    // Linear ramp over EmotionBlendDuration seconds (floored at 0.05s).
    if (CrossfadeAlpha < 1.0f)
    {
        const float BlendSpeed = 1.0f / FMath::Max(0.05f, EmotionBlendDuration);
        CrossfadeAlpha = FMath::Min(1.0f, CrossfadeAlpha + DeltaTime * BlendSpeed);

        // Crossfade complete — release previous anim
        if (CrossfadeAlpha >= 1.0f)
        {
            PrevAnim = nullptr;
            PrevPlaybackTime = 0.0f;
        }
    }

    // ── Update snapshot under lock ──────────────────────────────────────────
    // SnapshotLock guards against the anim evaluation reading a half-written
    // snapshot from another thread.
    {
        FScopeLock Lock(&SnapshotLock);
        CurrentSnapshot.ActiveAnim = ActiveAnim;
        CurrentSnapshot.PrevAnim = PrevAnim;
        CurrentSnapshot.ActiveTime = ActivePlaybackTime;
        CurrentSnapshot.PrevTime = PrevPlaybackTime;
        CurrentSnapshot.CrossfadeAlpha = CrossfadeAlpha;
        CurrentSnapshot.ActivationAlpha = CurrentActiveAlpha;
        CurrentSnapshot.BlendWeight = BlendWeight;
    }
}

View File

@ -0,0 +1,3 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_BodyPoseMap.h"

View File

@ -3,7 +3,7 @@
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_MicrophoneCaptureComponent.h"
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "PS_AI_ConvAgent_GazeComponent.h"
#include "PS_AI_ConvAgent_FacialExpressionComponent.h"
#include "PS_AI_ConvAgent_LipSyncComponent.h"
#include "PS_AI_ConvAgent_InteractionSubsystem.h"
@ -174,7 +174,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::TickComponent(float DeltaTime, ELevel
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Error,
TEXT("Reconnection failed after %d attempts — giving up."), MaxReconnectAttempts);
bNetIsConversing = false;
ApplyConversationPosture();
ApplyConversationGaze();
NetConversatingPlayer = nullptr;
NetConversatingPawn = nullptr;
OnAgentDisconnected.Broadcast(1006, TEXT("Reconnection failed"));
@ -260,7 +260,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation()
{
if (GetOwnerRole() == ROLE_Authority)
{
// Set conversation state (used by ApplyConversationPosture, posture, LOD, etc.).
// Set conversation state (used by ApplyConversationGaze, gaze, LOD, etc.).
// In standalone these aren't replicated but are still needed as local state flags.
APlayerController* PC = GetWorld() ? GetWorld()->GetFirstPlayerController() : nullptr;
bNetIsConversing = true;
@ -271,8 +271,8 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartConversation()
// Reuse the existing connection — only set up conversation state.
if (bPersistentSession && IsConnected())
{
// WebSocket already alive — just set up conversation state (posture, etc.).
ApplyConversationPosture();
// WebSocket already alive — just set up conversation state (gaze, etc.).
ApplyConversationGaze();
OnAgentConnected.Broadcast(WebSocketProxy->GetConversationInfo());
// Auto-start listening if configured (same as HandleConnected).
@ -373,7 +373,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::EndConversation()
// Reset replicated state so other players can talk to this NPC.
bNetIsConversing = false;
ApplyConversationPosture();
ApplyConversationGaze();
NetConversatingPlayer = nullptr;
NetConversatingPawn = nullptr;
}
@ -451,7 +451,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartListening()
// External mic mode: the InteractionComponent on the pawn manages the mic and
// feeds audio via FeedExternalAudio(). We only manage turn state here.
if (!bExternalMicManagement)
if (!ShouldUseExternalMic())
{
// Find the microphone component on our owner actor, or create one.
UPS_AI_ConvAgent_MicrophoneCaptureComponent* Mic =
@ -489,7 +489,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StartListening()
{
const double T = TurnStartTime - SessionStartTime;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log, TEXT("[T+%.2fs] [Turn %d] Mic opened%s — user speaking."),
T, TurnIndex, bExternalMicManagement ? TEXT(" (external)") : TEXT(""));
T, TurnIndex, ShouldUseExternalMic() ? TEXT(" (external)") : TEXT(""));
}
}
@ -499,7 +499,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::StopListening()
bIsListening = false;
// External mic mode: mic is managed by the InteractionComponent, not us.
if (!bExternalMicManagement)
if (!ShouldUseExternalMic())
{
if (UPS_AI_ConvAgent_MicrophoneCaptureComponent* Mic =
GetOwner() ? GetOwner()->FindComponentByClass<UPS_AI_ConvAgent_MicrophoneCaptureComponent>() : nullptr)
@ -611,14 +611,14 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::SendTextMessage(const FString& Text)
}
}
// Enable body tracking on the sibling PostureComponent (if present).
// Enable body tracking on the sibling GazeComponent (if present).
// Text input counts as conversation engagement, same as voice.
if (AActor* Owner = GetOwner())
{
if (UPS_AI_ConvAgent_PostureComponent* Posture =
Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>())
if (UPS_AI_ConvAgent_GazeComponent* Gaze =
Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
{
Posture->bEnableBodyTracking = true;
Gaze->bEnableBodyTracking = true;
}
}
}
@ -869,7 +869,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleDisconnected(int32 StatusCode,
if (GetOwnerRole() == ROLE_Authority)
{
bNetIsConversing = false;
ApplyConversationPosture();
ApplyConversationGaze();
NetConversatingPlayer = nullptr;
NetConversatingPawn = nullptr;
}
@ -1015,10 +1015,10 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::HandleAgentResponseStarted()
// so the agent "notices" the player first and turns its body only when engaging.
if (AActor* OwnerActor = GetOwner())
{
if (UPS_AI_ConvAgent_PostureComponent* Posture =
OwnerActor->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>())
if (UPS_AI_ConvAgent_GazeComponent* Gaze =
OwnerActor->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
{
Posture->bEnableBodyTracking = true;
Gaze->bEnableBodyTracking = true;
}
}
@ -1572,30 +1572,30 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::OnRep_ConversationState()
if (Owner)
{
// Update posture target on all clients so the NPC head/eyes track the
// Update gaze target on all clients so the NPC head/eyes track the
// conversating player. TargetActor is normally set by InteractionComponent
// on the local pawn, but remote clients never run that code path.
if (UPS_AI_ConvAgent_PostureComponent* Posture = Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>())
if (UPS_AI_ConvAgent_GazeComponent* Gaze = Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>())
{
// Use NetConversatingPawn (replicated to ALL clients) instead of
// NetConversatingPlayer->GetPawn() — PlayerControllers are only
// replicated to their owning client (bOnlyRelevantToOwner=true).
if (bNetIsConversing && NetConversatingPawn)
{
Posture->bActive = true;
Posture->TargetActor = NetConversatingPawn;
Posture->ResetBodyTarget();
Posture->bEnableBodyTracking = true;
Gaze->bActive = true;
Gaze->TargetActor = NetConversatingPawn;
Gaze->ResetBodyTarget();
Gaze->bEnableBodyTracking = true;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Log,
TEXT("[NET-REP] Posture ACTIVATED, TargetActor set to %s"), *NetConversatingPawn->GetName());
TEXT("[NET-REP] Gaze ACTIVATED, TargetActor set to %s"), *NetConversatingPawn->GetName());
}
else
{
Posture->bActive = false;
Posture->TargetActor = nullptr;
Posture->bEnableBodyTracking = false;
Gaze->bActive = false;
Gaze->TargetActor = nullptr;
Gaze->bEnableBodyTracking = false;
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[NET-REP] Posture TargetActor cleared — bNetIsConversing=%s Pawn=%s"),
TEXT("[NET-REP] Gaze TargetActor cleared — bNetIsConversing=%s Pawn=%s"),
bNetIsConversing ? TEXT("true") : TEXT("false"),
NetConversatingPawn ? TEXT("valid") : TEXT("NULL"));
}
@ -1603,7 +1603,7 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::OnRep_ConversationState()
else
{
UE_LOG(LogPS_AI_ConvAgent_ElevenLabs, Warning,
TEXT("[NET-REP] PostureComponent NOT FOUND on %s"), *Owner->GetName());
TEXT("[NET-REP] GazeComponent NOT FOUND on %s"), *Owner->GetName());
}
// Activate/deactivate facial expressions and lip sync for remote clients.
@ -1664,8 +1664,8 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::ServerRequestConversation_Implementat
NetConversatingPlayer = RequestingPlayer;
NetConversatingPawn = RequestingPlayer ? RequestingPlayer->GetPawn() : nullptr;
// Update NPC posture on the server (OnRep never fires on Authority).
ApplyConversationPosture();
// Update NPC gaze on the server (OnRep never fires on Authority).
ApplyConversationGaze();
// In persistent mode the WebSocket is already open — skip reconnection.
if (bPersistentSession && IsConnected())
@ -1715,10 +1715,10 @@ void UPS_AI_ConvAgent_ElevenLabsComponent::ServerReleaseConversation_Implementat
}
}
// Clear posture before nullifying the pawn pointer (ApplyConversationPosture
// Clear gaze before nullifying the pawn pointer (ApplyConversationGaze
// uses NetConversatingPawn to guard against clearing someone else's target).
bNetIsConversing = false;
ApplyConversationPosture();
ApplyConversationGaze();
NetConversatingPlayer = nullptr;
NetConversatingPawn = nullptr;
}
@ -1955,32 +1955,56 @@ UPS_AI_ConvAgent_InteractionComponent* UPS_AI_ConvAgent_ElevenLabsComponent::Fin
return nullptr;
}
void UPS_AI_ConvAgent_ElevenLabsComponent::ApplyConversationPosture()
bool UPS_AI_ConvAgent_ElevenLabsComponent::ShouldUseExternalMic() const
{
// Same logic as OnRep_ConversationState's posture section, but callable
// Network client: audio arrives via relay RPCs from InteractionComponent
if (GetOwnerRole() != ROLE_Authority) return true;
// Authority with a remote player: audio arrives via ServerSendMicAudio RPC
if (NetConversatingPlayer && !NetConversatingPlayer->IsLocalController()) return true;
// InteractionComponent on local player's pawn: it manages mic + routes audio
if (UWorld* World = GetWorld())
{
if (APlayerController* PC = World->GetFirstPlayerController())
{
if (APawn* Pawn = PC->GetPawn())
{
if (Pawn->FindComponentByClass<UPS_AI_ConvAgent_InteractionComponent>())
return true;
}
}
}
return false;
}
void UPS_AI_ConvAgent_ElevenLabsComponent::ApplyConversationGaze()
{
// Same logic as OnRep_ConversationState's gaze section, but callable
// from the server side where OnRep never fires (Authority).
AActor* Owner = GetOwner();
if (!Owner) return;
auto* Posture = Owner->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>();
if (!Posture) return;
auto* Gaze = Owner->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>();
if (!Gaze) return;
if (bNetIsConversing && NetConversatingPawn)
{
Posture->bActive = true;
Posture->TargetActor = NetConversatingPawn;
Posture->ResetBodyTarget();
Posture->bEnableBodyTracking = true;
Gaze->bActive = true;
Gaze->TargetActor = NetConversatingPawn;
Gaze->ResetBodyTarget();
Gaze->bEnableBodyTracking = true;
}
else
{
// Only clear if the posture is still pointing at the departing player.
// Only clear if the gaze is still pointing at the departing player.
// Another InteractionComponent may have already set a new TargetActor.
if (!Posture->TargetActor || Posture->TargetActor == NetConversatingPawn)
if (!Gaze->TargetActor || Gaze->TargetActor == NetConversatingPawn)
{
Posture->bActive = false;
Posture->TargetActor = nullptr;
Posture->bEnableBodyTracking = false;
Gaze->bActive = false;
Gaze->TargetActor = nullptr;
Gaze->bEnableBodyTracking = false;
}
}
}

View File

@ -179,10 +179,9 @@ TMap<FName, float> UPS_AI_ConvAgent_FacialExpressionComponent::EvaluateAnimCurve
BlendedCurve.ForEachElement([&CurveValues](const UE::Anim::FCurveElement& Element)
{
if (FMath::Abs(Element.Value) > 0.001f)
{
// Include all curves even at 0 — the AnimNode needs to see them
// to block upstream values from popping through.
CurveValues.Add(Element.Name, Element.Value);
}
});
}
@ -365,12 +364,11 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent(
? ActiveCurves[CurveName] : 0.0f;
const float Blended = FMath::Lerp(PrevVal, ActiveVal, CrossfadeAlpha);
if (FMath::Abs(Blended) > 0.001f)
{
// Always include the curve even at 0 — the AnimNode needs
// to see it to block upstream values from popping through.
NewCurves.Add(CurveName, Blended);
}
}
}
// ── Apply activation alpha to output curves ──────────────────────────
if (CurrentActiveAlpha < 0.001f)

View File

@ -1,13 +1,13 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "PS_AI_ConvAgent_GazeComponent.h"
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "GameFramework/Actor.h"
#include "Math/UnrealMathUtility.h"
#include "DrawDebugHelpers.h"
DEFINE_LOG_CATEGORY(LogPS_AI_ConvAgent_Posture);
DEFINE_LOG_CATEGORY(LogPS_AI_ConvAgent_Gaze);
// ── ARKit eye curve names ────────────────────────────────────────────────────
static const FName EyeLookUpLeft(TEXT("eyeLookUpLeft"));
@ -27,11 +27,68 @@ static const FName EyeLookOutRight(TEXT("eyeLookOutRight"));
static constexpr float ARKitEyeRangeHorizontal = 40.0f;
static constexpr float ARKitEyeRangeVertical = 35.0f;
// ── Target eye bone names (MetaHuman convention) ─────────────────────────────
static const FName TargetEyeBoneL(TEXT("FACIAL_L_Eye"));
static const FName TargetEyeBoneR(TEXT("FACIAL_R_Eye"));
static const FName TargetHeadBone(TEXT("head"));
/**
* Resolve the world-space target position on the TargetActor.
*
* bAutoTargetEyes = true:
* 1. Try eye bones (FACIAL_L_Eye / FACIAL_R_Eye midpoint) on the target's Face mesh.
* 2. Fallback to "head" bone on any skeletal mesh.
* 3. Fallback to TargetActor origin + (0, 0, FallbackEyeHeight).
*
* bAutoTargetEyes = false:
* Always returns TargetActor origin + TargetOffset.
*/
/**
 * Resolve the world-space target position on the TargetActor.
 *
 * bAutoTargetEyes = true:
 *   1. Try eye bones (FACIAL_L_Eye / FACIAL_R_Eye midpoint) on the target's Face mesh.
 *   2. Fallback to "head" bone on any skeletal mesh.
 *   3. Fallback to TargetActor origin + (0, 0, FallbackEyeHeight).
 *
 * bAutoTargetEyes = false:
 *   Always returns TargetActor origin + TargetOffset.
 *
 * @param Target        Actor to aim at (may be null → ZeroVector).
 * @param bAutoEyes     Enable the eye/head-bone auto-target chain.
 * @param FallbackHeight Z offset above the actor origin when no skeleton is found.
 * @param ManualOffset  Offset used in manual (non-auto) mode.
 * @return World-space aim point.
 */
static FVector ResolveTargetPosition(const AActor* Target, bool bAutoEyes,
    float FallbackHeight, const FVector& ManualOffset)
{
    if (!Target)
    {
        return FVector::ZeroVector;
    }

    if (!bAutoEyes)
    {
        // Manual mode: actor origin + user-defined offset.
        return Target->GetActorLocation() + ManualOffset;
    }

    // AActor::GetComponents is const-qualified, so no const_cast is needed
    // to enumerate the target's skeletal meshes.
    TArray<USkeletalMeshComponent*> SkelMeshes;
    Target->GetComponents<USkeletalMeshComponent>(SkelMeshes);

    // 1. Eye bones on a Face mesh — aim at the midpoint between both eyes.
    for (const USkeletalMeshComponent* SMC : SkelMeshes)
    {
        if (SMC && SMC->DoesSocketExist(TargetEyeBoneL) && SMC->DoesSocketExist(TargetEyeBoneR))
        {
            return (SMC->GetSocketLocation(TargetEyeBoneL)
                + SMC->GetSocketLocation(TargetEyeBoneR)) * 0.5f;
        }
    }

    // 2. Fallback: head bone on any mesh.
    for (const USkeletalMeshComponent* SMC : SkelMeshes)
    {
        if (SMC && SMC->DoesSocketExist(TargetHeadBone))
        {
            return SMC->GetSocketLocation(TargetHeadBone);
        }
    }

    // 3. No usable skeleton — actor origin raised by FallbackEyeHeight.
    return Target->GetActorLocation() + FVector(0.0f, 0.0f, FallbackHeight);
}
// ─────────────────────────────────────────────────────────────────────────────
// Construction
// ─────────────────────────────────────────────────────────────────────────────
UPS_AI_ConvAgent_PostureComponent::UPS_AI_ConvAgent_PostureComponent()
UPS_AI_ConvAgent_GazeComponent::UPS_AI_ConvAgent_GazeComponent()
{
PrimaryComponentTick.bCanEverTick = true;
PrimaryComponentTick.TickGroup = TG_PrePhysics;
@ -47,14 +104,14 @@ UPS_AI_ConvAgent_PostureComponent::UPS_AI_ConvAgent_PostureComponent()
// BeginPlay
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
void UPS_AI_ConvAgent_GazeComponent::BeginPlay()
{
Super::BeginPlay();
AActor* Owner = GetOwner();
if (!Owner)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Warning, TEXT("No owner actor — posture disabled."));
UE_LOG(LogPS_AI_ConvAgent_Gaze, Warning, TEXT("No owner actor — gaze disabled."));
return;
}
@ -83,13 +140,13 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
}
if (!CachedMesh.IsValid())
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Warning,
UE_LOG(LogPS_AI_ConvAgent_Gaze, Warning,
TEXT("No SkeletalMeshComponent found on %s — head bone lookup will be unavailable."),
*Owner->GetName());
}
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log,
TEXT("Mesh cache: Body=%s Face=%s"),
CachedMesh.IsValid() ? *CachedMesh->GetName() : TEXT("NONE"),
CachedFaceMesh.IsValid() ? *CachedFaceMesh->GetName() : TEXT("NONE"));
@ -103,8 +160,8 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
TEXT("Posture initialized on %s. MeshOffset=%.0f OriginalYaw=%.0f MaxEye=%.0f/%.0f MaxHead=%.0f/%.0f"),
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log,
TEXT("Gaze initialized on %s. MeshOffset=%.0f OriginalYaw=%.0f MaxEye=%.0f/%.0f MaxHead=%.0f/%.0f"),
*Owner->GetName(), MeshForwardYawOffset, OriginalActorYaw,
MaxEyeHorizontal, MaxEyeVertical, MaxHeadYaw, MaxHeadPitch);
}
@ -116,9 +173,9 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
{
AgentComponent = Agent;
Agent->OnAgentConnected.AddDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected);
this, &UPS_AI_ConvAgent_GazeComponent::OnConversationConnected);
Agent->OnAgentDisconnected.AddDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected);
this, &UPS_AI_ConvAgent_GazeComponent::OnConversationDisconnected);
// Start inactive — will activate when conversation connects.
bActive = false;
@ -126,7 +183,7 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log,
TEXT("Auto-activation bound to agent on %s. Waiting for conversation."),
*Owner->GetName());
}
@ -137,14 +194,14 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
// EndPlay
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_PostureComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
void UPS_AI_ConvAgent_GazeComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
if (AgentComponent.IsValid())
{
AgentComponent->OnAgentConnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected);
this, &UPS_AI_ConvAgent_GazeComponent::OnConversationConnected);
AgentComponent->OnAgentDisconnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected);
this, &UPS_AI_ConvAgent_GazeComponent::OnConversationDisconnected);
}
Super::EndPlay(EndPlayReason);
@ -154,28 +211,28 @@ void UPS_AI_ConvAgent_PostureComponent::EndPlay(const EEndPlayReason::Type EndPl
// Auto-activation handlers
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_PostureComponent::OnConversationConnected(
void UPS_AI_ConvAgent_GazeComponent::OnConversationConnected(
const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo)
{
bActive = true;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log, TEXT("Conversation connected — posture activating."));
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log, TEXT("Conversation connected — gaze activating."));
}
}
void UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected(
void UPS_AI_ConvAgent_GazeComponent::OnConversationDisconnected(
int32 StatusCode, const FString& Reason)
{
bActive = false;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
TEXT("Conversation disconnected (code=%d) — posture deactivating."), StatusCode);
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log,
TEXT("Conversation disconnected (code=%d) — gaze deactivating."), StatusCode);
}
}
void UPS_AI_ConvAgent_PostureComponent::ResetBodyTarget()
void UPS_AI_ConvAgent_GazeComponent::ResetBodyTarget()
{
if (AActor* Owner = GetOwner())
{
@ -188,7 +245,7 @@ void UPS_AI_ConvAgent_PostureComponent::ResetBodyTarget()
// Map eye angles to 8 ARKit eye curves
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_PostureComponent::UpdateEyeCurves(float EyeYaw, float EyePitch)
void UPS_AI_ConvAgent_GazeComponent::UpdateEyeCurves(float EyeYaw, float EyePitch)
{
CurrentEyeCurves.Reset();
@ -197,7 +254,7 @@ void UPS_AI_ConvAgent_PostureComponent::UpdateEyeCurves(float EyeYaw, float EyeP
// uses Output.Curve.Set() which overwrites. If we only emit the active
// directions (e.g. eyeLookUpLeft), the opposing curves (eyeLookDownLeft)
// from the emotion/base animation leak through uncleared.
// Emitting all 8 ensures posture fully controls eye direction at Comp=1.
// Emitting all 8 ensures gaze fully controls eye direction at Comp=1.
float LookOutL = 0.0f, LookInL = 0.0f, LookOutR = 0.0f, LookInR = 0.0f;
float LookUpL = 0.0f, LookDownL = 0.0f, LookUpR = 0.0f, LookDownR = 0.0f;
@ -245,7 +302,7 @@ void UPS_AI_ConvAgent_PostureComponent::UpdateEyeCurves(float EyeYaw, float EyeP
}
// ─────────────────────────────────────────────────────────────────────────────
// Tick — relative cascade 360° posture tracking
// Tick — relative cascade 360° gaze tracking
//
// Eyes always track first, relative to the current head direction.
// When eyes exceed MaxEyeHorizontal → head realigns to target (eyes ≈ 0°).
@ -258,7 +315,7 @@ void UPS_AI_ConvAgent_PostureComponent::UpdateEyeCurves(float EyeYaw, float EyeP
// point shifts so small movements don't re-trigger higher layers.
// ─────────────────────────────────────────────────────────────────────────────
void UPS_AI_ConvAgent_PostureComponent::TickComponent(
void UPS_AI_ConvAgent_GazeComponent::TickComponent(
float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
@ -292,7 +349,7 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
{
// ── 1. Compute target position and eye origin ──────────────────────
const FVector TargetPos = TargetActor->GetActorLocation() + TargetOffset;
const FVector TargetPos = ResolveTargetPosition(TargetActor, bAutoTargetEyes, FallbackEyeHeight, TargetOffset);
// Eye origin = midpoint of FACIAL_L_Eye / FACIAL_R_Eye on the Face mesh
// (most accurate for pitch calculation). Falls back to head bone, then actor.
@ -472,7 +529,7 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
// ── 6. Output for AnimNode (thread-safe write) ────────────────────────
{
FScopeLock Lock(&PostureDataLock);
FScopeLock Lock(&GazeDataLock);
// MetaHuman head bone axis mapping:
// Z-axis rotation = nod up/down → our HeadPitch
@ -504,7 +561,7 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
? CachedFaceMesh.Get()
: (CachedMesh.IsValid() ? CachedMesh.Get() : nullptr);
const FVector TargetPos = TargetActor->GetActorLocation() + TargetOffset;
const FVector TargetPos = ResolveTargetPosition(TargetActor, bAutoTargetEyes, FallbackEyeHeight, TargetOffset);
if (EyeMesh
&& EyeMesh->DoesSocketExist(LeftEyeBone)
@ -560,13 +617,13 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
if (DebugFrameCounter % 120 == 0)
{
const float FacingYaw = SmoothedBodyYaw + MeshForwardYawOffset;
const FVector TP = TargetActor->GetActorLocation() + TargetOffset;
const FVector TP = ResolveTargetPosition(TargetActor, bAutoTargetEyes, FallbackEyeHeight, TargetOffset);
const FVector Dir = TP - Owner->GetActorLocation();
const float TgtYaw = FVector(Dir.X, Dir.Y, 0.0f).Rotation().Yaw;
const float Delta = FMath::FindDeltaAngleDegrees(FacingYaw, TgtYaw);
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
TEXT("Posture [%s -> %s]: Delta=%.1f | Head=%.1f/%.1f | Eyes=%.1f/%.1f | Body: enabled=%s TargetYaw=%.1f SmoothedYaw=%.1f (raw=%.1f)"),
UE_LOG(LogPS_AI_ConvAgent_Gaze, Log,
TEXT("Gaze [%s -> %s]: Delta=%.1f | Head=%.1f/%.1f | Eyes=%.1f/%.1f | Body: enabled=%s TargetYaw=%.1f SmoothedYaw=%.1f (raw=%.1f)"),
*Owner->GetName(), *TargetActor->GetName(),
Delta,
CurrentHeadYaw, CurrentHeadPitch,

View File

@ -4,7 +4,7 @@
#include "PS_AI_ConvAgent_InteractionSubsystem.h"
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "PS_AI_ConvAgent_MicrophoneCaptureComponent.h"
#include "PS_AI_ConvAgent_PostureComponent.h"
#include "PS_AI_ConvAgent_GazeComponent.h"
#include "GameFramework/Pawn.h"
#include "GameFramework/PlayerController.h"
@ -39,11 +39,11 @@ void UPS_AI_ConvAgent_InteractionComponent::BeginPlay()
void UPS_AI_ConvAgent_InteractionComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
// Cancel any pending posture timers.
// Cancel any pending gaze timers.
if (UWorld* World = GetWorld())
{
World->GetTimerManager().ClearTimer(PostureAttachTimerHandle);
World->GetTimerManager().ClearTimer(PostureDetachTimerHandle);
World->GetTimerManager().ClearTimer(GazeAttachTimerHandle);
World->GetTimerManager().ClearTimer(GazeDetachTimerHandle);
}
if (MicComponent)
@ -55,14 +55,14 @@ void UPS_AI_ConvAgent_InteractionComponent::EndPlay(const EEndPlayReason::Type E
// Fire deselection event for cleanup.
if (UPS_AI_ConvAgent_ElevenLabsComponent* Agent = SelectedAgent.Get())
{
// Stop listening and clear posture immediately on shutdown — no delay.
// Stop listening and clear gaze immediately on shutdown — no delay.
if (bAutoManageListening)
{
Agent->StopListening();
}
if (bAutoManagePosture)
if (bAutoManageGaze)
{
DetachPostureTarget(Agent);
DetachGazeTarget(Agent);
}
SelectedAgent.Reset();
@ -264,36 +264,36 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
// Disable body tracking on deselection — but only if we were the
// one who set the TargetActor. The conversation system (OnRep or
// server ApplyConversationPosture) may have set TargetActor to a
// server ApplyConversationGaze) may have set TargetActor to a
// different player; don't overwrite that.
if (bAutoManagePosture)
if (bAutoManageGaze)
{
if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(OldAgent))
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(OldAgent))
{
if (Posture->TargetActor == GetOwner())
if (Gaze->TargetActor == GetOwner())
{
Posture->bEnableBodyTracking = false;
Gaze->bEnableBodyTracking = false;
}
}
}
// ── Posture: detach ──────────────────────────────────────────────
if (bAutoManagePosture && World)
// ── Gaze: detach ────────────────────────────────────────────────
if (bAutoManageGaze && World)
{
// Cancel any pending attach — agent left before attach fired.
World->GetTimerManager().ClearTimer(PostureAttachTimerHandle);
// Cancel any pending gaze attach — agent left before attach fired.
World->GetTimerManager().ClearTimer(GazeAttachTimerHandle);
if (PostureDetachDelay > 0.0f)
if (GazeDetachDelay > 0.0f)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakOld = OldAgent;
World->GetTimerManager().SetTimer(PostureDetachTimerHandle,
World->GetTimerManager().SetTimer(GazeDetachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::DetachPostureTarget, WeakOld),
PostureDetachDelay, false);
&UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget, WeakOld),
GazeDetachDelay, false);
}
else
{
DetachPostureTarget(OldAgent);
DetachGazeTarget(OldAgent);
}
}
@ -314,7 +314,7 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
// Network: auto-start conversation if no active conversation.
// In persistent session mode, the WebSocket stays connected but
// bNetIsConversing is false between interactions — we still need
// to call StartConversation() to re-activate posture and mic.
// to call StartConversation() to re-activate gaze and mic.
// Only when bAutoStartConversation is true — otherwise the user must
// call StartConversationWithSelectedAgent() explicitly (e.g. on key press).
if (bAutoStartConversation && !NewAgent->bNetIsConversing)
@ -338,24 +338,24 @@ void UPS_AI_ConvAgent_InteractionComponent::SetSelectedAgent(UPS_AI_ConvAgent_El
}
}
// ── Posture: attach (eyes+head only — body tracking is enabled later
// ── Gaze: attach (eyes+head only — body tracking is enabled later
// by ElevenLabsComponent when the agent starts responding) ──
if (bAutoManagePosture && World)
if (bAutoManageGaze && World)
{
// Cancel any pending detach — agent came back before detach fired.
World->GetTimerManager().ClearTimer(PostureDetachTimerHandle);
// Cancel any pending gaze detach — agent came back before detach fired.
World->GetTimerManager().ClearTimer(GazeDetachTimerHandle);
if (PostureAttachDelay > 0.0f)
if (GazeAttachDelay > 0.0f)
{
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> WeakNew = NewAgent;
World->GetTimerManager().SetTimer(PostureAttachTimerHandle,
World->GetTimerManager().SetTimer(GazeAttachTimerHandle,
FTimerDelegate::CreateUObject(this,
&UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget, WeakNew),
PostureAttachDelay, false);
&UPS_AI_ConvAgent_InteractionComponent::AttachGazeTarget, WeakNew),
GazeAttachDelay, false);
}
else
{
AttachPostureTarget(NewAgent);
AttachGazeTarget(NewAgent);
}
}
@ -477,33 +477,33 @@ void UPS_AI_ConvAgent_InteractionComponent::StartConversationWithSelectedAgent()
}
// ─────────────────────────────────────────────────────────────────────────────
// Posture helpers
// Gaze helpers
// ─────────────────────────────────────────────────────────────────────────────
UPS_AI_ConvAgent_PostureComponent* UPS_AI_ConvAgent_InteractionComponent::FindPostureOnAgent(
UPS_AI_ConvAgent_GazeComponent* UPS_AI_ConvAgent_InteractionComponent::FindGazeOnAgent(
UPS_AI_ConvAgent_ElevenLabsComponent* Agent)
{
if (!Agent) return nullptr;
AActor* AgentActor = Agent->GetOwner();
if (!AgentActor) return nullptr;
return AgentActor->FindComponentByClass<UPS_AI_ConvAgent_PostureComponent>();
return AgentActor->FindComponentByClass<UPS_AI_ConvAgent_GazeComponent>();
}
void UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget(
void UPS_AI_ConvAgent_InteractionComponent::AttachGazeTarget(
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent)
{
UPS_AI_ConvAgent_ElevenLabsComponent* AgentPtr = Agent.Get();
if (!AgentPtr) return;
if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(AgentPtr))
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(AgentPtr))
{
Posture->TargetActor = GetOwner();
Posture->bActive = true;
Gaze->TargetActor = GetOwner();
Gaze->bActive = true;
// Reset the body target to the actor's current facing so body tracking
// starts fresh on re-entry. Without this, TargetBodyWorldYaw retains
// the stale value from the previous interaction and the body never moves
// (BodyDelta ≈ 0 because the actor is already at the old target yaw).
Posture->ResetBodyTarget();
Gaze->ResetBodyTarget();
// If the agent is already in an active conversation (re-entry),
// enable body tracking immediately — the conversation is already engaged,
@ -512,35 +512,35 @@ void UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget(
// eyes+head only and let HandleAgentResponseStarted enable body later.
// Network: on clients IsConnected() is always false (no local WebSocket),
// so also check the replicated bNetIsConversing flag.
Posture->bEnableBodyTracking = AgentPtr->IsConnected() || AgentPtr->bNetIsConversing;
Gaze->bEnableBodyTracking = AgentPtr->IsConnected() || AgentPtr->bNetIsConversing;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Posture attached (eyes+head only): %s -> %s"),
UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Gaze attached (eyes+head only): %s -> %s"),
AgentPtr->GetOwner() ? *AgentPtr->GetOwner()->GetName() : TEXT("(null)"),
GetOwner() ? *GetOwner()->GetName() : TEXT("(null)"));
}
}
}
void UPS_AI_ConvAgent_InteractionComponent::DetachPostureTarget(
void UPS_AI_ConvAgent_InteractionComponent::DetachGazeTarget(
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent)
{
UPS_AI_ConvAgent_ElevenLabsComponent* AgentPtr = Agent.Get();
if (!AgentPtr) return;
if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(AgentPtr))
if (UPS_AI_ConvAgent_GazeComponent* Gaze = FindGazeOnAgent(AgentPtr))
{
// Only clear if we are the one who set the TargetActor.
// The conversation system (OnRep / ApplyConversationPosture on server)
// The conversation system (OnRep / ApplyConversationGaze on server)
// may have set TargetActor to a different player — don't overwrite that.
if (Posture->TargetActor == GetOwner())
if (Gaze->TargetActor == GetOwner())
{
Posture->TargetActor = nullptr;
Gaze->TargetActor = nullptr;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Posture detached: %s"),
UE_LOG(LogPS_AI_ConvAgent_Select, Log, TEXT("Gaze detached: %s"),
AgentPtr->GetOwner() ? *AgentPtr->GetOwner()->GetName() : TEXT("(null)"));
}
}

View File

@ -419,9 +419,30 @@ void UPS_AI_ConvAgent_LipSyncComponent::BeginPlay()
}
}
// Detect curve mode: MetaHuman Face mesh has 0 morph targets but 1000+ animation curves.
// In that case, use AddCurveValue (CTRL_expressions_*) instead of SetMorphTarget.
if (TargetMesh && TargetMesh->GetSkeletalMeshAsset())
// Extract curve data from phoneme pose AnimSequences (if assigned).
InitializePoseMappings();
// Detect curve mode.
//
// When a PoseMap is assigned, the AnimNode is the intended pipeline
// (it writes curves into the AnimGraph where they're properly evaluated
// alongside upstream animations). Calling SetMorphTarget() simultaneously
// causes double-application — the mesh deforms from BOTH the AnimGraph
// curves AND the direct morph target writes, producing visible glitches.
//
// Force curve mode when PoseMap is active, regardless of morph target count.
// Fallback: also force curve mode when mesh has 0 morph targets (MetaHuman).
if (PoseExtractedCurveMap.Num() > 0)
{
bUseCurveMode = true;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("PoseMap active (%d visemes) — using AnimNode curve mode (no SetMorphTarget)."),
PoseExtractedCurveMap.Num());
}
}
else if (TargetMesh && TargetMesh->GetSkeletalMeshAsset())
{
const int32 MorphCount = TargetMesh->GetSkeletalMeshAsset()->GetMorphTargets().Num();
if (MorphCount == 0)
@ -430,19 +451,15 @@ void UPS_AI_ConvAgent_LipSyncComponent::BeginPlay()
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("No morph targets found — switching to MetaHuman curve mode (CTRL_expressions_*)."));
TEXT("No morph targets found — using curve mode (CTRL_expressions_*)."));
}
}
else if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Found %d morph targets — using standard SetMorphTarget mode."), MorphCount);
TEXT("Found %d morph targets, no PoseMap — using SetMorphTarget mode."), MorphCount);
}
}
// Extract curve data from phoneme pose AnimSequences (if assigned).
// Must be called after TargetMesh / bUseCurveMode detection above.
InitializePoseMappings();
}
// ─────────────────────────────────────────────────────────────────────────────
@ -569,14 +586,6 @@ void UPS_AI_ConvAgent_LipSyncComponent::InitializePoseMappings()
bPosesUseCTRLNaming ? TEXT("CTRL_expressions_* (MetaHuman native)") : TEXT("ARKit / standard"));
}
if (bPosesUseCTRLNaming)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Warning,
TEXT("IMPORTANT: Poses use CTRL_expressions_* curves. "
"Move the PS AI ConvAgent Lip Sync AnimNode AFTER mh_arkit_mapping_pose "
"in the Face AnimBP for correct results."));
}
// Log sample curves from the first non-empty pose for debugging
if (bDebug && DebugVerbosity >= 2)
{

View File

@ -0,0 +1,81 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Animation/AnimNodeBase.h"
#include "PS_AI_ConvAgent_BodyExpressionComponent.h"
#include "AnimNode_PS_AI_ConvAgent_BodyExpression.generated.h"
class UAnimSequence;
/**
 * Animation node that blends emotion-driven body poses onto the skeleton.
 *
 * Place this node in the character's Body AnimBP. It evaluates body
 * AnimSequences from the PS_AI_ConvAgent_BodyExpressionComponent and blends
 * them per-bone onto the upstream pose (idle, locomotion).
 *
 * Two modes:
 * - bUpperBodyOnly = true (default): only bones at and below BlendRootBone
 *   are blended; lower body passes through from the upstream pose.
 * - bUpperBodyOnly = false: the emotion pose is applied to the entire skeleton.
 *
 * Graph layout:
 *   [Upstream body anims (idle, locomotion)] -> [PS AI ConvAgent Body Expression] -> [Output]
 *
 * The node auto-discovers the BodyExpressionComponent — no manual wiring needed.
 */
USTRUCT(BlueprintInternalUseOnly)
struct PS_AI_CONVAGENT_API FAnimNode_PS_AI_ConvAgent_BodyExpression : public FAnimNode_Base
{
	GENERATED_USTRUCT_BODY()

	/** Input pose to pass through. Connect your upstream pose source here. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Links)
	FPoseLink BasePose;

	/** When true, only the upper body (BlendRootBone and descendants) is blended.
	 * When false, the emotion pose is applied to the entire skeleton. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Settings",
		meta = (ToolTip = "When true, only the upper body is blended (from BlendRootBone down).\nWhen false, the full body is blended."))
	bool bUpperBodyOnly = true;

	/** Root bone for upper body blend. All descendants of this bone
	 * (including the bone itself) are blended with the emotion body pose.
	 * Only used when bUpperBodyOnly is true.
	 * Default: "spine_02" (includes arms, upper spine, neck, head). */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Settings",
		meta = (EditCondition = "bUpperBodyOnly",
			ToolTip = "Root bone for upper body blend.\nAll descendants of this bone are blended with the emotion pose.\nDefault: spine_02 (arms, spine, neck, head)."))
	FName BlendRootBone = FName(TEXT("spine_02"));

	// ── FAnimNode_Base interface ──────────────────────────────────────────────
	// Standard anim-node lifecycle: Initialize/CacheBones on (re)binding,
	// Update per frame (game-thread-safe data copy), Evaluate on the anim
	// worker thread, GatherDebugData for the AnimGraph debugger.
	virtual void Initialize_AnyThread(const FAnimationInitializeContext& Context) override;
	virtual void CacheBones_AnyThread(const FAnimationCacheBonesContext& Context) override;
	virtual void Update_AnyThread(const FAnimationUpdateContext& Context) override;
	virtual void Evaluate_AnyThread(FPoseContext& Output) override;
	virtual void GatherDebugData(FNodeDebugData& DebugData) override;

private:
	/** Build the per-bone weight mask from BlendRootBone. */
	void BuildBoneMask(const FBoneContainer& RequiredBones);

	/** Evaluate an AnimSequence into a compact pose. Returns false on failure. */
	bool EvaluateAnimPose(UAnimSequence* AnimSeq, float Time,
		const FBoneContainer& BoneContainer, FCompactPose& OutPose) const;

	/** Cached reference to the body expression component on the owning actor. */
	TWeakObjectPtr<UPS_AI_ConvAgent_BodyExpressionComponent> BodyExpressionComponent;

	/** Cached snapshot from the component (lightweight: anim refs + times + alphas). */
	FPS_AI_ConvAgent_BodyExpressionSnapshot CachedSnapshot;

	/** Per-bone blend weights: 1.0 for upper body descendants, 0.0 for others.
	 * Indexed by compact pose bone index. */
	TArray<float> BoneMask;

	/** True when BoneMask is valid and has at least one non-zero weight. */
	bool bBoneMaskValid = false;
};

View File

@ -48,4 +48,13 @@ private:
/** Emotion expression curves to inject (CTRL_expressions_* format).
* Copied from the component during Update (game thread safe). */
TMap<FName, float> CachedEmotionCurves;
/** All curve names that facial expression has ever written.
* Used to explicitly zero curves when inactive, preventing
* upstream animation values from popping through. */
TSet<FName> KnownCurveNames;
/** Frames since CachedEmotionCurves was last non-empty.
* After a grace period, stop overriding so upstream anims take over. */
int32 FramesSinceLastActive = 0;
};

View File

@ -5,12 +5,12 @@
#include "CoreMinimal.h"
#include "Animation/AnimNodeBase.h"
#include "BoneContainer.h"
#include "AnimNode_PS_AI_ConvAgent_Posture.generated.h"
#include "AnimNode_PS_AI_ConvAgent_Gaze.generated.h"
class UPS_AI_ConvAgent_PostureComponent;
class UPS_AI_ConvAgent_GazeComponent;
/**
* Animation node that injects posture data into the AnimGraph.
* Animation node that injects gaze data into the AnimGraph.
*
* Handles two types of output (each can be toggled independently):
* 1. Head bone rotation (yaw + pitch) applied directly to the bone transform
@ -23,12 +23,12 @@ class UPS_AI_ConvAgent_PostureComponent;
*
* Face AnimBP: bApplyHeadRotation = false, bApplyEyeCurves = true
* Injects ARKit eye curves before mh_arkit_mapping_pose.
* Graph: [Source] [Facial Expression] [Posture] [Lip Sync] [mh_arkit] ...
* Graph: [Source] [Facial Expression] [Gaze] [Lip Sync] [mh_arkit] ...
*
* The node auto-discovers the PS_AI_ConvAgent_PostureComponent no manual wiring needed.
 * The node auto-discovers the PS_AI_ConvAgent_GazeComponent — no manual wiring needed.
*/
USTRUCT(BlueprintInternalUseOnly)
struct PS_AI_CONVAGENT_API FAnimNode_PS_AI_ConvAgent_Posture : public FAnimNode_Base
struct PS_AI_CONVAGENT_API FAnimNode_PS_AI_ConvAgent_Gaze : public FAnimNode_Base
{
GENERATED_USTRUCT_BODY()
@ -55,8 +55,8 @@ struct PS_AI_CONVAGENT_API FAnimNode_PS_AI_ConvAgent_Posture : public FAnimNode_
virtual void GatherDebugData(FNodeDebugData& DebugData) override;
private:
/** Cached reference to the posture component on the owning actor. */
TWeakObjectPtr<UPS_AI_ConvAgent_PostureComponent> PostureComponent;
/** Cached reference to the gaze component on the owning actor. */
TWeakObjectPtr<UPS_AI_ConvAgent_GazeComponent> GazeComponent;
/** Eye gaze curves to inject (8 ARKit eye look curves).
* Copied from the component during Update (game thread safe). */
@ -109,13 +109,13 @@ private:
// ── Body drift compensation ─────────────────────────────────────────────
//
// When the animation bends the torso (bow, lean, etc.), all bones above
// the spine shift in world space. The posture rotation is calculated
// the spine shift in world space. The gaze rotation is calculated
// relative to the character standing upright, so without compensation
// the head drifts away from the target.
//
// We walk from the parent of the first chain bone up to root, accumulate
// the animated vs ref-pose rotation delta ("body drift"), and pre-rotate
// the posture to cancel it out.
// the gaze to cancel it out.
/** Ancestor bone indices from parent-of-chain to root (child→root order).
* Resolved at CacheBones; used in Evaluate to compute animated drift. */
@ -127,11 +127,11 @@ private:
// ── Animation compensation (separate head and eye) ─────────────────────
/** How much posture overrides the animation's head/neck rotation (0=additive, 1=override).
/** How much gaze overrides the animation's head/neck rotation (0=additive, 1=override).
* Cached from the component during Update. */
float CachedHeadCompensation = 1.0f;
/** How much posture overrides the animation's eye gaze (0=additive, 1=override).
/** How much gaze overrides the animation's eye curves (0=additive, 1=override).
* Cached from the component during Update. */
float CachedEyeCompensation = 1.0f;

View File

@ -46,4 +46,13 @@ private:
/** ARKit blendshape curves to inject (jawOpen, mouthFunnel, etc.).
* Copied from the component during Update (game thread safe). */
TMap<FName, float> CachedCurves;
/** All curve names that lip sync has ever written.
* Used to explicitly zero curves when lip sync goes inactive,
* preventing upstream animation values from popping through. */
TSet<FName> KnownCurveNames;
/** Frames since CachedCurves was last non-empty.
* After a grace period, stop overriding so upstream anims take over. */
int32 FramesSinceLastActive = 0;
};

View File

@ -0,0 +1,235 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "HAL/CriticalSection.h"
#include "Components/ActorComponent.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_BodyExpressionComponent.generated.h"
class UPS_AI_ConvAgent_ElevenLabsComponent;
class UPS_AI_ConvAgent_BodyPoseMap;
class UAnimSequence;
// ─────────────────────────────────────────────────────────────────────────────
// Thread-safe snapshot of body expression state for the AnimNode.
// Written by the component on the game thread, read by the AnimNode on
// a worker thread via GetSnapshot().
// ─────────────────────────────────────────────────────────────────────────────
struct FPS_AI_ConvAgent_BodyExpressionSnapshot
{
	/** Currently playing (crossfade-in) body animation. Weak: the component owns the reference. */
	TWeakObjectPtr<UAnimSequence> ActiveAnim;
	/** Previous body animation, still playing during the crossfade-out. */
	TWeakObjectPtr<UAnimSequence> PrevAnim;
	/** Playback cursor (seconds) into ActiveAnim. */
	float ActiveTime = 0.0f;
	/** Playback cursor (seconds) into PrevAnim. */
	float PrevTime = 0.0f;
	/** Crossfade progress: 0 = fully PrevAnim, 1 = fully ActiveAnim. */
	float CrossfadeAlpha = 1.0f;
	/** Activation blend: 0 = fully inactive/passthrough, 1 = fully active. */
	float ActivationAlpha = 0.0f;
	/** Overall blend weight configured on the component (1.0 = full effect). */
	float BlendWeight = 1.0f;
};
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FOnBodyExpressionChanged,
EPS_AI_ConvAgent_Emotion, Emotion,
EPS_AI_ConvAgent_EmotionIntensity, Intensity);
// ─────────────────────────────────────────────────────────────────────────────
// UPS_AI_ConvAgent_BodyExpressionComponent
//
// Drives emotion-based body expressions (upper body gestures, posture shifts)
// during conversation. Provides animation variety via random selection from
// per-emotion animation lists with smooth crossfades.
//
// Two states:
// - Idle (conversation active, agent NOT speaking): plays from Idle lists
// - Speaking (agent IS speaking): plays from Normal/Medium/Extreme lists
//
// Activates on conversation connect, deactivates on disconnect.
// Within a conversation, switches between idle/speaking lists automatically.
// At the end of each animation loop, picks a new random animation from the
// same list for continuous variety.
//
// Workflow:
// 1. Assign a BodyPoseMap data asset with body animation lists per emotion.
// 2. Add the AnimNode "PS AI ConvAgent Body Expression" in the Body AnimBP.
// 3. Set BlendRootBone on the AnimNode (e.g. "spine_02").
// 4. The component listens to conversation lifecycle + emotion events.
// 5. The AnimNode reads GetSnapshot() and blends the upper body pose.
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "PS AI ConvAgent", meta = (BlueprintSpawnableComponent),
	DisplayName = "PS AI ConvAgent Body Expression")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_BodyExpressionComponent : public UActorComponent
{
	GENERATED_BODY()

public:
	UPS_AI_ConvAgent_BodyExpressionComponent();

	// ── Configuration ─────────────────────────────────────────────────────────

	/** Body pose map asset containing body animation lists per emotion.
	 * Create a dedicated PS_AI_ConvAgent_BodyPoseMap asset in the Content Browser. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|BodyExpression",
		meta = (ToolTip = "Body Pose Map asset with animation lists per emotion.\nRight-click Content Browser > Miscellaneous > PS AI ConvAgent Body Pose Map."))
	TObjectPtr<UPS_AI_ConvAgent_BodyPoseMap> BodyPoseMap;

	/** Whether body expressions are currently active (read-only).
	 * Managed automatically: activates on conversation connect, deactivates on disconnect. */
	UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|BodyExpression")
	bool bActive = false;

	/** How long (seconds) to blend in/out when conversation starts/ends. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|BodyExpression",
		meta = (ClampMin = "0.05", ClampMax = "3.0"))
	float ActivationBlendDuration = 0.5f;

	/** Crossfade duration (seconds) when switching between animations. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|BodyExpression",
		meta = (ClampMin = "0.1", ClampMax = "3.0",
			ToolTip = "How long (seconds) to crossfade between animations.\n0.5 = snappy, 1.5 = smooth."))
	float EmotionBlendDuration = 0.5f;

	/** Overall blend weight for body expressions. 1.0 = full, 0.5 = subtle. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|BodyExpression",
		meta = (ClampMin = "0.0", ClampMax = "1.0",
			ToolTip = "Overall blend weight for body expressions.\n1.0 = full, 0.5 = subtle."))
	float BlendWeight = 1.0f;

	// ── Debug ────────────────────────────────────────────────────────────────

	/** Enable debug logging for this component. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Debug")
	bool bDebug = false;

	/** Verbosity level when bDebug is true.
	 * 0 = minimal extras, 1 = normal debug, 2 = detailed, 3 = per-frame data. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Debug",
		meta = (ClampMin = "0", ClampMax = "3", EditCondition = "bDebug"))
	int32 DebugVerbosity = 1;

	// ── Getters ───────────────────────────────────────────────────────────────

	/** Get a thread-safe snapshot of the current body expression state.
	 * Called by the AnimNode from a worker thread. */
	FPS_AI_ConvAgent_BodyExpressionSnapshot GetSnapshot() const
	{
		FScopeLock Lock(&SnapshotLock);
		return CurrentSnapshot;
	}

	/** Get the active emotion. */
	UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|BodyExpression")
	EPS_AI_ConvAgent_Emotion GetActiveEmotion() const { return ActiveEmotion; }

	/** Get the active emotion intensity. */
	UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|BodyExpression")
	EPS_AI_ConvAgent_EmotionIntensity GetActiveIntensity() const { return ActiveEmotionIntensity; }

	/** True when the agent is currently speaking (body uses N/M/E lists). */
	UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|BodyExpression")
	bool IsSpeaking() const { return bIsSpeaking; }

	// ── Events ───────────────────────────────────────────────────────────────

	/** Fired when the body expression changes (emotion + intensity).
	 * Only fires when the emotion actually differs from the previous one. */
	UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|BodyExpression|Events")
	FOnBodyExpressionChanged OnBodyExpressionChanged;

	// ── UActorComponent overrides ─────────────────────────────────────────────
	virtual void BeginPlay() override;
	virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
	virtual void TickComponent(float DeltaTime, ELevelTick TickType,
		FActorComponentTickFunction* ThisTickFunction) override;

private:
	// ── Event handlers ────────────────────────────────────────────────────────

	/** Called when the agent changes emotion via client tool. */
	UFUNCTION()
	void OnEmotionChanged(EPS_AI_ConvAgent_Emotion Emotion, EPS_AI_ConvAgent_EmotionIntensity Intensity);

	/** Called when the conversation connects. */
	UFUNCTION()
	void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& Info);

	/** Called when the conversation disconnects. */
	UFUNCTION()
	void OnConversationDisconnected(int32 StatusCode, const FString& Reason);

	/** Transition to speaking state. */
	UFUNCTION()
	void OnSpeakingStarted();

	/** Transition to idle state. */
	UFUNCTION()
	void OnSpeakingStopped();

	/** Handle interruption (back to idle). */
	UFUNCTION()
	void OnInterrupted();

	// ── Animation selection helpers ──────────────────────────────────────────

	/** Get the animation list appropriate for the current state (idle vs speaking + intensity).
	 * Returns nullptr if no list is available. Applies intensity fallback logic. */
	const TArray<TObjectPtr<UAnimSequence>>* GetCurrentAnimList() const;

	/** Get the animation list for a specific emotion/intensity/speaking state.
	 * Applies fallback: requested intensity -> Medium -> Normal -> Extreme.
	 * If all speaking lists are empty and bSpeaking is true, falls back to Idle. */
	const TArray<TObjectPtr<UAnimSequence>>* FindAnimList(
		EPS_AI_ConvAgent_Emotion Emotion,
		EPS_AI_ConvAgent_EmotionIntensity Intensity,
		bool bSpeaking) const;

	/** Pick a random animation from a list, avoiding the currently active anim.
	 * Returns nullptr if the list is empty. */
	UAnimSequence* PickRandomAnim(const TArray<TObjectPtr<UAnimSequence>>& List) const;

	/** Initiate a crossfade to a new animation. If NewAnim is the same as ActiveAnim
	 * and bForce is false, this is a no-op. */
	void SwitchToNewAnim(UAnimSequence* NewAnim, bool bForce = false);

	/** Pick a new animation from the current state's list and crossfade to it. */
	void PickAndSwitchAnim();

	// ── State ───────────────────────────────────────────────────────────────

	/** True when the agent is currently speaking (selects N/M/E lists).
	 * False = idle in conversation (selects Idle list). */
	bool bIsSpeaking = false;

	/** Currently playing body AnimSequence (looping).
	 * UPROPERTY(Transient): a bare TObjectPtr member is invisible to the GC's
	 * reflection scan — if BodyPoseMap is swapped at runtime and the old asset
	 * is collected, an unmarked pointer would dangle. Transient keeps it out
	 * of serialization while still rooting the reference. */
	UPROPERTY(Transient)
	TObjectPtr<UAnimSequence> ActiveAnim;

	/** Playback cursor for the active anim (seconds, wraps at anim length). */
	float ActivePlaybackTime = 0.0f;

	/** Previous body AnimSequence (for crossfade out).
	 * UPROPERTY(Transient) for GC visibility — see ActiveAnim. */
	UPROPERTY(Transient)
	TObjectPtr<UAnimSequence> PrevAnim;

	/** Playback cursor for the previous anim (keeps playing during crossfade). */
	float PrevPlaybackTime = 0.0f;

	/** Crossfade progress: 0 = fully PrevAnim, 1 = fully ActiveAnim. */
	float CrossfadeAlpha = 1.0f;

	// ── Snapshot output ──────────────────────────────────────────────────────

	/** Current snapshot — written in TickComponent (game thread),
	 * read by AnimNode via GetSnapshot() (worker thread). */
	FPS_AI_ConvAgent_BodyExpressionSnapshot CurrentSnapshot;

	/** Lock protecting CurrentSnapshot from concurrent game/anim thread access. */
	mutable FCriticalSection SnapshotLock;

	/** Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). */
	float CurrentActiveAlpha = 0.0f;

	/** Active emotion (for change detection). */
	EPS_AI_ConvAgent_Emotion ActiveEmotion = EPS_AI_ConvAgent_Emotion::Neutral;
	EPS_AI_ConvAgent_EmotionIntensity ActiveEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium;

	/** Cached reference to the agent component on the same Actor. */
	TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> AgentComponent;
};

View File

@ -0,0 +1,67 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Engine/DataAsset.h"
#include "Animation/AnimSequence.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_BodyPoseMap.generated.h"
/**
* Animation lists per intensity level for a single emotion.
*
* Each list can hold multiple AnimSequences. At runtime the component picks
* one at random (avoiding repeats) and crossfades when switching.
* - Idle: played while the agent is in conversation but NOT speaking.
* - Normal / Medium / Extreme: played while the agent IS speaking,
* selected according to the current emotion intensity.
*/
USTRUCT(BlueprintType)
struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_BodyAnimList
{
	GENERATED_BODY()

	/** Idle body anims — played when the agent is in conversation but NOT speaking.
	 * Multiple entries add variety (random selection, no immediate repeat). */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Body Poses")
	TArray<TObjectPtr<UAnimSequence>> Idle;

	/** Normal (low intensity) body anims — played when the agent is speaking. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Body Poses")
	TArray<TObjectPtr<UAnimSequence>> Normal;

	/** Medium intensity body anims — played when the agent is speaking. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Body Poses")
	TArray<TObjectPtr<UAnimSequence>> Medium;

	/** Extreme (high intensity) body anims — played when the agent is speaking. */
	UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Body Poses")
	TArray<TObjectPtr<UAnimSequence>> Extreme;
};
/**
* Reusable data asset that maps emotions to body animation lists.
*
* Create ONE instance of this asset in the Content Browser
* (right-click > Miscellaneous > PS AI ConvAgent Body Pose Map),
* populate the Idle / Normal / Medium / Extreme lists for each emotion,
* then reference this asset on the PS AI ConvAgent Body Expression component.
*
* The component picks a random animation from the appropriate list and
* crossfades between them, providing continuous upper body variety both
* while the agent is idle (listening) and while speaking.
*/
UCLASS(BlueprintType, Blueprintable, DisplayName = "PS AI ConvAgent Body Pose Map")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_BodyPoseMap : public UPrimaryDataAsset
{
GENERATED_BODY()
public:
/** Map of emotions to their body animation lists (Idle / Normal / Medium / Extreme).
* Add entries for each emotion your agent uses (Joy, Sadness, Anger, etc.).
* Neutral is recommended it is the default emotion. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Body Poses",
meta = (ToolTip = "Emotion -> body animation lists.\nIdle = listening, Normal/Medium/Extreme = speaking intensities.\nMultiple entries per list add variety (random selection)."))
TMap<EPS_AI_ConvAgent_Emotion, FPS_AI_ConvAgent_BodyAnimList> BodyPoses;
};

View File

@ -131,7 +131,7 @@ public:
/** Keep the WebSocket open across multiple StartConversation / EndConversation cycles.
* When true, the first StartConversation opens the WebSocket and EndConversation only
* stops the microphone and resets posture the WebSocket stays alive until EndPlay.
* stops the microphone and resets gaze — the WebSocket stays alive until EndPlay.
* The agent remembers the full conversation context between interactions.
* When false (ephemeral), each StartConversation opens a fresh WebSocket session
* and EndConversation closes it. */
@ -193,16 +193,6 @@ public:
ToolTip = "Auto-reconnect attempts on unexpected disconnect.\n0 = disabled. Uses exponential backoff."))
int32 MaxReconnectAttempts = 5;
// ── Multi-agent / external mic ───────────────────────────────────────────
/** When true, StartListening/StopListening manage the turn state but do NOT
* create or control a local microphone component. Audio is instead fed via
* FeedExternalAudio() from an external source (e.g. InteractionComponent on the pawn).
* Use this when a centralized mic on the player pawn routes audio to agents. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (ToolTip = "External mic mode: turn management only, no local mic.\nAudio is fed via FeedExternalAudio() from an external source."))
bool bExternalMicManagement = false;
// ── Audio spatialization ─────────────────────────────────────────────────
/** Optional sound attenuation settings for spatializing the agent's voice.
@ -319,7 +309,7 @@ public:
TObjectPtr<APlayerController> NetConversatingPlayer = nullptr;
/** The pawn of the conversating player. Replicated to ALL clients (unlike PlayerController).
* Used by remote clients for posture target (head/eye tracking) and LOD distance checks. */
* Used by remote clients for gaze target (head/eye tracking) and LOD distance checks. */
UPROPERTY(ReplicatedUsing = OnRep_ConversationState, BlueprintReadOnly, Category = "PS AI ConvAgent|Network")
TObjectPtr<APawn> NetConversatingPawn = nullptr;
@ -439,7 +429,8 @@ public:
/**
* Feed microphone audio from an external source (e.g. InteractionComponent on the pawn).
* Use this instead of the local mic when bExternalMicManagement is true.
* When an InteractionComponent exists on the player pawn, or in a network scenario,
* the component auto-detects external mic mode and skips local mic creation.
* The component must be connected and listening (StartListening called) for audio to be sent.
* @param FloatPCM Float32 samples, 16000 Hz mono (same format as MicrophoneCaptureComponent output).
*/
@ -660,8 +651,13 @@ private:
/** Find the InteractionComponent on the local player's pawn (for relay RPCs). */
class UPS_AI_ConvAgent_InteractionComponent* FindLocalRelayComponent() const;
/** Update the NPC's PostureComponent from the current conversation state.
/** Auto-detect whether mic audio comes from an external source.
* True when: network client, Authority with remote player, or
* InteractionComponent exists on the local player's pawn. */
bool ShouldUseExternalMic() const;
/** Update the NPC's GazeComponent from the current conversation state.
* Called on the server when bNetIsConversing / NetConversatingPawn change,
* because OnRep_ConversationState never fires on the Authority. */
void ApplyConversationPosture();
void ApplyConversationGaze();
};

View File

@ -52,10 +52,10 @@ public:
meta = (ToolTip = "Dedicated Emotion Pose Map asset.\nRight-click Content Browser → Miscellaneous → PS AI ConvAgent Emotion Pose Map."))
TObjectPtr<UPS_AI_ConvAgent_EmotionPoseMap> EmotionPoseMap;
/** When false, emotion curves smoothly blend to zero (passthrough).
/** Whether facial expressions are currently active (read-only).
* Managed automatically: activates on conversation connect, deactivates on disconnect.
* The underlying emotion playback keeps running so reactivation is seamless. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression",
meta = (ToolTip = "Enable facial expressions.\nWhen false, outputs blend to zero (passthrough)."))
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|FacialExpression")
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */

View File

@ -6,12 +6,12 @@
#include "Components/ActorComponent.h"
#include "HAL/CriticalSection.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_PostureComponent.generated.h"
#include "PS_AI_ConvAgent_GazeComponent.generated.h"
class UPS_AI_ConvAgent_ElevenLabsComponent;
class USkeletalMeshComponent;
DECLARE_LOG_CATEGORY_EXTERN(LogPS_AI_ConvAgent_Posture, Log, All);
DECLARE_LOG_CATEGORY_EXTERN(LogPS_AI_ConvAgent_Gaze, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// Neck bone chain entry for distributing head rotation across multiple bones
@ -33,7 +33,7 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_NeckBoneEntry
};
// ─────────────────────────────────────────────────────────────────────────────
// UPS_AI_ConvAgent_PostureComponent
// UPS_AI_ConvAgent_GazeComponent
//
// Chase-based multi-layer look-at system for MetaHuman characters.
// Smoothly orients the character's body, head, and eyes toward a TargetActor.
@ -49,9 +49,9 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_NeckBoneEntry
//
// Workflow:
// 1. Add this component to the character Blueprint.
// 2. Add the AnimNode "PS AI ConvAgent Posture" in the Body AnimBP
// 2. Add the AnimNode "PS AI ConvAgent Gaze" in the Body AnimBP
// with bApplyHeadRotation = true, bApplyEyeCurves = false.
// 3. Add the AnimNode "PS AI ConvAgent Posture" in the Face AnimBP
// 3. Add the AnimNode "PS AI ConvAgent Gaze" in the Face AnimBP
// with bApplyHeadRotation = false, bApplyEyeCurves = true
// (between "Facial Expression" and "Lip Sync" nodes).
// 4. Set TargetActor to any actor (player pawn, a prop, etc.).
@ -59,44 +59,61 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_NeckBoneEntry
// eye-level on a simple actor).
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "PS AI ConvAgent", meta = (BlueprintSpawnableComponent),
DisplayName = "PS AI ConvAgent Posture")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_PostureComponent : public UActorComponent
DisplayName = "PS AI ConvAgent Gaze")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_GazeComponent : public UActorComponent
{
GENERATED_BODY()
public:
UPS_AI_ConvAgent_PostureComponent();
UPS_AI_ConvAgent_GazeComponent();
// ── Target ───────────────────────────────────────────────────────────────
/** The actor to look at. Can be any actor (player, prop, etc.).
* Set to null to smoothly return to neutral. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral."))
TObjectPtr<AActor> TargetActor;
/** When false, all posture outputs smoothly blend to neutral (passthrough).
/** When false, all gaze outputs smoothly blend to neutral (passthrough).
* The underlying tracking keeps running so reactivation is seamless.
* Controlled automatically by the conversation state, or manually. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
meta = (ToolTip = "Enable posture system.\nWhen false, outputs blend to neutral (passthrough)."))
* Managed automatically: activates on conversation connect, deactivates on disconnect. */
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|Gaze")
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0.05", ClampMax = "3.0"))
float ActivationBlendDuration = 0.5f;
/** When false, body rotation is frozen — only head and eyes track the target.
* Useful to have the agent notice the player (eyes+head) before fully engaging (body). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ToolTip = "Enable body rotation toward the target.\nWhen false, only head and eyes track."))
bool bEnableBodyTracking = true;
/** Automatically aim at the target's eye bones (MetaHuman FACIAL_L_Eye / FACIAL_R_Eye).
* When enabled, TargetOffset is ignored and the agent looks at the midpoint
* between the target pawn's eye bones.
* Fallback chain: eye bones → head bone → ActorOrigin + (0,0,FallbackEyeHeight). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ToolTip = "Auto-target the pawn's eye bones for eye contact.\nFallback: eye bones > head bone > FallbackEyeHeight."))
bool bAutoTargetEyes = true;
/** Height offset (cm) from the target actor's origin when no eye/head bones are found.
* Used as fallback when bAutoTargetEyes is true but the target has no skeleton
* (e.g. first-person pawn, simple actor). 160 = eye height for a standing human. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (EditCondition = "bAutoTargetEyes", ClampMin = "0",
ToolTip = "Height offset (cm) when no eye/head bones exist on the target.\n160 = standing human eye level.\nOnly used as last-resort fallback."))
float FallbackEyeHeight = 160.0f;
/** Offset from the target actor's origin to aim at.
* Useful for actors without a skeleton (e.g. (0,0,160) for eye-level). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
meta = (ToolTip = "Offset from target actor origin.\nE.g. (0,0,160) for eye-level."))
* Only used when bAutoTargetEyes is false.
* E.g. (0,0,160) for eye-level on a simple actor. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (EditCondition = "!bAutoTargetEyes",
ToolTip = "Offset from target actor origin.\nE.g. (0,0,160) for eye-level.\nOnly used when Auto Target Eyes is disabled."))
FVector TargetOffset = FVector(0.0f, 0.0f, 0.0f);
// ── Angle limits (degrees) ───────────────────────────────────────────────
@ -109,75 +126,75 @@ public:
// so small movements around the target don't re-trigger higher layers.
/** Maximum head yaw rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxHeadYaw = 40.0f;
/** Maximum head pitch rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxHeadPitch = 30.0f;
/** Maximum horizontal eye angle in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxEyeHorizontal = 15.0f;
/** Maximum vertical eye angle in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxEyeVertical = 10.0f;
// ── Smoothing speeds ─────────────────────────────────────────────────────
/** Body rotation interpolation speed (lower = slower, more natural). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0.1", ClampMax = "20"))
float BodyInterpSpeed = 4.0f;
/** Head rotation interpolation speed. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0.1", ClampMax = "20"))
float HeadInterpSpeed = 4.0f;
/** Eye movement interpolation speed (higher = snappier). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0.1", ClampMax = "20"))
float EyeInterpSpeed = 5.0f;
/** Interpolation speed when returning to neutral (TargetActor is null). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0.1", ClampMax = "20"))
float ReturnToNeutralSpeed = 3.0f;
// ── Animation compensation ──────────────────────────────────────────────
//
// Two independent controls to balance animation vs. posture per layer:
// 1.0 = full override — posture replaces animation entirely.
// 0.0 = pure additive — posture stacks on top of animation.
// Two independent controls to balance animation vs. gaze per layer:
// 1.0 = full override — gaze replaces animation entirely.
// 0.0 = pure additive — gaze stacks on top of animation.
/** How much posture overrides the animation's head/neck rotation.
/** How much gaze overrides the animation's head/neck rotation.
* 1.0 = head always points at target regardless of animation.
* 0.0 = posture is additive on top of animation (old behavior).
* 0.0 = gaze is additive on top of animation (old behavior).
* Default: 1.0 for conversational AI (always look at who you talk to). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "1"))
float HeadAnimationCompensation = 0.9f;
/** How much posture overrides the animation's eye gaze.
* 1.0 = eyes frozen on posture target, animation's eye movement removed.
* 0.0 = animation's eyes play through, posture is additive.
/** How much gaze overrides the animation's eye movement.
* 1.0 = eyes frozen on gaze target, animation's eye movement removed.
* 0.0 = animation's eyes play through, gaze is additive.
* Intermediate (e.g. 0.5) = smooth 50/50 blend. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "1"))
float EyeAnimationCompensation = 0.6f;
/** Compensate body animation below the neck (spine bending, leaning, etc.).
* When the torso bends, the head's world orientation shifts. This
* counter-rotates the posture to keep the head pointing at the target.
* counter-rotates the gaze to keep the head pointing at the target.
* 1.0 = full compensation — head stays locked on target even during bows.
* 0.0 = no compensation head follows body movement naturally. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "0", ClampMax = "1"))
float BodyDriftCompensation = 0.8f;
@ -210,14 +227,14 @@ public:
* 0 = mesh faces +X (default UE convention)
* 90 = mesh faces +Y
* -90 = mesh faces -Y */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze",
meta = (ClampMin = "-180", ClampMax = "180"))
float MeshForwardYawOffset = 90.0f;
// ── Head bone ────────────────────────────────────────────────────────────
/** Name of the head bone on the skeletal mesh (used for eye origin calculation). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture")
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze")
FName HeadBoneName = FName(TEXT("head"));
// ── Neck bone chain ─────────────────────────────────────────────────────
@ -226,7 +243,7 @@ public:
* Order: root-to-tip (e.g. neck_01 → neck_02 → head).
* Weights should sum to ~1.0.
* If empty, falls back to single-bone behavior (HeadBoneName, weight 1.0). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture")
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Gaze")
TArray<FPS_AI_ConvAgent_NeckBoneEntry> NeckBoneChain;
// ── Getters (read by AnimNode) ───────────────────────────────────────────
@ -234,10 +251,10 @@ public:
/** Get current eye gaze curves (8 ARKit eye look curves).
* Returns a COPY scaled by activation alpha — safe to call from any thread.
* NOTE: Prefer FillCurrentEyeCurves() for hot paths to avoid per-frame allocation. */
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Posture")
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Gaze")
TMap<FName, float> GetCurrentEyeCurves() const
{
FScopeLock Lock(&PostureDataLock);
FScopeLock Lock(&GazeDataLock);
if (CurrentActiveAlpha < 0.001f) return TMap<FName, float>();
if (CurrentActiveAlpha >= 0.999f) return CurrentEyeCurves;
TMap<FName, float> Scaled = CurrentEyeCurves;
@ -247,10 +264,10 @@ public:
/** Fill an existing TMap with current eye gaze curves (zero-alloc after first call).
* Reuses the caller's existing hash table — no heap allocation when keys match.
* Thread-safe (takes PostureDataLock internally). */
* Thread-safe (takes GazeDataLock internally). */
void FillCurrentEyeCurves(TMap<FName, float>& OutCurves) const
{
FScopeLock Lock(&PostureDataLock);
FScopeLock Lock(&GazeDataLock);
if (CurrentActiveAlpha < 0.001f)
{
for (auto& Pair : OutCurves) { Pair.Value = 0.0f; }
@ -268,7 +285,7 @@ public:
* Thread-safe copy, blended by activation alpha. */
FQuat GetCurrentHeadRotation() const
{
FScopeLock Lock(&PostureDataLock);
FScopeLock Lock(&GazeDataLock);
if (CurrentActiveAlpha < 0.001f) return FQuat::Identity;
if (CurrentActiveAlpha >= 0.999f) return CurrentHeadRotation;
return FQuat::Slerp(FQuat::Identity, CurrentHeadRotation, CurrentActiveAlpha);
@ -293,7 +310,7 @@ public:
float GetBodyDriftCompensation() const { return BodyDriftCompensation * CurrentActiveAlpha; }
/** Reset the persistent body yaw target to the actor's current facing.
* Call this when re-attaching a posture target so body tracking starts
* Call this when re-attaching a gaze target so body tracking starts
* fresh instead of chasing a stale yaw from the previous interaction. */
void ResetBodyTarget();
@ -349,11 +366,11 @@ private:
/** Original actor yaw at BeginPlay (for neutral return when TargetActor is null). */
float OriginalActorYaw = 0.0f;
// ── Thread-safe lock for data read by AnimNode worker thread ─────────────
// ── Thread-safe lock for gaze data read by AnimNode worker thread ────────
/** Protects CurrentEyeCurves and CurrentHeadRotation against concurrent
* reads from the animation worker thread (Update_AnyThread). */
mutable FCriticalSection PostureDataLock;
mutable FCriticalSection GazeDataLock;
// ── Output data ──────────────────────────────────────────────────────────

View File

@ -9,7 +9,7 @@
class UPS_AI_ConvAgent_ElevenLabsComponent;
class UPS_AI_ConvAgent_MicrophoneCaptureComponent;
class UPS_AI_ConvAgent_PostureComponent;
class UPS_AI_ConvAgent_GazeComponent;
// ─────────────────────────────────────────────────────────────────────────────
// Delegates
@ -40,9 +40,9 @@ DECLARE_DYNAMIC_MULTICAST_DELEGATE(FOnNoConvAgentInRange);
// Workflow:
// 1. Add this component to your player pawn Blueprint.
// 2. Configure MaxInteractionDistance, ViewConeHalfAngle, etc.
// 3. Bind to OnAgentSelected / OnAgentDeselected to update posture,
// 3. Bind to OnAgentSelected / OnAgentDeselected to update gaze,
// UI, and other agent-specific logic from Blueprint.
// 4. Each agent's ElevenLabsComponent should have bExternalMicManagement = true.
// 4. Mic management is auto-detected (no manual flag needed on agents).
// 5. Agents manage their own WebSocket connections independently.
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "PS AI ConvAgent", meta = (BlueprintSpawnableComponent),
@ -92,34 +92,34 @@ public:
meta = (ToolTip = "Require the player to look at an agent to select it.\nWhen false, the closest agent within range is always selected."))
bool bRequireLookAt = true;
// ── Posture management ───────────────────────────────────────────────────
// ── Gaze management ─────────────────────────────────────────────────────
/** Automatically set/clear the agent's PostureComponent TargetActor
* when the agent is selected/deselected. When false, posture must
/** Automatically set/clear the agent's GazeComponent TargetActor
* when the agent is selected/deselected. When false, gaze must
* be managed from Blueprint (e.g. on conversation start). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (ToolTip = "Automatically point the agent's posture at the pawn on selection.\nDisable for manual control (e.g. set target only when conversation starts)."))
bool bAutoManagePosture = true;
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (ToolTip = "Automatically point the agent's gaze at the pawn on selection.\nDisable for manual control (e.g. set target only when conversation starts)."))
bool bAutoManageGaze = true;
/** Delay (seconds) before setting the agent's posture target after selection.
/** Delay (seconds) before setting the agent's gaze target after selection.
* 0 = immediate. Useful to let the agent "notice" the player with a beat. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (EditCondition = "bAutoManagePosture", ClampMin = "0",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (EditCondition = "bAutoManageGaze", ClampMin = "0",
ToolTip = "Seconds to wait before the agent looks at the pawn.\n0 = immediate."))
float PostureAttachDelay = 0.0f;
float GazeAttachDelay = 0.0f;
/** Delay (seconds) before clearing the agent's posture target after deselection.
/** Delay (seconds) before clearing the agent's gaze target after deselection.
* 0 = immediate. Useful to have the agent keep looking briefly as the player leaves. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Posture",
meta = (EditCondition = "bAutoManagePosture", ClampMin = "0",
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Interaction|Gaze",
meta = (EditCondition = "bAutoManageGaze", ClampMin = "0",
ToolTip = "Seconds to wait before the agent stops looking at the pawn.\n0 = immediate."))
float PostureDetachDelay = 0.0f;
float GazeDetachDelay = 0.0f;
// ── Conversation management ──────────────────────────────────────────────
/** Automatically start the WebSocket conversation when an agent is selected
* (enters range + view cone). When false, selecting an agent only manages
* posture and visual awareness the conversation must be started explicitly
* gaze and visual awareness — the conversation must be started explicitly
* via StartConversationWithSelectedAgent() (e.g. on a key press).
* Set to false when you have multiple agents in a scene to prevent them
* all from greeting the player simultaneously. */
@ -150,14 +150,14 @@ public:
// ── Events ────────────────────────────────────────────────────────────────
/** Fired when a new agent enters selection. Use this to set posture targets, show UI, etc. */
/** Fired when a new agent enters selection. Use this to set gaze targets, show UI, etc. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|Interaction|Events",
meta = (ToolTip = "Fires when a new agent is selected.\nSet posture targets, update UI, etc."))
meta = (ToolTip = "Fires when a new agent is selected.\nSet gaze targets, update UI, etc."))
FOnConvAgentSelected OnAgentSelected;
/** Fired when the previously selected agent loses selection. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|Interaction|Events",
meta = (ToolTip = "Fires when the current agent is deselected.\nClear posture targets, hide UI, etc."))
meta = (ToolTip = "Fires when the current agent is deselected.\nClear gaze targets, hide UI, etc."))
FOnConvAgentDeselected OnAgentDeselected;
/** Fired when no agent is within range or view cone. */
@ -242,16 +242,16 @@ private:
/** Get the pawn's view location and direction (uses camera or control rotation). */
void GetPawnViewPoint(FVector& OutLocation, FVector& OutDirection) const;
// ── Posture helpers ──────────────────────────────────────────────────────
// ── Gaze helpers ────────────────────────────────────────────────────────
/** Find the PostureComponent on an agent's owner actor (null if absent). */
static UPS_AI_ConvAgent_PostureComponent* FindPostureOnAgent(UPS_AI_ConvAgent_ElevenLabsComponent* Agent);
/** Find the GazeComponent on an agent's owner actor (null if absent). */
static UPS_AI_ConvAgent_GazeComponent* FindGazeOnAgent(UPS_AI_ConvAgent_ElevenLabsComponent* Agent);
/** Set the agent's PostureComponent target to the pawn (attach). */
void AttachPostureTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
/** Set the agent's GazeComponent target to the pawn (attach). */
void AttachGazeTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
/** Clear the agent's PostureComponent target (detach). */
void DetachPostureTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
/** Clear the agent's GazeComponent target (detach). */
void DetachGazeTarget(TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> Agent);
// ── Mic routing ──────────────────────────────────────────────────────────
@ -272,8 +272,8 @@ private:
* before the PlayerController has been replicated/possessed. */
bool bInitialized = false;
// ── Posture timers ───────────────────────────────────────────────────────
// ── Gaze timers ─────────────────────────────────────────────────────────
FTimerHandle PostureAttachTimerHandle;
FTimerHandle PostureDetachTimerHandle;
FTimerHandle GazeAttachTimerHandle;
FTimerHandle GazeDetachTimerHandle;
};

View File

@ -51,16 +51,17 @@ public:
// ── Configuration ─────────────────────────────────────────────────────────
/** Target skeletal mesh to auto-apply morph targets. Leave empty to handle
* visemes manually via OnVisemesReady + GetCurrentBlendshapes(). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync",
meta = (ToolTip = "Skeletal mesh to drive morph targets on.\nLeave empty to read values manually via GetCurrentBlendshapes()."))
/** Face skeletal mesh — auto-detected at BeginPlay.
* Searches for a component named "Face", then any mesh with morph targets.
* Used internally for mode detection (curve vs morph target).
* Not exposed in editor auto-discovery handles all cases. */
UPROPERTY(Transient)
TObjectPtr<USkeletalMeshComponent> TargetMesh;
/** When false, lip sync blendshapes smoothly blend to zero (passthrough).
/** Whether lip sync is currently active (read-only).
* Managed automatically: activates on conversation connect, deactivates on disconnect.
* The underlying audio analysis keeps running so reactivation is seamless. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync",
meta = (ToolTip = "Enable lip sync.\nWhen false, outputs blend to zero (passthrough)."))
UPROPERTY(BlueprintReadOnly, Category = "PS AI ConvAgent|LipSync")
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */

View File

@ -0,0 +1,33 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimGraphNode_PS_AI_ConvAgent_BodyExpression.h"
#define LOCTEXT_NAMESPACE "AnimNode_PS_AI_ConvAgent_BodyExpression"
/** Editor-facing title shown on the node in the AnimGraph (all title types). */
FText UAnimGraphNode_PS_AI_ConvAgent_BodyExpression::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
	// Literal must stay inline in LOCTEXT so the localization gatherer picks it up.
	return LOCTEXT("NodeTitle", "PS AI ConvAgent Body Expression");
}
/** Tooltip shown when hovering the node in the AnimGraph editor.
 * Documents placement (Body AnimBP, after upstream anims), the BlendRootBone
 * upper-body boundary, and the component auto-discovery behavior. */
FText UAnimGraphNode_PS_AI_ConvAgent_BodyExpression::GetTooltipText() const
{
	return LOCTEXT("Tooltip",
		"Blends emotion-driven body poses onto the upper body during agent speech.\n\n"
		"Place this node in the Body AnimBP after your upstream animations (idle, locomotion).\n"
		"Set BlendRootBone to define the upper body boundary (e.g. spine_02).\n"
		"The lower body passes through unchanged.\n\n"
		"Auto-discovers the PS AI ConvAgent Body Expression component on the owning Actor.");
}
/** Palette category under which the node is listed in the AnimGraph context menu. */
FString UAnimGraphNode_PS_AI_ConvAgent_BodyExpression::GetNodeCategory() const
{
	return TEXT("PS AI ConvAgent");
}
/** Title-bar color of the node in the AnimGraph editor. */
FLinearColor UAnimGraphNode_PS_AI_ConvAgent_BodyExpression::GetNodeTitleColor() const
{
	// Warm green — distinct from FacialExpression (amber) and LipSync (teal)
	return FLinearColor(0.3f, 0.7f, 0.2f, 1.0f);
}
#undef LOCTEXT_NAMESPACE

View File

@ -1,30 +1,30 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimGraphNode_PS_AI_ConvAgent_Posture.h"
#include "AnimGraphNode_PS_AI_ConvAgent_Gaze.h"
#define LOCTEXT_NAMESPACE "AnimNode_PS_AI_ConvAgent_Posture"
#define LOCTEXT_NAMESPACE "AnimNode_PS_AI_ConvAgent_Gaze"
FText UAnimGraphNode_PS_AI_ConvAgent_Posture::GetNodeTitle(ENodeTitleType::Type TitleType) const
FText UAnimGraphNode_PS_AI_ConvAgent_Gaze::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
return LOCTEXT("NodeTitle", "PS AI ConvAgent Posture");
return LOCTEXT("NodeTitle", "PS AI ConvAgent Gaze");
}
FText UAnimGraphNode_PS_AI_ConvAgent_Posture::GetTooltipText() const
FText UAnimGraphNode_PS_AI_ConvAgent_Gaze::GetTooltipText() const
{
return LOCTEXT("Tooltip",
"Injects head rotation and eye gaze curves from the PS AI ConvAgent Posture component.\n\n"
"Injects head rotation and eye gaze curves from the PS AI ConvAgent Gaze component.\n\n"
"Place this node AFTER the PS AI ConvAgent Facial Expression node and\n"
"BEFORE the PS AI ConvAgent Lip Sync node in the MetaHuman Face AnimBP.\n\n"
"The component distributes look-at rotation across body (actor yaw),\n"
"head (bone rotation), and eyes (ARKit curves) for a natural look-at effect.");
}
FString UAnimGraphNode_PS_AI_ConvAgent_Posture::GetNodeCategory() const
FString UAnimGraphNode_PS_AI_ConvAgent_Gaze::GetNodeCategory() const
{
return TEXT("PS AI ConvAgent");
}
FLinearColor UAnimGraphNode_PS_AI_ConvAgent_Posture::GetNodeTitleColor() const
FLinearColor UAnimGraphNode_PS_AI_ConvAgent_Gaze::GetNodeTitleColor() const
{
// Cool blue to distinguish from Facial Expression (amber) and Lip Sync (teal)
return FLinearColor(0.2f, 0.4f, 0.9f, 1.0f);

View File

@ -0,0 +1,29 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_BodyPoseMapFactory.h"
#include "PS_AI_ConvAgent_BodyPoseMap.h"
#include "AssetTypeCategories.h"
UPS_AI_ConvAgent_BodyPoseMapFactory::UPS_AI_ConvAgent_BodyPoseMapFactory()
{
	// Asset type this factory produces.
	SupportedClass = UPS_AI_ConvAgent_BodyPoseMap::StaticClass();

	// Expose the type in the Content Browser's "create new asset" menu and
	// open the editor on the freshly created asset.
	bEditAfterNew = true;
	bCreateNew = true;
}
UObject* UPS_AI_ConvAgent_BodyPoseMapFactory::FactoryCreateNew(
	UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags,
	UObject* Context, FFeedbackContext* Warn)
{
	// Context and Warn are unused here; they are part of the UFactory
	// interface and this factory only instantiates an empty data asset.
	UPS_AI_ConvAgent_BodyPoseMap* NewPoseMap =
		NewObject<UPS_AI_ConvAgent_BodyPoseMap>(InParent, Class, Name, Flags);
	return NewPoseMap;
}
FText UPS_AI_ConvAgent_BodyPoseMapFactory::GetDisplayName() const
{
	// Label shown for this asset type in the Content Browser creation menu.
	const FText DisplayName = FText::FromString(TEXT("PS AI ConvAgent Body Pose Map"));
	return DisplayName;
}
uint32 UPS_AI_ConvAgent_BodyPoseMapFactory::GetMenuCategories() const
{
	// File the new-asset entry under the generic "Miscellaneous" category.
	const uint32 Categories = EAssetTypeCategories::Misc;
	return Categories;
}

View File

@ -0,0 +1,27 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Factories/Factory.h"
#include "PS_AI_ConvAgent_BodyPoseMapFactory.generated.h"
/**
 * Factory that lets users create UPS_AI_ConvAgent_BodyPoseMap assets
 * directly from the Content Browser (right-click > Miscellaneous).
 *
 * The factory registers the asset class, allows creation of new instances,
 * and opens the editor on the new asset (see bCreateNew / bEditAfterNew
 * in the constructor).
 */
UCLASS()
class UPS_AI_ConvAgent_BodyPoseMapFactory : public UFactory
{
	GENERATED_BODY()

public:
	/** Sets SupportedClass and the creation flags for this asset type. */
	UPS_AI_ConvAgent_BodyPoseMapFactory();

	/** Instantiates a new, empty Body Pose Map asset in the target package. */
	virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent,
		FName Name, EObjectFlags Flags, UObject* Context,
		FFeedbackContext* Warn) override;

	/** Display name used in the Content Browser's asset-creation menu. */
	virtual FText GetDisplayName() const override;

	/** Asset category bitmask the new-asset entry is filed under. */
	virtual uint32 GetMenuCategories() const override;
};

View File

@ -0,0 +1,31 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "AnimGraphNode_Base.h"
#include "AnimNode_PS_AI_ConvAgent_BodyExpression.h"
#include "AnimGraphNode_PS_AI_ConvAgent_BodyExpression.generated.h"
/**
 * AnimGraph editor node for the PS AI ConvAgent Body Expression AnimNode.
 *
 * This node appears in the AnimBP graph editor under the "PS AI ConvAgent"
 * category. Place it in the character's Body AnimBP after upstream body
 * animations (idle, locomotion). It blends emotion-driven body poses onto
 * the upper body while passing the lower body through unchanged.
 */
UCLASS()
class UAnimGraphNode_PS_AI_ConvAgent_BodyExpression : public UAnimGraphNode_Base
{
	GENERATED_BODY()

	// NOTE(review): no 'public:' after GENERATED_BODY(), unlike the factory
	// header in this plugin — members are class-private; the virtual
	// overrides are still reachable through the base vtable. Confirm this
	// is intentional.

	/** The runtime AnimNode this editor node wraps and exposes in the graph. */
	UPROPERTY(EditAnywhere, Category = "Settings")
	FAnimNode_PS_AI_ConvAgent_BodyExpression Node;

	// UAnimGraphNode_Base interface — title, tooltip, palette category,
	// and title color (warm green) are supplied by the matching .cpp.
	virtual FText GetNodeTitle(ENodeTitleType::Type TitleType) const override;
	virtual FText GetTooltipText() const override;
	virtual FString GetNodeCategory() const override;
	virtual FLinearColor GetNodeTitleColor() const override;
};

View File

@ -4,26 +4,26 @@
#include "CoreMinimal.h"
#include "AnimGraphNode_Base.h"
#include "AnimNode_PS_AI_ConvAgent_Posture.h"
#include "AnimGraphNode_PS_AI_ConvAgent_Posture.generated.h"
#include "AnimNode_PS_AI_ConvAgent_Gaze.h"
#include "AnimGraphNode_PS_AI_ConvAgent_Gaze.generated.h"
/**
* AnimGraph editor node for the PS AI ConvAgent Posture AnimNode.
* AnimGraph editor node for the PS AI ConvAgent Gaze AnimNode.
*
* This node appears in the AnimBP graph editor under the "PS AI ConvAgent" category.
* Place it AFTER the PS AI ConvAgent Facial Expression node and BEFORE the
* PS AI ConvAgent Lip Sync node in the MetaHuman Face AnimBP.
*
* It auto-discovers the PS_AI_ConvAgent_PostureComponent on the owning Actor
* It auto-discovers the PS_AI_ConvAgent_GazeComponent on the owning Actor
* and injects head bone rotation + ARKit eye gaze curves for look-at tracking.
*/
UCLASS()
class UAnimGraphNode_PS_AI_ConvAgent_Posture : public UAnimGraphNode_Base
class UAnimGraphNode_PS_AI_ConvAgent_Gaze : public UAnimGraphNode_Base
{
GENERATED_BODY()
UPROPERTY(EditAnywhere, Category = "Settings")
FAnimNode_PS_AI_ConvAgent_Posture Node;
FAnimNode_PS_AI_ConvAgent_Gaze Node;
// UAnimGraphNode_Base interface
virtual FText GetNodeTitle(ENodeTitleType::Type TitleType) const override;