Compare commits

...

2 Commits

Author SHA1 Message Date
7c235106d6 Add Uasset and fox for postures 2026-02-25 13:58:23 +01:00
780101d389 Posture: relative sticky cascade + thread safety + quaternion head rotation
Rewrite posture system as a relative cascade (Eyes → Head → Body) with
persistent sticky targets. Each layer stays put until the layer below
overflows its max angle, then realigns fully toward the target.

Key changes:
- Thread safety: FCriticalSection protects AnimNode shared data
- Persistent TargetHeadYaw/Pitch: overflow checked against target (not
  interpolating current) so head completes full realignment
- Persistent TargetBodyWorldYaw: body only moves when head+eyes combined
  range is exceeded (sticky, same pattern as head)
- Quaternion head rotation: compose independent NodQuat × TurnQuat to
  avoid diagonal coupling that FRotator causes
- Eye curves: negate CurrentEyeYaw for correct ARKit convention
- AnimNode: enhanced logging, axis diagnostic define (disabled)
- Remove old percentage/degree activation thresholds (max angles serve
  as natural thresholds in the relative cascade)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-25 13:28:08 +01:00
12 changed files with 456 additions and 236 deletions

View File

@ -8,6 +8,15 @@
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsPostureAnimNode, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// DIAGNOSTIC: Set to 1 to enable axis test mode.
// This overrides head rotation with a 20° test value that cycles through
// Pitch / Yaw / Roll every 3 seconds. Watch the MetaHuman's head and report
// which phase produces which visual movement (nod, turn, tilt).
// Set to 0 for normal production behavior.
// ─────────────────────────────────────────────────────────────────────────────
#define ELEVENLABS_AXIS_DIAGNOSTIC 0
// ─────────────────────────────────────────────────────────────────────────────
// FAnimNode_Base interface
// ─────────────────────────────────────────────────────────────────────────────
@ -35,15 +44,26 @@ void FAnimNode_ElevenLabsPosture::Initialize_AnyThread(const FAnimationInitializ
PostureComponent = Comp;
HeadBoneName = Comp->GetHeadBoneName();
UE_LOG(LogElevenLabsPostureAnimNode, Log,
TEXT("ElevenLabs Posture AnimNode bound to component on %s."),
*Owner->GetName());
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("=== ElevenLabs Posture AnimNode ==="));
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT(" Owner: %s | Mesh: %s"),
*Owner->GetName(), *SkelMesh->GetName());
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT(" HeadRotation: %s | EyeCurves: %s"),
bApplyHeadRotation ? TEXT("ON") : TEXT("OFF"),
bApplyEyeCurves ? TEXT("ON") : TEXT("OFF"));
#if ELEVENLABS_AXIS_DIAGNOSTIC
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT(" >>> AXIS DIAGNOSTIC MODE ACTIVE <<<"));
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT(" Phase 0 (0-3s): Pitch=20 | Phase 1 (3-6s): Yaw=20 | Phase 2 (6-9s): Roll=20"));
#endif
}
else
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("No ElevenLabsPostureComponent found on %s. "
"Add the component to enable posture tracking."),
TEXT("No ElevenLabsPostureComponent found on %s."),
*Owner->GetName());
}
}
@ -62,24 +82,54 @@ void FAnimNode_ElevenLabsPosture::CacheBones_AnyThread(const FAnimationCacheBone
{
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const FBoneContainer* RequiredBones = &Proxy->GetRequiredBones())
{
const FReferenceSkeleton& RefSkeleton = RequiredBones->GetReferenceSkeleton();
const int32 MeshIndex = RefSkeleton.FindBoneIndex(HeadBoneName);
if (MeshIndex != INDEX_NONE)
{
HeadBoneIndex = RequiredBones->MakeCompactPoseIndex(
FMeshPoseBoneIndex(MeshIndex));
const FBoneContainer& RequiredBones = Proxy->GetRequiredBones();
const FReferenceSkeleton& RefSkeleton = RequiredBones.GetReferenceSkeleton();
const int32 MeshIndex = RefSkeleton.FindBoneIndex(HeadBoneName);
UE_LOG(LogElevenLabsPostureAnimNode, Log,
TEXT("Head bone '%s' resolved to compact index %d."),
*HeadBoneName.ToString(), HeadBoneIndex.GetInt());
if (MeshIndex != INDEX_NONE)
{
HeadBoneIndex = RequiredBones.MakeCompactPoseIndex(
FMeshPoseBoneIndex(MeshIndex));
// Log reference pose rotation for diagnostic
const TArray<FTransform>& RefPose = RefSkeleton.GetRefBonePose();
if (MeshIndex < RefPose.Num())
{
const FQuat RefRot = RefPose[MeshIndex].GetRotation();
const FRotator RefEuler = RefRot.Rotator();
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("Head bone '%s' index=%d | RefPose rotation: P=%.2f Y=%.2f R=%.2f"),
*HeadBoneName.ToString(), HeadBoneIndex.GetInt(),
RefEuler.Pitch, RefEuler.Yaw, RefEuler.Roll);
}
else
// Also log parent bone info
const int32 ParentMeshIdx = RefSkeleton.GetParentIndex(MeshIndex);
if (ParentMeshIdx != INDEX_NONE)
{
const FName ParentName = RefSkeleton.GetBoneName(ParentMeshIdx);
if (ParentMeshIdx < RefPose.Num())
{
const FRotator ParentEuler = RefPose[ParentMeshIdx].GetRotation().Rotator();
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT(" Parent bone: '%s' | RefPose rotation: P=%.2f Y=%.2f R=%.2f"),
*ParentName.ToString(),
ParentEuler.Pitch, ParentEuler.Yaw, ParentEuler.Roll);
}
}
}
else
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("Head bone '%s' NOT FOUND in skeleton. Available bones:"),
*HeadBoneName.ToString());
// List first 10 bone names to help debug
const int32 NumBones = FMath::Min(RefSkeleton.GetNum(), 10);
for (int32 i = 0; i < NumBones; ++i)
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("Head bone '%s' not found in skeleton."),
*HeadBoneName.ToString());
TEXT(" [%d] %s"), i, *RefSkeleton.GetBoneName(i).ToString());
}
}
}
@ -107,22 +157,74 @@ void FAnimNode_ElevenLabsPosture::Evaluate_AnyThread(FPoseContext& Output)
BasePose.Evaluate(Output);
// ── 1. Inject eye gaze curves (8 ARKit eye look curves) ──────────────────
for (const auto& Pair : CachedEyeCurves)
if (bApplyEyeCurves)
{
Output.Curve.Set(Pair.Key, Pair.Value);
for (const auto& Pair : CachedEyeCurves)
{
Output.Curve.Set(Pair.Key, Pair.Value);
}
}
// ── 2. Apply head bone rotation ──────────────────────────────────────────
if (HeadBoneIndex != FCompactPoseBoneIndex(INDEX_NONE)
&& HeadBoneIndex.GetInt() < Output.Pose.GetNumBones()
&& (!CachedHeadRotation.IsNearlyZero(0.1f)))
// ── 2. Apply head bone rotation ─────────────────────────────────────────
if (bApplyHeadRotation
&& HeadBoneIndex.GetInt() != INDEX_NONE
&& HeadBoneIndex.GetInt() < Output.Pose.GetNumBones())
{
FTransform& HeadTransform = Output.Pose[HeadBoneIndex];
// Compose the head rotation offset with the existing bone rotation.
// This adds our look-at rotation on top of whatever the base animation provides.
const FQuat HeadOffset = CachedHeadRotation.Quaternion();
HeadTransform.SetRotation(HeadOffset * HeadTransform.GetRotation());
#if ELEVENLABS_AXIS_DIAGNOSTIC
// ── DIAGNOSTIC: Cycle through axis test rotations ──────────────
// Phase 0 (0-3s): FRotator(20, 0, 0) = Pitch component only
// Phase 1 (3-6s): FRotator(0, 20, 0) = Yaw component only
// Phase 2 (6-9s): FRotator(0, 0, 20) = Roll component only
// Then repeats.
//
// Watch the head and note what happens in each phase:
// "nod up/down", "turn left/right", "tilt ear-to-shoulder"
//
static float DiagTimer = 0.0f;
static int32 DiagLogCounter = 0;
DiagTimer += 1.0f / 30.0f; // approximate, animation thread doesn't have real delta
const int32 Phase = ((int32)(DiagTimer / 10.0f)) % 3;
FRotator DiagRotation = FRotator::ZeroRotator;
const TCHAR* PhaseName = TEXT("???");
switch (Phase)
{
case 0:
DiagRotation = FRotator(20.0f, 0.0f, 0.0f);
PhaseName = TEXT("PITCH=20 (Y-axis rot)");
break;
case 1:
DiagRotation = FRotator(0.0f, 20.0f, 0.0f);
PhaseName = TEXT("YAW=20 (Z-axis rot)");
break;
case 2:
DiagRotation = FRotator(0.0f, 0.0f, 20.0f);
PhaseName = TEXT("ROLL=20 (X-axis rot)");
break;
}
const FQuat HeadOffset = DiagRotation.Quaternion();
// Pre-multiply: apply in parent space
HeadTransform.SetRotation((HeadOffset * HeadTransform.GetRotation()).GetNormalized());
DiagLogCounter++;
if (DiagLogCounter % 90 == 0)
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("DIAG Phase %d: %s | Timer=%.1f"),
Phase, PhaseName, DiagTimer);
}
#else
// ── PRODUCTION: Apply real head rotation ─────────────────────────
if (!CachedHeadRotation.IsNearlyZero(0.1f))
{
const FQuat HeadOffset = CachedHeadRotation.Quaternion();
// Pre-multiply: apply offset in parent space (neck)
HeadTransform.SetRotation((HeadOffset * HeadTransform.GetRotation()).GetNormalized());
}
#endif
}
}

View File

@ -5,7 +5,7 @@
#include "GameFramework/Actor.h"
#include "Math/UnrealMathUtility.h"
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsPosture, Log, All);
DEFINE_LOG_CATEGORY(LogElevenLabsPosture);
// ── ARKit eye curve names ────────────────────────────────────────────────────
static const FName EyeLookUpLeft(TEXT("eyeLookUpLeft"));
@ -17,6 +17,14 @@ static const FName EyeLookDownRight(TEXT("eyeLookDownRight"));
static const FName EyeLookInRight(TEXT("eyeLookInRight"));
static const FName EyeLookOutRight(TEXT("eyeLookOutRight"));
// ── ARKit full eye range (degrees) ──────────────────────────────────────────
// These represent the physical range of eye motion that maps to ARKit curve
// value 0→1. MaxEyeHorizontal/Vertical control the CASCADE threshold (when
// the head kicks in), but the visual eye deflection is always normalized by
// these fixed constants so the eye curves look correct at any threshold.
static constexpr float ARKitEyeRangeHorizontal = 30.0f;
static constexpr float ARKitEyeRangeVertical = 20.0f;
// ─────────────────────────────────────────────────────────────────────────────
// Construction
// ─────────────────────────────────────────────────────────────────────────────
@ -25,6 +33,7 @@ UElevenLabsPostureComponent::UElevenLabsPostureComponent()
{
PrimaryComponentTick.bCanEverTick = true;
PrimaryComponentTick.TickGroup = TG_PrePhysics;
bAutoActivate = true;
}
// ─────────────────────────────────────────────────────────────────────────────
@ -51,110 +60,15 @@ void UElevenLabsPostureComponent::BeginPlay()
*Owner->GetName());
}
// Remember original actor facing for neutral reference
OriginalActorYaw = Owner->GetActorRotation().Yaw;
// Remember original actor facing for neutral reference.
// Apply the mesh forward offset so "neutral" aligns with where the face points.
OriginalActorYaw = Owner->GetActorRotation().Yaw + MeshForwardYawOffset;
TargetBodyWorldYaw = Owner->GetActorRotation().Yaw;
UE_LOG(LogElevenLabsPosture, Log,
TEXT("Posture component initialized on %s. Body=%.0f%% Head=%.0f%% Eyes=%.0f%%"),
*Owner->GetName(), BodyRotationPercent, HeadRotationPercent, EyeRotationPercent);
}
// ─────────────────────────────────────────────────────────────────────────────
// Compute desired angles from character to target
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::ComputeDesiredAngles(
float& OutTotalYaw, float& OutTotalPitch) const
{
OutTotalYaw = 0.0f;
OutTotalPitch = 0.0f;
AActor* Owner = GetOwner();
if (!Owner || !TargetActor)
return;
// Target world position (actor origin + offset)
const FVector TargetPos = TargetActor->GetActorLocation() + TargetOffset;
// Eye origin: use head bone if available, otherwise actor location + offset
FVector EyeOrigin;
if (CachedMesh.IsValid() && CachedMesh->DoesSocketExist(HeadBoneName))
{
EyeOrigin = CachedMesh->GetSocketLocation(HeadBoneName);
}
else
{
// Fallback: use actor origin + same offset height
EyeOrigin = Owner->GetActorLocation() + FVector(0.0f, 0.0f, TargetOffset.Z);
}
// Direction from eyes to target
const FVector Direction = (TargetPos - EyeOrigin).GetSafeNormal();
if (Direction.IsNearlyZero())
return;
// Convert to rotation
const FRotator LookAtRotation = Direction.Rotation();
// Get actor's current forward rotation (yaw only, from original orientation)
// We compute relative to original facing so body rotation doesn't feed back
const FRotator ActorForward = FRotator(0.0f, OriginalActorYaw + AppliedBodyYaw, 0.0f);
// Delta rotation in local space
FRotator Delta = (LookAtRotation - ActorForward).GetNormalized();
OutTotalYaw = Delta.Yaw;
OutTotalPitch = Delta.Pitch;
}
// ─────────────────────────────────────────────────────────────────────────────
// Distribute total angles across body/head/eye layers
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::DistributeAngles(
float TotalYaw, float TotalPitch,
float& OutBodyYaw,
float& OutHeadYaw, float& OutHeadPitch,
float& OutEyeYaw, float& OutEyePitch) const
{
const float TotalPercent = BodyRotationPercent + HeadRotationPercent + EyeRotationPercent;
if (TotalPercent <= 0.0f)
{
OutBodyYaw = OutHeadYaw = OutHeadPitch = OutEyeYaw = OutEyePitch = 0.0f;
return;
}
// ── Yaw distribution (body + head + eyes) ────────────────────────────────
// Body gets its share first
const float BodyShare = BodyRotationPercent / 100.0f;
OutBodyYaw = FMath::Clamp(TotalYaw * BodyShare, -MaxBodyYaw, MaxBodyYaw);
// Remaining yaw after body
const float RemainingYawAfterBody = TotalYaw - OutBodyYaw;
// Head gets its proportional share of the remainder
const float HeadEyeTotal = HeadRotationPercent + EyeRotationPercent;
const float HeadProportion = HeadEyeTotal > 0.0f
? HeadRotationPercent / HeadEyeTotal : 0.5f;
OutHeadYaw = FMath::Clamp(RemainingYawAfterBody * HeadProportion,
-MaxHeadYaw, MaxHeadYaw);
// Eyes get whatever is left
const float RemainingYawAfterHead = RemainingYawAfterBody - OutHeadYaw;
OutEyeYaw = FMath::Clamp(RemainingYawAfterHead, -MaxEyeHorizontal, MaxEyeHorizontal);
// ── Pitch distribution (head + eyes only, body doesn't pitch) ────────────
const float HeadPitchShare = HeadEyeTotal > 0.0f
? HeadRotationPercent / HeadEyeTotal : 0.5f;
OutHeadPitch = FMath::Clamp(TotalPitch * HeadPitchShare,
-MaxHeadPitch, MaxHeadPitch);
const float RemainingPitch = TotalPitch - OutHeadPitch;
OutEyePitch = FMath::Clamp(RemainingPitch, -MaxEyeVertical, MaxEyeVertical);
TEXT("Posture initialized on %s. MeshOffset=%.0f OriginalYaw=%.0f MaxEye=%.0f/%.0f MaxHead=%.0f/%.0f"),
*Owner->GetName(), MeshForwardYawOffset, OriginalActorYaw,
MaxEyeHorizontal, MaxEyeVertical, MaxHeadYaw, MaxHeadPitch);
}
// ─────────────────────────────────────────────────────────────────────────────
@ -166,17 +80,19 @@ void UElevenLabsPostureComponent::UpdateEyeCurves(float EyeYaw, float EyePitch)
CurrentEyeCurves.Reset();
// Horizontal: positive yaw = looking right
// Normalized by the fixed ARKit physical range, NOT MaxEyeHorizontal
// (which only controls the cascade threshold).
if (EyeYaw > 0.0f)
{
// Looking right: left eye looks outward, right eye looks inward (nasal)
const float Value = FMath::Clamp(EyeYaw / MaxEyeHorizontal, 0.0f, 1.0f);
const float Value = FMath::Clamp(EyeYaw / ARKitEyeRangeHorizontal, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookOutLeft, Value);
CurrentEyeCurves.Add(EyeLookInRight, Value);
}
else if (EyeYaw < 0.0f)
{
// Looking left: left eye looks inward (nasal), right eye looks outward
const float Value = FMath::Clamp(-EyeYaw / MaxEyeHorizontal, 0.0f, 1.0f);
const float Value = FMath::Clamp(-EyeYaw / ARKitEyeRangeHorizontal, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookInLeft, Value);
CurrentEyeCurves.Add(EyeLookOutRight, Value);
}
@ -184,20 +100,30 @@ void UElevenLabsPostureComponent::UpdateEyeCurves(float EyeYaw, float EyePitch)
// Vertical: positive pitch = looking up
if (EyePitch > 0.0f)
{
const float Value = FMath::Clamp(EyePitch / MaxEyeVertical, 0.0f, 1.0f);
const float Value = FMath::Clamp(EyePitch / ARKitEyeRangeVertical, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookUpLeft, Value);
CurrentEyeCurves.Add(EyeLookUpRight, Value);
}
else if (EyePitch < 0.0f)
{
const float Value = FMath::Clamp(-EyePitch / MaxEyeVertical, 0.0f, 1.0f);
const float Value = FMath::Clamp(-EyePitch / ARKitEyeRangeVertical, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookDownLeft, Value);
CurrentEyeCurves.Add(EyeLookDownRight, Value);
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Tick — compute, distribute, smooth, apply
// Tick — relative cascade 360° posture tracking
//
// Eyes always track first, relative to the current head direction.
// When eyes exceed MaxEyeHorizontal → head realigns to target (eyes ≈ 0°).
// Now eyes track relative to the NEW head position ("recalé").
// When head exceeds MaxHeadYaw → body rotates, head recenters.
//
// Same pattern for pitch: eyes → head (no body pitch).
//
// This naturally prevents oscillation: after a realignment, the reference
// point shifts so small movements don't re-trigger higher layers.
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::TickComponent(
@ -209,56 +135,179 @@ void UElevenLabsPostureComponent::TickComponent(
if (!Owner)
return;
// ── 1. Compute total desired angles ──────────────────────────────────────
float TotalYaw = 0.0f;
float TotalPitch = 0.0f;
ComputeDesiredAngles(TotalYaw, TotalPitch);
// ── 2. Distribute across layers ──────────────────────────────────────────
float DesiredBodyYaw = 0.0f;
float DesiredHeadYaw = 0.0f, DesiredHeadPitch = 0.0f;
float DesiredEyeYaw = 0.0f, DesiredEyePitch = 0.0f;
if (TargetActor)
{
DistributeAngles(TotalYaw, TotalPitch,
DesiredBodyYaw,
DesiredHeadYaw, DesiredHeadPitch,
DesiredEyeYaw, DesiredEyePitch);
// ── 1. Compute target position and eye origin ──────────────────────
const FVector TargetPos = TargetActor->GetActorLocation() + TargetOffset;
FVector EyeOrigin;
if (CachedMesh.IsValid() && CachedMesh->DoesSocketExist(HeadBoneName))
{
EyeOrigin = CachedMesh->GetSocketLocation(HeadBoneName);
}
else
{
EyeOrigin = Owner->GetActorLocation() + FVector(0.0f, 0.0f, TargetOffset.Z);
}
const FVector ToTarget = TargetPos - EyeOrigin;
// ── 2. Body: smooth interp toward sticky target ────────────────────
//
// TargetBodyWorldYaw is persistent — only updated when head+eyes
// can't reach the target. Same sticky pattern as TargetHeadYaw.
const FVector HorizontalDir = FVector(ToTarget.X, ToTarget.Y, 0.0f);
float TargetWorldYaw = 0.0f;
if (!HorizontalDir.IsNearlyZero(1.0f))
{
TargetWorldYaw = HorizontalDir.Rotation().Yaw;
}
// Body smoothly interpolates toward its persistent target
const float BodyDelta = FMath::FindDeltaAngleDegrees(
Owner->GetActorRotation().Yaw, TargetBodyWorldYaw);
if (FMath::Abs(BodyDelta) > 0.1f)
{
const float BodyStep = FMath::FInterpTo(0.0f, BodyDelta, DeltaTime, BodyInterpSpeed);
Owner->AddActorWorldRotation(FRotator(0.0f, BodyStep, 0.0f));
}
// ── 3. Compute DeltaYaw after body interp ──────────────────────────
float DeltaYaw = 0.0f;
if (!HorizontalDir.IsNearlyZero(1.0f))
{
const float CurrentFacingYaw = Owner->GetActorRotation().Yaw + MeshForwardYawOffset;
DeltaYaw = FMath::FindDeltaAngleDegrees(CurrentFacingYaw, TargetWorldYaw);
}
// ── 4. Pitch from 3D direction ─────────────────────────────────────
const float HorizontalDist = HorizontalDir.Size();
const float TargetPitch = (HorizontalDist > 1.0f)
? FMath::RadiansToDegrees(FMath::Atan2(ToTarget.Z, HorizontalDist))
: 0.0f;
// ── 5. BODY overflow: check against persistent TargetBodyWorldYaw ──
//
// Same pattern as head: check from body's TARGET position (not current).
// Body triggers when head+eyes combined range is exceeded.
const float BodyTargetFacing = TargetBodyWorldYaw + MeshForwardYawOffset;
const float DeltaFromBodyTarget = FMath::FindDeltaAngleDegrees(
BodyTargetFacing, TargetWorldYaw);
if (FMath::Abs(DeltaFromBodyTarget) > MaxHeadYaw + MaxEyeHorizontal)
{
// Body realigns to face target
TargetBodyWorldYaw = TargetWorldYaw - MeshForwardYawOffset;
// Head returns to ~0° since body will face target directly
TargetHeadYaw = 0.0f;
}
// ── 6. HEAD: realign when eyes overflow (check against body TARGET) ──
//
// Head overflow is checked relative to where the BODY IS GOING
// (TargetBodyWorldYaw), not where it currently is. This prevents
// the head from overcompensating during body interpolation —
// otherwise the head turns to track while body catches up, then
// snaps back when body arrives (two-step animation artifact).
const float HeadDeltaYaw = FMath::FindDeltaAngleDegrees(
TargetBodyWorldYaw + MeshForwardYawOffset, TargetWorldYaw);
const float EyeDeltaYaw = HeadDeltaYaw - TargetHeadYaw;
if (FMath::Abs(EyeDeltaYaw) > MaxEyeHorizontal)
{
TargetHeadYaw = FMath::Clamp(HeadDeltaYaw, -MaxHeadYaw, MaxHeadYaw);
}
// Head smoothly interpolates toward its persistent target
CurrentHeadYaw = FMath::FInterpTo(CurrentHeadYaw, TargetHeadYaw, DeltaTime, HeadInterpSpeed);
// Eyes = remaining gap (during transients, eyes may sit at MaxEye while
// the head catches up — ARKit normalization keeps visual deflection small)
CurrentEyeYaw = FMath::Clamp(DeltaYaw - CurrentHeadYaw, -MaxEyeHorizontal, MaxEyeHorizontal);
// ── 5. PITCH: relative cascade (Eyes → Head, no body pitch) ────────
// Same pattern: check against persistent TargetHeadPitch
const float EyeDeltaPitch = TargetPitch - TargetHeadPitch;
if (FMath::Abs(EyeDeltaPitch) > MaxEyeVertical)
{
// Eyes overflow → head realigns toward target pitch
TargetHeadPitch = FMath::Clamp(TargetPitch, -MaxHeadPitch, MaxHeadPitch);
}
CurrentHeadPitch = FMath::FInterpTo(CurrentHeadPitch, TargetHeadPitch, DeltaTime, HeadInterpSpeed);
// Eyes = remaining pitch gap
CurrentEyePitch = FMath::Clamp(TargetPitch - CurrentHeadPitch, -MaxEyeVertical, MaxEyeVertical);
}
// else: all desired = 0 (return to neutral)
// ── 3. Smooth interpolation ──────────────────────────────────────────────
const float Speed = TargetActor ? 1.0f : 0.0f; // Use per-layer speed or neutral speed
const float BodySpeed = TargetActor ? BodyInterpSpeed : ReturnToNeutralSpeed;
const float HeadSpeed = TargetActor ? HeadInterpSpeed : ReturnToNeutralSpeed;
const float EyeSpeed = TargetActor ? EyeInterpSpeed : ReturnToNeutralSpeed;
CurrentBodyYaw = FMath::FInterpTo(CurrentBodyYaw, DesiredBodyYaw, DeltaTime, BodySpeed);
CurrentHeadYaw = FMath::FInterpTo(CurrentHeadYaw, DesiredHeadYaw, DeltaTime, HeadSpeed);
CurrentHeadPitch = FMath::FInterpTo(CurrentHeadPitch, DesiredHeadPitch, DeltaTime, HeadSpeed);
CurrentEyeYaw = FMath::FInterpTo(CurrentEyeYaw, DesiredEyeYaw, DeltaTime, EyeSpeed);
CurrentEyePitch = FMath::FInterpTo(CurrentEyePitch, DesiredEyePitch, DeltaTime, EyeSpeed);
// ── 4. Apply body rotation (delta to avoid drift) ────────────────────────
const float BodyYawDelta = CurrentBodyYaw - AppliedBodyYaw;
if (FMath::Abs(BodyYawDelta) > 0.01f)
else
{
FRotator CurrentRot = Owner->GetActorRotation();
CurrentRot.Yaw += BodyYawDelta;
Owner->SetActorRotation(CurrentRot);
AppliedBodyYaw = CurrentBodyYaw;
// ── No target: smoothly return to neutral ──────────────────────────
// Body: return to original facing via sticky target
TargetBodyWorldYaw = OriginalActorYaw - MeshForwardYawOffset;
const float NeutralDelta = FMath::FindDeltaAngleDegrees(
Owner->GetActorRotation().Yaw, TargetBodyWorldYaw);
if (FMath::Abs(NeutralDelta) > 0.1f)
{
const float NeutralStep = FMath::FInterpTo(0.0f, NeutralDelta, DeltaTime, ReturnToNeutralSpeed);
Owner->AddActorWorldRotation(FRotator(0.0f, NeutralStep, 0.0f));
}
// Head + Eyes: return to center
TargetHeadYaw = 0.0f;
TargetHeadPitch = 0.0f;
CurrentHeadYaw = FMath::FInterpTo(CurrentHeadYaw, 0.0f, DeltaTime, ReturnToNeutralSpeed);
CurrentHeadPitch = FMath::FInterpTo(CurrentHeadPitch, 0.0f, DeltaTime, ReturnToNeutralSpeed);
CurrentEyeYaw = FMath::FInterpTo(CurrentEyeYaw, 0.0f, DeltaTime, ReturnToNeutralSpeed);
CurrentEyePitch = FMath::FInterpTo(CurrentEyePitch, 0.0f, DeltaTime, ReturnToNeutralSpeed);
}
// ── 5. Store head rotation for AnimNode ──────────────────────────────────
// ── 6. Output for AnimNode (thread-safe write) ────────────────────────
{
FScopeLock Lock(&PostureDataLock);
CurrentHeadRotation = FRotator(CurrentHeadPitch, CurrentHeadYaw, 0.0f);
// MetaHuman head bone axis mapping (independent quaternions to avoid
// diagonal coupling that FRotator causes when both axes are non-zero):
// Z-axis rotation = nod up/down → our HeadPitch
// X-axis rotation = turn left/right → our HeadYaw
const FQuat NodQuat(FVector::UpVector, FMath::DegreesToRadians(-CurrentHeadPitch));
const FQuat TurnQuat(FVector::ForwardVector, FMath::DegreesToRadians(CurrentHeadYaw));
CurrentHeadRotation = (TurnQuat * NodQuat).Rotator();
// ── 6. Update eye curves for AnimNode ────────────────────────────────────
// Eye yaw is negated to match ARKit curve direction convention.
UpdateEyeCurves(-CurrentEyeYaw, CurrentEyePitch);
}
UpdateEyeCurves(CurrentEyeYaw, CurrentEyePitch);
// ── Debug (every ~2 seconds) ─────────────────────────────────────────
#if !UE_BUILD_SHIPPING
DebugFrameCounter++;
if (DebugFrameCounter % 120 == 0)
{
if (TargetActor)
{
const float FacingYaw = Owner->GetActorRotation().Yaw + MeshForwardYawOffset;
const FVector TP = TargetActor->GetActorLocation() + TargetOffset;
const FVector Dir = TP - Owner->GetActorLocation();
const float TgtYaw = FVector(Dir.X, Dir.Y, 0.0f).Rotation().Yaw;
const float Delta = FMath::FindDeltaAngleDegrees(FacingYaw, TgtYaw);
UE_LOG(LogElevenLabsPosture, Log,
TEXT("Posture [%s -> %s]: Delta=%.1f | Head=%.1f/%.1f | Eyes=%.1f/%.1f | EyeGap=%.1f"),
*Owner->GetName(), *TargetActor->GetName(),
Delta,
CurrentHeadYaw, CurrentHeadPitch,
CurrentEyeYaw, CurrentEyePitch,
Delta - CurrentHeadYaw);
}
}
#endif
}

View File

@ -12,15 +12,18 @@ class UElevenLabsPostureComponent;
/**
* Animation node that injects ElevenLabs posture data into the AnimGraph.
*
* Handles two types of output:
* Handles two types of output (each can be toggled independently):
* 1. Head bone rotation (yaw + pitch) applied directly to the bone transform
* 2. Eye gaze curves (8 ARKit eye look curves) injected as animation curves
*
* Place this node in the MetaHuman Face AnimBP AFTER the Facial Expression node
* and BEFORE the Lip Sync node.
* For MetaHuman, place this node in TWO AnimBPs:
*
* Graph layout:
* [Source] [Facial Expression] [ElevenLabs Posture] [Lip Sync] [mh_arkit_mapping_pose] ...
* Body AnimBP: bApplyHeadRotation = true, bApplyEyeCurves = false
* Rotates the head bone on the body mesh (face mesh follows via LeaderPose).
*
* Face AnimBP: bApplyHeadRotation = false, bApplyEyeCurves = true
* Injects ARKit eye curves before mh_arkit_mapping_pose.
* Graph: [Source] [Facial Expression] [Posture] [Lip Sync] [mh_arkit] ...
*
* The node auto-discovers the ElevenLabsPostureComponent — no manual wiring needed.
*/
@ -33,6 +36,16 @@ struct PS_AI_AGENT_ELEVENLABS_API FAnimNode_ElevenLabsPosture : public FAnimNode
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Links)
FPoseLink BasePose;
/** Apply head bone rotation (yaw + pitch).
* Enable in the BODY AnimBP, disable in the Face AnimBP. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Settings")
bool bApplyHeadRotation = true;
/** Inject ARKit eye gaze curves (8 eye look curves).
* Enable in the FACE AnimBP, disable in the Body AnimBP. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Settings")
bool bApplyEyeCurves = true;
// ── FAnimNode_Base interface ──────────────────────────────────────────────
virtual void Initialize_AnyThread(const FAnimationInitializeContext& Context) override;

View File

@ -4,31 +4,37 @@
#include "CoreMinimal.h"
#include "Components/ActorComponent.h"
#include "HAL/CriticalSection.h"
#include "ElevenLabsPostureComponent.generated.h"
class USkeletalMeshComponent;
DECLARE_LOG_CATEGORY_EXTERN(LogElevenLabsPosture, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// UElevenLabsPostureComponent
//
// Multi-layer look-at system for MetaHuman characters. Smoothly orients the
// character's body, head, and eyes toward a TargetActor using configurable
// rotation percentages and angle limits.
// Chase-based multi-layer look-at system for MetaHuman characters.
// Smoothly orients the character's body, head, and eyes toward a TargetActor.
// Supports full 360° continuous tracking — the player can circle the character
// and the agent will continuously follow without snapping or changing direction.
//
// Rotation is distributed across 3 layers:
// Body (60%) — Rotates the entire owning actor (yaw only)
// Head (20%) — Rotates the head bone via AnimNode
// Eyes (10%) — Drives ARKit eye look curves via AnimNode
// Body — Continuously chases target yaw (full 360°, no angle limit)
// Head — Fills the remaining yaw/pitch gap (clamped by MaxHeadYaw/Pitch)
// Eyes — Fills what the head can't reach (clamped by MaxEyeHorizontal/Vertical)
//
// If TargetActor is null, all layers smoothly return to neutral.
// If the target is behind the character, angles clamp at their max limits.
//
// Workflow:
// 1. Add this component to the character Blueprint.
// 2. Add the AnimNode "ElevenLabs Posture" in the Face AnimBP
// between "Facial Expression" and "Lip Sync" nodes.
// 3. Set TargetActor to any actor (player pawn, a prop, etc.).
// 4. Set TargetOffset for actors without a skeleton (e.g. (0,0,160) for
// 2. Add the AnimNode "ElevenLabs Posture" in the Body AnimBP
// with bApplyHeadRotation = true, bApplyEyeCurves = false.
// 3. Add the AnimNode "ElevenLabs Posture" in the Face AnimBP
// with bApplyHeadRotation = false, bApplyEyeCurves = true
// (between "Facial Expression" and "Lip Sync" nodes).
// 4. Set TargetActor to any actor (player pawn, a prop, etc.).
// 5. Set TargetOffset for actors without a skeleton (e.g. (0,0,160) for
// eye-level on a simple actor).
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "ElevenLabs", meta = (BlueprintSpawnableComponent),
@ -54,29 +60,14 @@ public:
meta = (ToolTip = "Offset from target actor origin.\nE.g. (0,0,160) for eye-level."))
FVector TargetOffset = FVector(0.0f, 0.0f, 160.0f);
// ── Rotation distribution (%) ────────────────────────────────────────────
/** Percentage of total rotation handled by body (whole actor yaw). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float BodyRotationPercent = 60.0f;
/** Percentage of total rotation handled by head bone. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float HeadRotationPercent = 20.0f;
/** Percentage of total rotation handled by eye look curves. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float EyeRotationPercent = 10.0f;
// ── Angle limits (degrees) ───────────────────────────────────────────────
/** Maximum body yaw rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "180"))
float MaxBodyYaw = 45.0f;
//
// Relative cascade: Eyes → Head → Body.
// Eyes always track first (relative to current head direction).
// When eyes exceed their max → head realigns to target, eyes reset ~0°.
// When head exceeds its max → body rotates, head recenters.
// After each realignment, tracking becomes relative to the new position,
// so small movements around the target don't re-trigger higher layers.
/** Maximum head yaw rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
@ -120,6 +111,17 @@ public:
meta = (ClampMin = "0.1", ClampMax = "20"))
float ReturnToNeutralSpeed = 3.0f;
// ── Forward offset ──────────────────────────────────────────────────────
/** Yaw offset (degrees) between the actor's forward (+X) and the mesh's
* visual forward (where the face points). Common values:
* 0 = mesh faces +X (default UE convention)
* 90 = mesh faces +Y
* -90 = mesh faces -Y */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "-180", ClampMax = "180"))
float MeshForwardYawOffset = 0.0f;
// ── Head bone ────────────────────────────────────────────────────────────
/** Name of the head bone on the skeletal mesh (used for eye origin calculation). */
@ -128,13 +130,23 @@ public:
// ── Getters (read by AnimNode) ───────────────────────────────────────────
/** Get current eye gaze curves (8 ARKit eye look curves). */
/** Get current eye gaze curves (8 ARKit eye look curves).
* Returns a COPY safe to call from any thread. */
UFUNCTION(BlueprintCallable, Category = "ElevenLabs|Posture")
const TMap<FName, float>& GetCurrentEyeCurves() const { return CurrentEyeCurves; }
TMap<FName, float> GetCurrentEyeCurves() const
{
	// Take the lock for the duration of the copy so the animation worker
	// thread never observes a half-written map; the caller receives an
	// independent snapshot it may use without further synchronization.
	FScopeLock Lock(&PostureDataLock);
	TMap<FName, float> CurvesSnapshot(CurrentEyeCurves);
	return CurvesSnapshot;
}
/** Get current head rotation offset (yaw + pitch, applied by AnimNode). */
/** Get current head rotation offset (yaw + pitch, applied by AnimNode).
* Thread-safe copy. */
UFUNCTION(BlueprintCallable, Category = "ElevenLabs|Posture")
FRotator GetCurrentHeadRotation() const { return CurrentHeadRotation; }
FRotator GetCurrentHeadRotation() const
{
	// Snapshot the rotator under the lock; FRotator is a plain value type,
	// so returning the copy is safe once the lock is released.
	FScopeLock Lock(&PostureDataLock);
	const FRotator RotationSnapshot = CurrentHeadRotation;
	return RotationSnapshot;
}
/** Get the head bone name (used by AnimNode to resolve bone index). */
FName GetHeadBoneName() const { return HeadBoneName; }
@ -147,32 +159,36 @@ public:
private:
// ── Internals ────────────────────────────────────────────────────────────
/** Compute the total yaw and pitch angles from the character to the target. */
void ComputeDesiredAngles(float& OutTotalYaw, float& OutTotalPitch) const;
/** Distribute a total angle across body/head/eye layers. */
void DistributeAngles(float TotalYaw, float TotalPitch,
float& OutBodyYaw,
float& OutHeadYaw, float& OutHeadPitch,
float& OutEyeYaw, float& OutEyePitch) const;
/** Map eye yaw/pitch angles to 8 ARKit eye curves. */
void UpdateEyeCurves(float EyeYaw, float EyePitch);
// ── Smoothed current values ──────────────────────────────────────────────
// ── Smoothed current values (head + eyes, body is actor yaw) ────────────
float CurrentBodyYaw = 0.0f;
float CurrentHeadYaw = 0.0f;
float CurrentHeadPitch = 0.0f;
float CurrentEyeYaw = 0.0f;
float CurrentEyePitch = 0.0f;
/** Previous body yaw, for delta rotation (avoids drift). */
float AppliedBodyYaw = 0.0f;
/** Persistent head targets — only updated when eyes overflow.
* Overflow is checked against these (not CurrentHead) so the head
* fully completes its realignment instead of stopping mid-interp. */
float TargetHeadYaw = 0.0f;
float TargetHeadPitch = 0.0f;
/** Original actor yaw at BeginPlay (for neutral reference). */
/** Persistent body target — world yaw the actor should face.
* Only updated when head+eyes can't reach the target (overflow).
* Same sticky pattern as TargetHeadYaw but for the body layer. */
float TargetBodyWorldYaw = 0.0f;
/** Original actor yaw at BeginPlay (for neutral return when TargetActor is null). */
float OriginalActorYaw = 0.0f;
// ── Thread-safe lock for data read by AnimNode worker thread ─────────────
/** Protects CurrentEyeCurves and CurrentHeadRotation against concurrent
* reads from the animation worker thread (Update_AnyThread). */
mutable FCriticalSection PostureDataLock;
// ── Output data ──────────────────────────────────────────────────────────
/** 8 ARKit eye look curves (eyeLookUpLeft, eyeLookDownRight, etc.). */
@ -183,4 +199,9 @@ private:
/** Cached skeletal mesh component on the owning actor. */
TWeakObjectPtr<USkeletalMeshComponent> CachedMesh;
#if !UE_BUILD_SHIPPING
/** Frame counter for periodic debug logging. */
int32 DebugFrameCounter = 0;
#endif
};

35
build Lancelot.bat Normal file
View File

@ -0,0 +1,35 @@
@echo off
REM ============================================================
REM Build script for the PS_AI_Agent ElevenLabs plugin (UE 5.5).
REM Invokes RunUAT BuildEditor through PowerShell and reports
REM success/failure based on the build tool's real exit code.
REM ============================================================
chcp 65001 >nul
title Build PS_AI_Agent
echo ============================================================
echo   PS_AI_Agent - Compilation plugin ElevenLabs (UE 5.5)
echo ============================================================
echo.
echo ATTENTION : Ferme l'Unreal Editor avant de continuer !
echo (Les DLL seraient verrouillees et la compilation echouerait)
echo.
pause
echo.
echo Compilation en cours...
echo (Seuls les .cpp modifies sont recompiles, ~16s)
echo.
REM BUGFIX: "powershell -Command" exits 0 whenever the command line parses,
REM regardless of what RunUAT.bat returned, so the ERRORLEVEL test below
REM always took the success branch. "exit $LASTEXITCODE" forwards the
REM batch file's real exit code to this script.
powershell.exe -Command "& 'C:\Program Files\Epic Games\UE_5.5\Engine\Build\BatchFiles\RunUAT.bat' BuildEditor -project='E:\ASTERION\GIT\PS_AI_Agent\Unreal\PS_AI_Agent\PS_AI_Agent.uproject' -notools -noP4 2>&1; exit $LASTEXITCODE"
echo.
REM EQU performs a numeric comparison; == is a string comparison and would
REM fail to match values like " 0" — EQU is the robust form for ERRORLEVEL.
if %ERRORLEVEL% EQU 0 (
    echo ============================================================
    echo SUCCES - Compilation terminee sans erreur.
    echo Tu peux relancer l'Unreal Editor.
    echo ============================================================
) else (
    echo ============================================================
    echo ECHEC - Erreur de compilation (code %ERRORLEVEL%)
    echo Consulte le log ci-dessus pour le detail.
    echo ============================================================
)
echo.
pause