v2.4.0: Posture component — multi-layer look-at (body/head/eyes)

New ElevenLabsPostureComponent with 3-layer rotation distribution:
- Body (60%): rotates entire actor (yaw only) via SetActorRotation
- Head (20%): direct bone transform in AnimNode
- Eyes (10%): 8 ARKit eye look curves (eyeLookUp/Down/In/Out L/R)

Features:
- Configurable rotation percentages and angle limits per layer
- Smooth FInterpTo interpolation with per-layer speeds
- TargetActor + TargetOffset for any actor type (no skeleton required)
- Smooth return to neutral when TargetActor is cleared
- Blue AnimGraph node in ElevenLabs category

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
j.foucher 2026-02-24 21:16:57 +01:00
parent 88c175909e
commit 7373959d8b
7 changed files with 714 additions and 0 deletions

View File

@ -0,0 +1,137 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimNode_ElevenLabsPosture.h"
#include "ElevenLabsPostureComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "Animation/AnimInstanceProxy.h"
#include "GameFramework/Actor.h"
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsPostureAnimNode, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// FAnimNode_Base interface
// ─────────────────────────────────────────────────────────────────────────────
void FAnimNode_ElevenLabsPosture::Initialize_AnyThread(const FAnimationInitializeContext& Context)
{
BasePose.Initialize(Context);
// Find the ElevenLabsPostureComponent on the owning actor.
PostureComponent.Reset();
CachedEyeCurves.Reset();
CachedHeadRotation = FRotator::ZeroRotator;
HeadBoneIndex = FCompactPoseBoneIndex(INDEX_NONE);
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const USkeletalMeshComponent* SkelMesh = Proxy->GetSkelMeshComponent())
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UElevenLabsPostureComponent* Comp =
Owner->FindComponentByClass<UElevenLabsPostureComponent>();
if (Comp)
{
PostureComponent = Comp;
HeadBoneName = Comp->GetHeadBoneName();
UE_LOG(LogElevenLabsPostureAnimNode, Log,
TEXT("ElevenLabs Posture AnimNode bound to component on %s."),
*Owner->GetName());
}
else
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("No ElevenLabsPostureComponent found on %s. "
"Add the component to enable posture tracking."),
*Owner->GetName());
}
}
}
}
}
void FAnimNode_ElevenLabsPosture::CacheBones_AnyThread(const FAnimationCacheBonesContext& Context)
{
BasePose.CacheBones(Context);
// Resolve head bone index from the skeleton
HeadBoneIndex = FCompactPoseBoneIndex(INDEX_NONE);
if (!HeadBoneName.IsNone())
{
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const FBoneContainer* RequiredBones = &Proxy->GetRequiredBones())
{
const FReferenceSkeleton& RefSkeleton = RequiredBones->GetReferenceSkeleton();
const int32 MeshIndex = RefSkeleton.FindBoneIndex(HeadBoneName);
if (MeshIndex != INDEX_NONE)
{
HeadBoneIndex = RequiredBones->MakeCompactPoseIndex(
FMeshPoseBoneIndex(MeshIndex));
UE_LOG(LogElevenLabsPostureAnimNode, Log,
TEXT("Head bone '%s' resolved to compact index %d."),
*HeadBoneName.ToString(), HeadBoneIndex.GetInt());
}
else
{
UE_LOG(LogElevenLabsPostureAnimNode, Warning,
TEXT("Head bone '%s' not found in skeleton."),
*HeadBoneName.ToString());
}
}
}
}
}
void FAnimNode_ElevenLabsPosture::Update_AnyThread(const FAnimationUpdateContext& Context)
{
BasePose.Update(Context);
// Cache posture data from the component (game thread safe copy).
// NOTE(review): Update_AnyThread may run on a worker thread while the
// component's TickComponent writes these same containers on the game
// thread — the "game thread safe" claim above is an assumption this code
// does not enforce. Confirm tick/update ordering, or snapshot the data
// on the game thread (e.g. in PreUpdate) instead.
CachedEyeCurves.Reset();
CachedHeadRotation = FRotator::ZeroRotator;
if (PostureComponent.IsValid())
{
// Copies the full curve map every update; cheap for the 8 eye curves.
CachedEyeCurves = PostureComponent->GetCurrentEyeCurves();
CachedHeadRotation = PostureComponent->GetCurrentHeadRotation();
}
}
void FAnimNode_ElevenLabsPosture::Evaluate_AnyThread(FPoseContext& Output)
{
// Evaluate the upstream pose (pass-through)
BasePose.Evaluate(Output);
// ── 1. Inject eye gaze curves (8 ARKit eye look curves) ──────────────────
// Overwrites any upstream value for the same curve names.
for (const auto& Pair : CachedEyeCurves)
{
Output.Curve.Set(Pair.Key, Pair.Value);
}
// ── 2. Apply head bone rotation ──────────────────────────────────────────
// Skipped when the bone is unresolved, out of range for the current pose,
// or the cached rotation is effectively zero (avoids needless quat math).
if (HeadBoneIndex != FCompactPoseBoneIndex(INDEX_NONE)
&& HeadBoneIndex.GetInt() < Output.Pose.GetNumBones()
&& (!CachedHeadRotation.IsNearlyZero(0.1f)))
{
FTransform& HeadTransform = Output.Pose[HeadBoneIndex];
// Compose the head rotation offset with the existing bone rotation.
// This adds our look-at rotation on top of whatever the base animation provides.
// NOTE(review): the offset is applied in the bone's local (parent-relative)
// space, while the component computes it in actor-relative terms —
// confirm the two spaces line up for the target skeleton.
const FQuat HeadOffset = CachedHeadRotation.Quaternion();
HeadTransform.SetRotation(HeadOffset * HeadTransform.GetRotation());
}
}
void FAnimNode_ElevenLabsPosture::GatherDebugData(FNodeDebugData& DebugData)
{
	// Summarize the cached state (curve count + head yaw/pitch) for the
	// AnimGraph debugger, then let the input pose report itself.
	const FString NodeSummary = FString::Printf(
		TEXT("ElevenLabs Posture (eyes: %d curves, head: Y=%.1f P=%.1f)"),
		CachedEyeCurves.Num(),
		CachedHeadRotation.Yaw, CachedHeadRotation.Pitch);
	DebugData.AddDebugItem(NodeSummary);
	BasePose.GatherDebugData(DebugData);
}

View File

@ -0,0 +1,264 @@
// Copyright ASTERION. All Rights Reserved.
#include "ElevenLabsPostureComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "GameFramework/Actor.h"
#include "Math/UnrealMathUtility.h"
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsPosture, Log, All);
// ── ARKit eye curve names ────────────────────────────────────────────────────
// Left-eye gaze curves. ARKit naming: "In" is toward the nose, "Out" away.
static const FName EyeLookUpLeft(TEXT("eyeLookUpLeft"));
static const FName EyeLookDownLeft(TEXT("eyeLookDownLeft"));
static const FName EyeLookInLeft(TEXT("eyeLookInLeft"));
static const FName EyeLookOutLeft(TEXT("eyeLookOutLeft"));
// Right-eye gaze curves (mirrored semantics of the left set).
static const FName EyeLookUpRight(TEXT("eyeLookUpRight"));
static const FName EyeLookDownRight(TEXT("eyeLookDownRight"));
static const FName EyeLookInRight(TEXT("eyeLookInRight"));
static const FName EyeLookOutRight(TEXT("eyeLookOutRight"));
// ─────────────────────────────────────────────────────────────────────────────
// Construction
// ─────────────────────────────────────────────────────────────────────────────
UElevenLabsPostureComponent::UElevenLabsPostureComponent()
{
// Tick every frame so look-at interpolation stays smooth.
PrimaryComponentTick.bCanEverTick = true;
// Tick in the pre-physics group so actor yaw is applied early in the frame.
PrimaryComponentTick.TickGroup = TG_PrePhysics;
}
// ─────────────────────────────────────────────────────────────────────────────
// BeginPlay
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::BeginPlay()
{
Super::BeginPlay();
AActor* Owner = GetOwner();
if (!Owner)
{
UE_LOG(LogElevenLabsPosture, Warning, TEXT("No owner actor — posture disabled."));
return;
}
// Cache skeletal mesh for head bone queries
CachedMesh = Owner->FindComponentByClass<USkeletalMeshComponent>();
if (!CachedMesh.IsValid())
{
UE_LOG(LogElevenLabsPosture, Warning,
TEXT("No SkeletalMeshComponent found on %s — head bone lookup will be unavailable."),
*Owner->GetName());
}
// Remember original actor facing for neutral reference
OriginalActorYaw = Owner->GetActorRotation().Yaw;
UE_LOG(LogElevenLabsPosture, Log,
TEXT("Posture component initialized on %s. Body=%.0f%% Head=%.0f%% Eyes=%.0f%%"),
*Owner->GetName(), BodyRotationPercent, HeadRotationPercent, EyeRotationPercent);
}
// ─────────────────────────────────────────────────────────────────────────────
// Compute desired angles from character to target
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::ComputeDesiredAngles(
float& OutTotalYaw, float& OutTotalPitch) const
{
// Outputs default to zero (no target → neutral).
OutTotalYaw = 0.0f;
OutTotalPitch = 0.0f;
AActor* Owner = GetOwner();
if (!Owner || !TargetActor)
return;
// Target world position (actor origin + offset)
const FVector TargetPos = TargetActor->GetActorLocation() + TargetOffset;
// Eye origin: use head bone if available, otherwise actor location + offset
FVector EyeOrigin;
if (CachedMesh.IsValid() && CachedMesh->DoesSocketExist(HeadBoneName))
{
EyeOrigin = CachedMesh->GetSocketLocation(HeadBoneName);
}
else
{
// Fallback: use actor origin + same offset height
EyeOrigin = Owner->GetActorLocation() + FVector(0.0f, 0.0f, TargetOffset.Z);
}
// Direction from eyes to target
const FVector Direction = (TargetPos - EyeOrigin).GetSafeNormal();
if (Direction.IsNearlyZero())
return;
// Convert to rotation
const FRotator LookAtRotation = Direction.Rotation();
// Get actor's current forward rotation (yaw only, from original orientation)
// We compute relative to original facing so body rotation doesn't feed back
// NOTE(review): this neutral reference only accounts for yaw applied by
// THIS component (AppliedBodyYaw). If gameplay code rotates the actor by
// other means after BeginPlay, the reference diverges — confirm nothing
// else drives actor yaw while tracking is active.
const FRotator ActorForward = FRotator(0.0f, OriginalActorYaw + AppliedBodyYaw, 0.0f);
// Delta rotation in local space
FRotator Delta = (LookAtRotation - ActorForward).GetNormalized();
OutTotalYaw = Delta.Yaw;
OutTotalPitch = Delta.Pitch;
}
// ─────────────────────────────────────────────────────────────────────────────
// Distribute total angles across body/head/eye layers
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::DistributeAngles(
	float TotalYaw, float TotalPitch,
	float& OutBodyYaw,
	float& OutHeadYaw, float& OutHeadPitch,
	float& OutEyeYaw, float& OutEyePitch) const
{
	// Nothing to distribute if every layer is disabled.
	const float TotalPercent = BodyRotationPercent + HeadRotationPercent + EyeRotationPercent;
	if (TotalPercent <= 0.0f)
	{
		OutBodyYaw = OutHeadYaw = OutHeadPitch = OutEyeYaw = OutEyePitch = 0.0f;
		return;
	}

	// Head's fraction of whatever the body does not take. Used for both yaw
	// and pitch — the original computed this twice (HeadProportion and
	// HeadPitchShare were byte-identical expressions); deduplicated here.
	const float HeadEyeTotal = HeadRotationPercent + EyeRotationPercent;
	const float HeadProportion = HeadEyeTotal > 0.0f
		? HeadRotationPercent / HeadEyeTotal : 0.5f;

	// ── Yaw distribution (body + head + eyes) ────────────────────────────────
	// Body gets its share first, clamped to its travel limit.
	const float BodyShare = BodyRotationPercent / 100.0f;
	OutBodyYaw = FMath::Clamp(TotalYaw * BodyShare, -MaxBodyYaw, MaxBodyYaw);

	// Head takes its proportional share of the remainder...
	const float RemainingYawAfterBody = TotalYaw - OutBodyYaw;
	OutHeadYaw = FMath::Clamp(RemainingYawAfterBody * HeadProportion,
		-MaxHeadYaw, MaxHeadYaw);

	// ...and the eyes pick up whatever is left, clamped to eye limits.
	const float RemainingYawAfterHead = RemainingYawAfterBody - OutHeadYaw;
	OutEyeYaw = FMath::Clamp(RemainingYawAfterHead, -MaxEyeHorizontal, MaxEyeHorizontal);

	// ── Pitch distribution (head + eyes only, body doesn't pitch) ────────────
	OutHeadPitch = FMath::Clamp(TotalPitch * HeadProportion,
		-MaxHeadPitch, MaxHeadPitch);
	const float RemainingPitch = TotalPitch - OutHeadPitch;
	OutEyePitch = FMath::Clamp(RemainingPitch, -MaxEyeVertical, MaxEyeVertical);
}
// ─────────────────────────────────────────────────────────────────────────────
// Map eye angles to 8 ARKit eye curves
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::UpdateEyeCurves(float EyeYaw, float EyePitch)
{
CurrentEyeCurves.Reset();
// Horizontal: positive yaw = looking right
if (EyeYaw > 0.0f)
{
// Looking right: left eye looks outward, right eye looks inward (nasal)
const float Value = FMath::Clamp(EyeYaw / MaxEyeHorizontal, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookOutLeft, Value);
CurrentEyeCurves.Add(EyeLookInRight, Value);
}
else if (EyeYaw < 0.0f)
{
// Looking left: left eye looks inward (nasal), right eye looks outward
const float Value = FMath::Clamp(-EyeYaw / MaxEyeHorizontal, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookInLeft, Value);
CurrentEyeCurves.Add(EyeLookOutRight, Value);
}
// Vertical: positive pitch = looking up
if (EyePitch > 0.0f)
{
const float Value = FMath::Clamp(EyePitch / MaxEyeVertical, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookUpLeft, Value);
CurrentEyeCurves.Add(EyeLookUpRight, Value);
}
else if (EyePitch < 0.0f)
{
const float Value = FMath::Clamp(-EyePitch / MaxEyeVertical, 0.0f, 1.0f);
CurrentEyeCurves.Add(EyeLookDownLeft, Value);
CurrentEyeCurves.Add(EyeLookDownRight, Value);
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Tick — compute, distribute, smooth, apply
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsPostureComponent::TickComponent(
	float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction)
{
	Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
	AActor* Owner = GetOwner();
	if (!Owner)
		return;

	// ── 1. Compute total desired angles ──────────────────────────────────────
	float TotalYaw = 0.0f;
	float TotalPitch = 0.0f;
	ComputeDesiredAngles(TotalYaw, TotalPitch);

	// ── 2. Distribute across layers ──────────────────────────────────────────
	float DesiredBodyYaw = 0.0f;
	float DesiredHeadYaw = 0.0f, DesiredHeadPitch = 0.0f;
	float DesiredEyeYaw = 0.0f, DesiredEyePitch = 0.0f;
	if (TargetActor)
	{
		DistributeAngles(TotalYaw, TotalPitch,
			DesiredBodyYaw,
			DesiredHeadYaw, DesiredHeadPitch,
			DesiredEyeYaw, DesiredEyePitch);
	}
	// else: all desired = 0 (return to neutral)

	// ── 3. Smooth interpolation ──────────────────────────────────────────────
	// Per-layer speeds while tracking; a common neutral-return speed when the
	// target is cleared. (FIX: removed an unused `Speed` local that was
	// declared here but never read.)
	const float BodySpeed = TargetActor ? BodyInterpSpeed : ReturnToNeutralSpeed;
	const float HeadSpeed = TargetActor ? HeadInterpSpeed : ReturnToNeutralSpeed;
	const float EyeSpeed = TargetActor ? EyeInterpSpeed : ReturnToNeutralSpeed;
	CurrentBodyYaw = FMath::FInterpTo(CurrentBodyYaw, DesiredBodyYaw, DeltaTime, BodySpeed);
	CurrentHeadYaw = FMath::FInterpTo(CurrentHeadYaw, DesiredHeadYaw, DeltaTime, HeadSpeed);
	CurrentHeadPitch = FMath::FInterpTo(CurrentHeadPitch, DesiredHeadPitch, DeltaTime, HeadSpeed);
	CurrentEyeYaw = FMath::FInterpTo(CurrentEyeYaw, DesiredEyeYaw, DeltaTime, EyeSpeed);
	CurrentEyePitch = FMath::FInterpTo(CurrentEyePitch, DesiredEyePitch, DeltaTime, EyeSpeed);

	// ── 4. Apply body rotation (delta to avoid drift) ────────────────────────
	// Only the change since the last applied value is added to actor yaw, so
	// rotation applied by other systems is not overwritten wholesale.
	const float BodyYawDelta = CurrentBodyYaw - AppliedBodyYaw;
	if (FMath::Abs(BodyYawDelta) > 0.01f)
	{
		FRotator CurrentRot = Owner->GetActorRotation();
		CurrentRot.Yaw += BodyYawDelta;
		Owner->SetActorRotation(CurrentRot);
		AppliedBodyYaw = CurrentBodyYaw;
	}

	// ── 5. Store head rotation for AnimNode ──────────────────────────────────
	// FRotator's constructor order is (Pitch, Yaw, Roll).
	CurrentHeadRotation = FRotator(CurrentHeadPitch, CurrentHeadYaw, 0.0f);

	// ── 6. Update eye curves for AnimNode ────────────────────────────────────
	UpdateEyeCurves(CurrentEyeYaw, CurrentEyePitch);
}

View File

@ -0,0 +1,61 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Animation/AnimNodeBase.h"
#include "BoneContainer.h"
#include "AnimNode_ElevenLabsPosture.generated.h"
class UElevenLabsPostureComponent;
/**
* Animation node that injects ElevenLabs posture data into the AnimGraph.
*
* Handles two types of output:
* 1. Head bone rotation (yaw + pitch) applied directly to the bone transform
* 2. Eye gaze curves (8 ARKit eye look curves) injected as animation curves
*
* Place this node in the MetaHuman Face AnimBP AFTER the Facial Expression node
* and BEFORE the Lip Sync node.
*
* Graph layout:
* [Source] → [Facial Expression] → [ElevenLabs Posture] → [Lip Sync] → [mh_arkit_mapping_pose] → ...
*
* The node auto-discovers the ElevenLabsPostureComponent — no manual wiring needed.
*/
USTRUCT(BlueprintInternalUseOnly)
struct PS_AI_AGENT_ELEVENLABS_API FAnimNode_ElevenLabsPosture : public FAnimNode_Base
{
GENERATED_USTRUCT_BODY()
/** Input pose to pass through. Connect your upstream pose source here. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Links)
FPoseLink BasePose;
// ── FAnimNode_Base interface ──────────────────────────────────────────────
virtual void Initialize_AnyThread(const FAnimationInitializeContext& Context) override;
virtual void CacheBones_AnyThread(const FAnimationCacheBonesContext& Context) override;
virtual void Update_AnyThread(const FAnimationUpdateContext& Context) override;
virtual void Evaluate_AnyThread(FPoseContext& Output) override;
virtual void GatherDebugData(FNodeDebugData& DebugData) override;
private:
/** Cached reference to the posture component on the owning actor.
 * Resolved in Initialize_AnyThread; may become stale if the component is
 * destroyed, hence the weak pointer. */
TWeakObjectPtr<UElevenLabsPostureComponent> PostureComponent;
/** Eye gaze curves to inject (8 ARKit eye look curves).
 * Copied from the component during Update (game thread safe). */
TMap<FName, float> CachedEyeCurves;
/** Head rotation offset (yaw + pitch) to apply to the head bone.
 * Copied from the component during Update. */
FRotator CachedHeadRotation = FRotator::ZeroRotator;
/** Resolved head bone index in the skeleton.
 * INDEX_NONE until CacheBones_AnyThread finds the bone. */
FCompactPoseBoneIndex HeadBoneIndex = FCompactPoseBoneIndex(INDEX_NONE);
/** Head bone name cached from the component at initialization. */
FName HeadBoneName;
};

View File

@ -0,0 +1,186 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Components/ActorComponent.h"
#include "ElevenLabsPostureComponent.generated.h"
class USkeletalMeshComponent;
// ─────────────────────────────────────────────────────────────────────────────
// UElevenLabsPostureComponent
//
// Multi-layer look-at system for MetaHuman characters. Smoothly orients the
// character's body, head, and eyes toward a TargetActor using configurable
// rotation percentages and angle limits.
//
// Rotation is distributed across 3 layers:
// Body (60%) — Rotates the entire owning actor (yaw only)
// Head (20%) — Rotates the head bone via AnimNode
// Eyes (10%) — Drives ARKit eye look curves via AnimNode
//
// If TargetActor is null, all layers smoothly return to neutral.
// If the target is behind the character, angles clamp at their max limits.
//
// Workflow:
// 1. Add this component to the character Blueprint.
// 2. Add the AnimNode "ElevenLabs Posture" in the Face AnimBP
// between "Facial Expression" and "Lip Sync" nodes.
// 3. Set TargetActor to any actor (player pawn, a prop, etc.).
// 4. Set TargetOffset for actors without a skeleton (e.g. (0,0,160) for
// eye-level on a simple actor).
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "ElevenLabs", meta = (BlueprintSpawnableComponent),
DisplayName = "ElevenLabs Posture")
class PS_AI_AGENT_ELEVENLABS_API UElevenLabsPostureComponent : public UActorComponent
{
GENERATED_BODY()
public:
UElevenLabsPostureComponent();
// ── Target ───────────────────────────────────────────────────────────────
/** The actor to look at. Can be any actor (player, prop, etc.).
 * Set to null to smoothly return to neutral. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral."))
TObjectPtr<AActor> TargetActor;
/** Offset from the target actor's origin to aim at.
 * Useful for actors without a skeleton (e.g. (0,0,160) for eye-level). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ToolTip = "Offset from target actor origin.\nE.g. (0,0,160) for eye-level."))
FVector TargetOffset = FVector(0.0f, 0.0f, 160.0f);
// ── Rotation distribution (%) ────────────────────────────────────────────
// NOTE: the three percentages need not sum to 100; distribution math treats
// body share as BodyRotationPercent/100 and splits the rest head-vs-eyes.
/** Percentage of total rotation handled by body (whole actor yaw). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float BodyRotationPercent = 60.0f;
/** Percentage of total rotation handled by head bone. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float HeadRotationPercent = 20.0f;
/** Percentage of total rotation handled by eye look curves. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "100"))
float EyeRotationPercent = 10.0f;
// ── Angle limits (degrees) ───────────────────────────────────────────────
/** Maximum body yaw rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "180"))
float MaxBodyYaw = 45.0f;
/** Maximum head yaw rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxHeadYaw = 35.0f;
/** Maximum head pitch rotation in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxHeadPitch = 25.0f;
/** Maximum horizontal eye angle in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxEyeHorizontal = 30.0f;
/** Maximum vertical eye angle in degrees. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0", ClampMax = "90"))
float MaxEyeVertical = 20.0f;
// ── Smoothing speeds ─────────────────────────────────────────────────────
/** Body rotation interpolation speed (lower = slower, more natural). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0.1", ClampMax = "20"))
float BodyInterpSpeed = 2.0f;
/** Head rotation interpolation speed. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0.1", ClampMax = "20"))
float HeadInterpSpeed = 5.0f;
/** Eye movement interpolation speed (higher = snappier). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0.1", ClampMax = "20"))
float EyeInterpSpeed = 8.0f;
/** Interpolation speed when returning to neutral (TargetActor is null). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture",
meta = (ClampMin = "0.1", ClampMax = "20"))
float ReturnToNeutralSpeed = 3.0f;
// ── Head bone ────────────────────────────────────────────────────────────
/** Name of the head bone on the skeletal mesh (used for eye origin calculation). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|Posture")
FName HeadBoneName = FName(TEXT("head"));
// ── Getters (read by AnimNode) ───────────────────────────────────────────
/** Get current eye gaze curves (8 ARKit eye look curves).
 * NOTE(review): returns a const reference from a BlueprintCallable — verify
 * UHT/Blueprint handles the container reference return as intended. */
UFUNCTION(BlueprintCallable, Category = "ElevenLabs|Posture")
const TMap<FName, float>& GetCurrentEyeCurves() const { return CurrentEyeCurves; }
/** Get current head rotation offset (yaw + pitch, applied by AnimNode). */
UFUNCTION(BlueprintCallable, Category = "ElevenLabs|Posture")
FRotator GetCurrentHeadRotation() const { return CurrentHeadRotation; }
/** Get the head bone name (used by AnimNode to resolve bone index). */
FName GetHeadBoneName() const { return HeadBoneName; }
// ── UActorComponent overrides ────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void TickComponent(float DeltaTime, ELevelTick TickType,
FActorComponentTickFunction* ThisTickFunction) override;
private:
// ── Internals ────────────────────────────────────────────────────────────
/** Compute the total yaw/pitch angle from the character to the target. */
void ComputeDesiredAngles(float& OutTotalYaw, float& OutTotalPitch) const;
/** Distribute a total angle across body/head/eye layers. */
void DistributeAngles(float TotalYaw, float TotalPitch,
float& OutBodyYaw,
float& OutHeadYaw, float& OutHeadPitch,
float& OutEyeYaw, float& OutEyePitch) const;
/** Map eye yaw/pitch angles to 8 ARKit eye curves. */
void UpdateEyeCurves(float EyeYaw, float EyePitch);
// ── Smoothed current values ──────────────────────────────────────────────
// Written each TickComponent via FInterpTo; these are the interpolated
// (not instantaneous desired) layer angles.
float CurrentBodyYaw = 0.0f;
float CurrentHeadYaw = 0.0f;
float CurrentHeadPitch = 0.0f;
float CurrentEyeYaw = 0.0f;
float CurrentEyePitch = 0.0f;
/** Previous body yaw, for delta rotation (avoids drift). */
float AppliedBodyYaw = 0.0f;
/** Original actor yaw at BeginPlay (for neutral reference). */
float OriginalActorYaw = 0.0f;
// ── Output data ──────────────────────────────────────────────────────────
// Written on the game thread in TickComponent and read by the AnimNode's
// Update_AnyThread — see the AnimNode for the threading caveat.
/** 8 ARKit eye look curves (eyeLookUpLeft, eyeLookDownRight, etc.). */
TMap<FName, float> CurrentEyeCurves;
/** Head bone rotation offset (Yaw + Pitch). */
FRotator CurrentHeadRotation = FRotator::ZeroRotator;
/** Cached skeletal mesh component on the owning actor. */
TWeakObjectPtr<USkeletalMeshComponent> CachedMesh;
};

View File

@ -0,0 +1,33 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimGraphNode_ElevenLabsPosture.h"
#define LOCTEXT_NAMESPACE "AnimNode_ElevenLabsPosture"
// Title shown on the node in the AnimGraph editor (all title types).
FText UAnimGraphNode_ElevenLabsPosture::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
return LOCTEXT("NodeTitle", "ElevenLabs Posture");
}
// Hover tooltip describing what the node does and where to place it.
FText UAnimGraphNode_ElevenLabsPosture::GetTooltipText() const
{
return LOCTEXT("Tooltip",
"Injects head rotation and eye gaze curves from the ElevenLabs Posture component.\n\n"
"Place this node AFTER the ElevenLabs Facial Expression node and\n"
"BEFORE the ElevenLabs Lip Sync node in the MetaHuman Face AnimBP.\n\n"
"The component distributes look-at rotation across body (actor yaw),\n"
"head (bone rotation), and eyes (ARKit curves) for a natural look-at effect.");
}
FString UAnimGraphNode_ElevenLabsPosture::GetNodeCategory() const
{
	// Groups the node under "ElevenLabs" in the AnimGraph palette.
	static const FString CategoryName(TEXT("ElevenLabs"));
	return CategoryName;
}
FLinearColor UAnimGraphNode_ElevenLabsPosture::GetNodeTitleColor() const
{
	// Cool blue to distinguish from Facial Expression (amber) and Lip Sync (teal)
	constexpr float Red = 0.2f;
	constexpr float Green = 0.4f;
	constexpr float Blue = 0.9f;
	constexpr float Alpha = 1.0f;
	return FLinearColor(Red, Green, Blue, Alpha);
}
#undef LOCTEXT_NAMESPACE

View File

@ -0,0 +1,33 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "AnimGraphNode_Base.h"
#include "AnimNode_ElevenLabsPosture.h"
#include "AnimGraphNode_ElevenLabsPosture.generated.h"
/**
* AnimGraph editor node for the ElevenLabs Posture AnimNode.
*
* This node appears in the AnimBP graph editor under the "ElevenLabs" category.
* Place it AFTER the ElevenLabs Facial Expression node and BEFORE the
* ElevenLabs Lip Sync node in the MetaHuman Face AnimBP.
*
* It auto-discovers the ElevenLabsPostureComponent on the owning Actor
* and injects head bone rotation + ARKit eye gaze curves for look-at tracking.
*/
UCLASS()
class UAnimGraphNode_ElevenLabsPosture : public UAnimGraphNode_Base
{
GENERATED_BODY()
/** The runtime AnimNode this editor node configures and compiles into. */
UPROPERTY(EditAnywhere, Category = "Settings")
FAnimNode_ElevenLabsPosture Node;
// UAnimGraphNode_Base interface
virtual FText GetNodeTitle(ENodeTitleType::Type TitleType) const override;
virtual FText GetTooltipText() const override;
virtual FString GetNodeCategory() const override;
virtual FLinearColor GetNodeTitleColor() const override;
};