v2.2.0: Separate AnimNode for facial expressions + real-time emotion anim playback

- New AnimNode_ElevenLabsFacialExpression: independent AnimBP node for emotion expressions
- New AnimGraphNode (amber color) in ElevenLabs category for AnimBP editor
- Emotion AnimSequences now play in real-time (looping) instead of static pose at t=0
- Smooth crossfade between emotions with configurable duration
- LipSync AnimNode skips near-zero curves so emotion base layer shows through during silence
- Removed emotion merge from LipSyncComponent (now handled by AnimBP node ordering)
- Removed verbose per-tick VISEME debug log
- Two-layer AnimBP chain: FacialExpression → LipSync → mh_arkit_mapping_pose

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
j.foucher 2026-02-24 19:13:18 +01:00
parent 949efd5578
commit f6541bd7e2
11 changed files with 384 additions and 180 deletions

View File

@ -0,0 +1,92 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimNode_ElevenLabsFacialExpression.h"
#include "ElevenLabsFacialExpressionComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "Animation/AnimInstanceProxy.h"
#include "GameFramework/Actor.h"
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsFacialExprAnimNode, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// FAnimNode_Base interface
// ─────────────────────────────────────────────────────────────────────────────
void FAnimNode_ElevenLabsFacialExpression::Initialize_AnyThread(const FAnimationInitializeContext& Context)
{
BasePose.Initialize(Context);
// Find the ElevenLabsFacialExpressionComponent on the owning actor.
// This runs during initialization (game thread) so actor access is safe.
FacialExpressionComponent.Reset();
CachedEmotionCurves.Reset();
if (const FAnimInstanceProxy* Proxy = Context.AnimInstanceProxy)
{
if (const USkeletalMeshComponent* SkelMesh = Proxy->GetSkelMeshComponent())
{
if (AActor* Owner = SkelMesh->GetOwner())
{
UElevenLabsFacialExpressionComponent* Comp =
Owner->FindComponentByClass<UElevenLabsFacialExpressionComponent>();
if (Comp)
{
FacialExpressionComponent = Comp;
UE_LOG(LogElevenLabsFacialExprAnimNode, Log,
TEXT("ElevenLabs Facial Expression AnimNode bound to component on %s."),
*Owner->GetName());
}
else
{
UE_LOG(LogElevenLabsFacialExprAnimNode, Warning,
TEXT("No ElevenLabsFacialExpressionComponent found on %s. "
"Add the component alongside the Conversational Agent."),
*Owner->GetName());
}
}
}
}
}
// Pure pass-through: this node only injects curves, so bone caching is
// delegated entirely to the upstream pose link.
void FAnimNode_ElevenLabsFacialExpression::CacheBones_AnyThread(const FAnimationCacheBonesContext& Context)
{
BasePose.CacheBones(Context);
}
void FAnimNode_ElevenLabsFacialExpression::Update_AnyThread(const FAnimationUpdateContext& Context)
{
BasePose.Update(Context);
// Cache emotion curves from the facial expression component.
// GetCurrentEmotionCurves() returns CTRL_expressions_* curves
// extracted from emotion pose AnimSequences, already smoothly blended.
CachedEmotionCurves.Reset();
if (FacialExpressionComponent.IsValid())
{
CachedEmotionCurves = FacialExpressionComponent->GetCurrentEmotionCurves();
}
}
void FAnimNode_ElevenLabsFacialExpression::Evaluate_AnyThread(FPoseContext& Output)
{
	// Pass the upstream pose straight through, then overlay emotion curves.
	BasePose.Evaluate(Output);

	// Inject the cached emotion curves (CTRL_expressions_* — MetaHuman native
	// format covering eyes, eyebrows, cheeks, nose, and mouth mood). A
	// downstream Lip Sync node overrides mouth-area curves during speech,
	// while the non-mouth emotion curves survive untouched.
	for (const TPair<FName, float>& Curve : CachedEmotionCurves)
	{
		Output.Curve.Set(Curve.Key, Curve.Value);
	}
}
void FAnimNode_ElevenLabsFacialExpression::GatherDebugData(FNodeDebugData& DebugData)
{
	// Report the node name plus the number of emotion curves currently
	// injected, then let the upstream pose contribute its own debug info.
	DebugData.AddDebugItem(
		FString::Printf(TEXT("ElevenLabs Facial Expression (%d curves)"), CachedEmotionCurves.Num()));
	BasePose.GatherDebugData(DebugData);
}

View File

@ -74,13 +74,16 @@ void FAnimNode_ElevenLabsLipSync::Evaluate_AnyThread(FPoseContext& Output)
// Evaluate the upstream pose (pass-through)
BasePose.Evaluate(Output);
// Inject ARKit lip sync curves into the pose output.
// The mh_arkit_mapping_pose node downstream will convert these
// ARKit names (jawOpen, mouthFunnel, etc.) to CTRL_expressions_*
// curves that drive the MetaHuman facial bones.
// Inject lip sync curves into the pose output.
// Skip near-zero values so that the upstream Facial Expression node's
// emotion curves (eyes, brows, mouth mood) pass through during silence.
// During speech, active lip sync curves override emotion's mouth curves.
for (const auto& Pair : CachedCurves)
{
Output.Curve.Set(Pair.Key, Pair.Value);
if (FMath::Abs(Pair.Value) > 0.01f)
{
Output.Curve.Set(Pair.Key, Pair.Value);
}
}
}

View File

@ -4,7 +4,7 @@
#include "ElevenLabsConversationalAgentComponent.h"
#include "ElevenLabsLipSyncPoseMap.h"
#include "Animation/AnimSequence.h"
#include "Components/SkeletalMeshComponent.h"
#include "Animation/AnimData/IAnimationDataModel.h"
DEFINE_LOG_CATEGORY_STATIC(LogElevenLabsFacialExpr, Log, All);
@ -26,7 +26,6 @@ void UElevenLabsFacialExpressionComponent::BeginPlay()
{
Super::BeginPlay();
// Find the agent component on the same actor
AActor* Owner = GetOwner();
if (!Owner)
{
@ -34,6 +33,7 @@ void UElevenLabsFacialExpressionComponent::BeginPlay()
return;
}
// Find and bind to agent component
auto* Agent = Owner->FindComponentByClass<UElevenLabsConversationalAgentComponent>();
if (Agent)
{
@ -52,8 +52,8 @@ void UElevenLabsFacialExpressionComponent::BeginPlay()
*Owner->GetName());
}
// Extract emotion curves from PoseMap
InitializeEmotionPoses();
// Validate emotion poses from PoseMap
ValidateEmotionPoses();
}
void UElevenLabsFacialExpressionComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
@ -68,13 +68,11 @@ void UElevenLabsFacialExpressionComponent::EndPlay(const EEndPlayReason::Type En
}
// ─────────────────────────────────────────────────────────────────────────────
// Emotion pose initialization
// Validation
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsFacialExpressionComponent::InitializeEmotionPoses()
void UElevenLabsFacialExpressionComponent::ValidateEmotionPoses()
{
EmotionCurveMap.Reset();
if (!PoseMap || PoseMap->EmotionPoses.Num() == 0)
{
UE_LOG(LogElevenLabsFacialExpr, Log,
@ -82,37 +80,57 @@ void UElevenLabsFacialExpressionComponent::InitializeEmotionPoses()
return;
}
int32 EmotionCount = 0;
int32 AnimCount = 0;
for (const auto& EmotionPair : PoseMap->EmotionPoses)
{
const EElevenLabsEmotion Emotion = EmotionPair.Key;
const FElevenLabsEmotionPoseSet& PoseSet = EmotionPair.Value;
auto& IntensityMap = EmotionCurveMap.FindOrAdd(Emotion);
if (PoseSet.Normal)
{
IntensityMap.Add(EElevenLabsEmotionIntensity::Low, ExtractCurvesFromAnim(PoseSet.Normal));
++EmotionCount;
}
if (PoseSet.Medium)
{
IntensityMap.Add(EElevenLabsEmotionIntensity::Medium, ExtractCurvesFromAnim(PoseSet.Medium));
++EmotionCount;
}
if (PoseSet.Extreme)
{
IntensityMap.Add(EElevenLabsEmotionIntensity::High, ExtractCurvesFromAnim(PoseSet.Extreme));
++EmotionCount;
}
if (PoseSet.Normal) ++AnimCount;
if (PoseSet.Medium) ++AnimCount;
if (PoseSet.Extreme) ++AnimCount;
}
UE_LOG(LogElevenLabsFacialExpr, Log,
TEXT("=== Emotion poses: %d emotions, %d total anim slots loaded ==="),
PoseMap->EmotionPoses.Num(), EmotionCount);
TEXT("=== Emotion poses: %d emotions, %d anim slots available ==="),
PoseMap->EmotionPoses.Num(), AnimCount);
}
TMap<FName, float> UElevenLabsFacialExpressionComponent::ExtractCurvesFromAnim(UAnimSequence* AnimSeq)
// ─────────────────────────────────────────────────────────────────────────────
// Find AnimSequence for emotion + intensity (with fallback)
// ─────────────────────────────────────────────────────────────────────────────
UAnimSequence* UElevenLabsFacialExpressionComponent::FindAnimForEmotion(
	EElevenLabsEmotion Emotion, EElevenLabsEmotionIntensity Intensity) const
{
	if (!PoseMap)
	{
		return nullptr;
	}

	const FElevenLabsEmotionPoseSet* PoseSet = PoseMap->EmotionPoses.Find(Emotion);
	if (!PoseSet)
	{
		return nullptr;
	}

	// Exact intensity match first (Low->Normal, Medium->Medium, High->Extreme).
	UAnimSequence* Direct =
		(Intensity == EElevenLabsEmotionIntensity::Low)    ? PoseSet->Normal :
		(Intensity == EElevenLabsEmotionIntensity::Medium) ? PoseSet->Medium :
		(Intensity == EElevenLabsEmotionIntensity::High)   ? PoseSet->Extreme :
		nullptr;
	if (Direct)
	{
		return Direct;
	}

	// Fallback order: Medium, then Low (Normal), then High (Extreme).
	if (PoseSet->Medium)
	{
		return PoseSet->Medium;
	}
	if (PoseSet->Normal)
	{
		return PoseSet->Normal;
	}
	return PoseSet->Extreme; // May still be null — caller treats that as "no anim".
}
// ─────────────────────────────────────────────────────────────────────────────
// Evaluate all FloatCurves from an AnimSequence at a given time
// ─────────────────────────────────────────────────────────────────────────────
TMap<FName, float> UElevenLabsFacialExpressionComponent::EvaluateAnimCurves(
UAnimSequence* AnimSeq, float Time) const
{
TMap<FName, float> CurveValues;
if (!AnimSeq) return CurveValues;
@ -123,15 +141,13 @@ TMap<FName, float> UElevenLabsFacialExpressionComponent::ExtractCurvesFromAnim(U
const TArray<FFloatCurve>& FloatCurves = DataModel->GetFloatCurves();
for (const FFloatCurve& Curve : FloatCurves)
{
const FName CurveName = Curve.GetName();
const float Value = Curve.FloatCurve.Eval(0.0f);
if (FMath::Abs(Value) < 0.001f) continue;
CurveValues.Add(CurveName, Value);
const float Value = Curve.FloatCurve.Eval(Time);
if (FMath::Abs(Value) > 0.001f)
{
CurveValues.Add(Curve.GetName(), Value);
}
}
UE_LOG(LogElevenLabsFacialExpr, Log,
TEXT("Emotion anim '%s': Extracted %d non-zero curves."),
*AnimSeq->GetName(), CurveValues.Num());
return CurveValues;
}
@ -148,43 +164,28 @@ void UElevenLabsFacialExpressionComponent::OnEmotionChanged(
ActiveEmotion = Emotion;
ActiveEmotionIntensity = Intensity;
// Look up target emotion curves
TargetEmotionCurves.Reset();
const auto* IntensityMap = EmotionCurveMap.Find(Emotion);
if (IntensityMap)
{
const auto* Curves = IntensityMap->Find(Intensity);
if (Curves)
{
TargetEmotionCurves = *Curves;
}
else
{
// Fallback: try Medium, then Low, then High
static const EElevenLabsEmotionIntensity Fallbacks[] = {
EElevenLabsEmotionIntensity::Medium,
EElevenLabsEmotionIntensity::Low,
EElevenLabsEmotionIntensity::High
};
for (EElevenLabsEmotionIntensity Fb : Fallbacks)
{
Curves = IntensityMap->Find(Fb);
if (Curves) { TargetEmotionCurves = *Curves; break; }
}
}
}
// Find the AnimSequence for the new emotion
UAnimSequence* NewAnim = FindAnimForEmotion(Emotion, Intensity);
// Start blending from current to target
EmotionBlendAlpha = 0.0f;
// Start crossfade: current active becomes previous
PrevAnim = ActiveAnim;
PrevPlaybackTime = ActivePlaybackTime;
// New anim starts playing from the beginning
ActiveAnim = NewAnim;
ActivePlaybackTime = 0.0f;
// Begin crossfade
CrossfadeAlpha = 0.0f;
UE_LOG(LogElevenLabsFacialExpr, Log,
TEXT("Emotion target set: %s (%s) — %d curves, blending over %.1fs..."),
TEXT("Emotion changed: %s (%s) — anim: %s, crossfading over %.1fs..."),
*UEnum::GetValueAsString(Emotion), *UEnum::GetValueAsString(Intensity),
TargetEmotionCurves.Num(), EmotionBlendDuration);
NewAnim ? *NewAnim->GetName() : TEXT("(none)"), EmotionBlendDuration);
}
// ─────────────────────────────────────────────────────────────────────────────
// Tick — smooth emotion blending
// Tick — play emotion animation and crossfade
// ─────────────────────────────────────────────────────────────────────────────
void UElevenLabsFacialExpressionComponent::TickComponent(
@ -192,34 +193,79 @@ void UElevenLabsFacialExpressionComponent::TickComponent(
{
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
if (EmotionCurveMap.Num() == 0)
return; // No emotion data loaded
// Nothing to play
if (!ActiveAnim && !PrevAnim)
return;
// Advance blend alpha
if (EmotionBlendAlpha < 1.0f)
// ── Advance playback cursors (looping) ──────────────────────────────────
if (ActiveAnim)
{
const float BlendSpeed = 1.0f / FMath::Max(0.05f, EmotionBlendDuration);
EmotionBlendAlpha = FMath::Min(1.0f, EmotionBlendAlpha + DeltaTime * BlendSpeed);
ActivePlaybackTime += DeltaTime;
const float Duration = ActiveAnim->GetPlayLength();
if (Duration > 0.0f)
{
ActivePlaybackTime = FMath::Fmod(ActivePlaybackTime, Duration);
}
}
// Blend CurrentEmotionCurves toward TargetEmotionCurves
if (PrevAnim && CrossfadeAlpha < 1.0f)
{
PrevPlaybackTime += DeltaTime;
const float Duration = PrevAnim->GetPlayLength();
if (Duration > 0.0f)
{
PrevPlaybackTime = FMath::Fmod(PrevPlaybackTime, Duration);
}
}
// ── Advance crossfade ───────────────────────────────────────────────────
if (CrossfadeAlpha < 1.0f)
{
const float BlendSpeed = 1.0f / FMath::Max(0.05f, EmotionBlendDuration);
CrossfadeAlpha = FMath::Min(1.0f, CrossfadeAlpha + DeltaTime * BlendSpeed);
// Crossfade complete — release previous anim
if (CrossfadeAlpha >= 1.0f)
{
PrevAnim = nullptr;
PrevPlaybackTime = 0.0f;
}
}
// ── Evaluate curves from playing animations ─────────────────────────────
TMap<FName, float> ActiveCurves = EvaluateAnimCurves(ActiveAnim, ActivePlaybackTime);
if (CrossfadeAlpha >= 1.0f)
{
// No crossfade — use active curves directly
CurrentEmotionCurves = MoveTemp(ActiveCurves);
}
else
{
// Crossfading — blend between previous and active anim curves
TMap<FName, float> PrevCurves = EvaluateAnimCurves(PrevAnim, PrevPlaybackTime);
// Collect all curve names from both anims
CurrentEmotionCurves.Reset();
TSet<FName> AllCurves;
for (const auto& P : CurrentEmotionCurves) AllCurves.Add(P.Key);
for (const auto& P : TargetEmotionCurves) AllCurves.Add(P.Key);
for (const auto& P : ActiveCurves) AllCurves.Add(P.Key);
for (const auto& P : PrevCurves) AllCurves.Add(P.Key);
for (const FName& CurveName : AllCurves)
{
const float Current = CurrentEmotionCurves.Contains(CurveName)
? CurrentEmotionCurves[CurveName] : 0.0f;
const float Target = TargetEmotionCurves.Contains(CurveName)
? TargetEmotionCurves[CurveName] : 0.0f;
const float Blended = FMath::Lerp(Current, Target, EmotionBlendAlpha);
const float PrevVal = PrevCurves.Contains(CurveName)
? PrevCurves[CurveName] : 0.0f;
const float ActiveVal = ActiveCurves.Contains(CurveName)
? ActiveCurves[CurveName] : 0.0f;
const float Blended = FMath::Lerp(PrevVal, ActiveVal, CrossfadeAlpha);
if (FMath::Abs(Blended) > 0.001f)
CurrentEmotionCurves.FindOrAdd(CurveName) = Blended;
else
CurrentEmotionCurves.Remove(CurveName);
{
CurrentEmotionCurves.Add(CurveName, Blended);
}
}
}
}

View File

@ -3,7 +3,6 @@
#include "ElevenLabsLipSyncComponent.h"
#include "ElevenLabsLipSyncPoseMap.h"
#include "ElevenLabsConversationalAgentComponent.h"
#include "ElevenLabsFacialExpressionComponent.h"
#include "Components/SkeletalMeshComponent.h"
#include "Engine/SkeletalMesh.h"
#include "Animation/MorphTarget.h"
@ -820,28 +819,6 @@ void UElevenLabsLipSyncComponent::TickComponent(float DeltaTime, ELevelTick Tick
}
}
// Real-time viseme debug log — every 3 ticks (~100ms at 30fps).
// Shows all active smoothed visemes + envelope to diagnose trembling.
static int32 TickLogCount = 0;
if (++TickLogCount % 3 == 0 && bAnyNonZero)
{
FString ActiveVisemes;
for (const FName& Name : VisemeNames)
{
const float W = SmoothedVisemes.FindOrAdd(Name);
if (W > 0.01f)
{
if (ActiveVisemes.Len() > 0) ActiveVisemes += TEXT(" ");
ActiveVisemes += FString::Printf(TEXT("%s=%.3f"), *Name.ToString(), W);
}
}
if (ActiveVisemes.IsEmpty()) ActiveVisemes = TEXT("(none)");
UE_LOG(LogElevenLabsLipSync, Log,
TEXT("VISEME Q=%d Env=%.3f TL=%.0fms | %s"),
VisemeQueue.Num(), AudioEnvelopeValue, VisemeTimelineCursor * 1000.0f, *ActiveVisemes);
}
// Convert visemes to ARKit blendshapes
MapVisemesToBlendshapes();
@ -2275,44 +2252,6 @@ void UElevenLabsLipSyncComponent::MapVisemesToBlendshapes()
}
}
// ── Merge emotion base layer from FacialExpressionComponent ──────────
// Emotion provides the base expression (eyes, brows, cheeks).
// Lip sync overrides only mouth-area curves.
if (AActor* Owner = GetOwner())
{
if (auto* FaceExpr = Owner->FindComponentByClass<UElevenLabsFacialExpressionComponent>())
{
const TMap<FName, float>& EmotionCurves = FaceExpr->GetCurrentEmotionCurves();
if (EmotionCurves.Num() > 0)
{
// Collect which curves lip sync is actively driving (mouth area)
TSet<FName> LipSyncMouthCurves;
for (const auto& Pair : CurrentBlendshapes)
{
if (UElevenLabsFacialExpressionComponent::IsMouthCurve(Pair.Key) && Pair.Value > 0.01f)
LipSyncMouthCurves.Add(Pair.Key);
}
// Add non-mouth emotion curves (eyes, brows, cheeks, nose)
for (const auto& Pair : EmotionCurves)
{
if (!UElevenLabsFacialExpressionComponent::IsMouthCurve(Pair.Key))
{
// Emotion controls non-mouth curves exclusively
CurrentBlendshapes.FindOrAdd(Pair.Key) = Pair.Value;
}
else if (!LipSyncMouthCurves.Contains(Pair.Key))
{
// Mouth curves from emotion only if lip sync has nothing active there
// (e.g. during silence, the emotion's mouth pose shows through)
CurrentBlendshapes.FindOrAdd(Pair.Key) = Pair.Value;
}
// Otherwise: lip sync already has a value for this mouth curve — keep it
}
}
}
}
// Clamp all values. Use wider range for pose data (CTRL curves can exceed 1.0).
const float MaxClamp = bUsePoseMapping ? 2.0f : 1.0f;
for (auto& Pair : CurrentBlendshapes)

View File

@ -0,0 +1,51 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Animation/AnimNodeBase.h"
#include "AnimNode_ElevenLabsFacialExpression.generated.h"
class UElevenLabsFacialExpressionComponent;
/**
* Animation node that injects ElevenLabs facial expression curves into the AnimGraph.
*
* Place this node in the MetaHuman Face AnimBP BEFORE the ElevenLabs Lip Sync node.
* It reads emotion curves (eyes, eyebrows, cheeks, mouth mood) from the
* ElevenLabsFacialExpressionComponent on the same Actor and outputs them as
* CTRL_expressions_* animation curves.
*
* The Lip Sync node placed AFTER this one will override mouth-area curves
* during speech, while non-mouth emotion curves (eyes, brows) pass through.
*
* Graph layout:
* [Live Link Pose] → [ElevenLabs Facial Expression] → [ElevenLabs Lip Sync] → [mh_arkit_mapping_pose] → ...
*
* The node auto-discovers the FacialExpressionComponent — no manual wiring needed.
*/
USTRUCT(BlueprintInternalUseOnly)
struct PS_AI_AGENT_ELEVENLABS_API FAnimNode_ElevenLabsFacialExpression : public FAnimNode_Base
{
GENERATED_USTRUCT_BODY()
/** Input pose to pass through. Connect your upstream pose source here. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = Links)
FPoseLink BasePose;
// ── FAnimNode_Base interface ──────────────────────────────────────────────
// Initialize: resolves the FacialExpressionComponent on the owning actor (game thread).
virtual void Initialize_AnyThread(const FAnimationInitializeContext& Context) override;
// CacheBones: pure pass-through to BasePose.
virtual void CacheBones_AnyThread(const FAnimationCacheBonesContext& Context) override;
// Update: snapshots the component's current emotion curves into CachedEmotionCurves.
virtual void Update_AnyThread(const FAnimationUpdateContext& Context) override;
// Evaluate: evaluates BasePose, then overlays the cached curves onto the output.
virtual void Evaluate_AnyThread(FPoseContext& Output) override;
virtual void GatherDebugData(FNodeDebugData& DebugData) override;
private:
/** Cached reference to the facial expression component on the owning actor. */
TWeakObjectPtr<UElevenLabsFacialExpressionComponent> FacialExpressionComponent;
/** Emotion expression curves to inject (CTRL_expressions_* format).
* Copied from the component during Update (game thread safe). */
TMap<FName, float> CachedEmotionCurves;
};

View File

@ -9,21 +9,26 @@
class UElevenLabsConversationalAgentComponent;
class UElevenLabsLipSyncPoseMap;
class USkeletalMeshComponent;
class UAnimSequence;
// ─────────────────────────────────────────────────────────────────────────────
// UElevenLabsFacialExpressionComponent
//
// Drives emotion-based facial expressions on a MetaHuman (or any skeletal mesh)
// as a BASE layer. Lip sync (from ElevenLabsLipSyncComponent) modulates on top,
// overriding only mouth-area curves.
// overriding only mouth-area curves during speech.
//
// Emotion AnimSequences are played back in real-time (looping), not sampled
// as a static pose. This preserves micro-movements, blinks, and breathing
// authored into the animations.
//
// Workflow:
// 1. Assign a PoseMap data asset with Emotion Poses filled in.
// 2. Assign the TargetMesh (same mesh as the LipSync component).
// 2. Add the AnimNode "ElevenLabs Facial Expression" in the Face AnimBP
// BEFORE the "ElevenLabs Lip Sync" node.
// 3. The component listens to OnAgentEmotionChanged from the agent component.
// 4. Emotion curves are smoothly blended (~500ms transitions).
// 5. The LipSync component reads GetCurrentEmotionCurves() to merge as base layer.
// 4. Emotion animations crossfade smoothly (configurable duration).
// 5. The AnimNode reads GetCurrentEmotionCurves() and injects them into the pose.
// ─────────────────────────────────────────────────────────────────────────────
UCLASS(ClassGroup = "ElevenLabs", meta = (BlueprintSpawnableComponent),
DisplayName = "ElevenLabs Facial Expression")
@ -42,21 +47,15 @@ public:
meta = (ToolTip = "Pose map with Emotion Poses filled in.\nCan be the same asset as the LipSync component."))
TObjectPtr<UElevenLabsLipSyncPoseMap> PoseMap;
/** Skeletal mesh to apply emotion curves to.
* Should be the same mesh as the LipSync component's TargetMesh. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|FacialExpression",
meta = (ToolTip = "Skeletal mesh for emotion curves.\nShould match the LipSync component's TargetMesh."))
TObjectPtr<USkeletalMeshComponent> TargetMesh;
/** Emotion transition duration in seconds. */
/** Emotion crossfade duration in seconds. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ElevenLabs|FacialExpression",
meta = (ClampMin = "0.1", ClampMax = "3.0",
ToolTip = "How long (seconds) to blend between emotions.\n0.5 = snappy, 1.5 = smooth."))
ToolTip = "How long (seconds) to crossfade between emotions.\n0.5 = snappy, 1.5 = smooth."))
float EmotionBlendDuration = 0.5f;
// ── Getters ───────────────────────────────────────────────────────────────
/** Get the current smoothed emotion curves (for the LipSync component to merge). */
/** Get the current emotion curves evaluated from the playing AnimSequence. */
UFUNCTION(BlueprintCallable, Category = "ElevenLabs|FacialExpression")
const TMap<FName, float>& GetCurrentEmotionCurves() const { return CurrentEmotionCurves; }
@ -85,28 +84,39 @@ private:
UFUNCTION()
void OnEmotionChanged(EElevenLabsEmotion Emotion, EElevenLabsEmotionIntensity Intensity);
// ── Curve extraction ──────────────────────────────────────────────────────
// ── Helpers ───────────────────────────────────────────────────────────────
/** Extract curve values at t=0 from an AnimSequence. */
TMap<FName, float> ExtractCurvesFromAnim(UAnimSequence* AnimSeq);
/** Validate PoseMap emotion entries at BeginPlay. */
void ValidateEmotionPoses();
/** Initialize emotion curve data from PoseMap at BeginPlay. */
void InitializeEmotionPoses();
/** Find the best AnimSequence for a given emotion + intensity (with fallback). */
UAnimSequence* FindAnimForEmotion(EElevenLabsEmotion Emotion, EElevenLabsEmotionIntensity Intensity) const;
// ── State ─────────────────────────────────────────────────────────────────
/** Evaluate all FloatCurves from an AnimSequence at a given time. */
TMap<FName, float> EvaluateAnimCurves(UAnimSequence* AnimSeq, float Time) const;
/** Extracted curve data: Emotion → Intensity → { CurveName → Value }. */
TMap<EElevenLabsEmotion, TMap<EElevenLabsEmotionIntensity, TMap<FName, float>>> EmotionCurveMap;
// ── Animation playback state ─────────────────────────────────────────────
/** Current smoothed emotion curves (blended each tick). */
/** Currently playing emotion AnimSequence (looping). */
TObjectPtr<UAnimSequence> ActiveAnim;
/** Playback cursor for the active anim (seconds, wraps at anim length). */
float ActivePlaybackTime = 0.0f;
/** Previous emotion AnimSequence (for crossfade out). */
TObjectPtr<UAnimSequence> PrevAnim;
/** Playback cursor for the previous anim (keeps playing during crossfade). */
float PrevPlaybackTime = 0.0f;
/** Crossfade progress: 0 = fully PrevAnim, 1 = fully ActiveAnim. */
float CrossfadeAlpha = 1.0f;
// ── Curve output ─────────────────────────────────────────────────────────
/** Current blended emotion curves (evaluated each tick from playing anims). */
TMap<FName, float> CurrentEmotionCurves;
/** Target emotion curves (set when emotion changes, blended toward). */
TMap<FName, float> TargetEmotionCurves;
/** Current blend progress (0 = old emotion, 1 = target emotion). */
float EmotionBlendAlpha = 1.0f;
/** Active emotion (for change detection). */
EElevenLabsEmotion ActiveEmotion = EElevenLabsEmotion::Neutral;
EElevenLabsEmotionIntensity ActiveEmotionIntensity = EElevenLabsEmotionIntensity::Medium;

View File

@ -0,0 +1,32 @@
// Copyright ASTERION. All Rights Reserved.
#include "AnimGraphNode_ElevenLabsFacialExpression.h"
#define LOCTEXT_NAMESPACE "AnimNode_ElevenLabsFacialExpression"
// Title shown on the node in the AnimGraph editor (same text for all title modes).
FText UAnimGraphNode_ElevenLabsFacialExpression::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
return LOCTEXT("NodeTitle", "ElevenLabs Facial Expression");
}
// Long-form tooltip shown when hovering the node in the AnimBP graph editor.
FText UAnimGraphNode_ElevenLabsFacialExpression::GetTooltipText() const
{
return LOCTEXT("Tooltip",
"Injects emotion expression curves from the ElevenLabs Facial Expression component.\n\n"
"Place this node BEFORE the ElevenLabs Lip Sync node in the MetaHuman Face AnimBP.\n"
"It outputs CTRL_expressions_* curves for eyes, eyebrows, cheeks, and mouth mood.\n"
"The Lip Sync node placed after will override mouth-area curves during speech.");
}
// Palette category: groups this node with the other ElevenLabs nodes in the
// AnimGraph node picker.
FString UAnimGraphNode_ElevenLabsFacialExpression::GetNodeCategory() const
{
return TEXT("ElevenLabs");
}
FLinearColor UAnimGraphNode_ElevenLabsFacialExpression::GetNodeTitleColor() const
{
	// Warm amber to distinguish from Lip Sync (teal)
	const FLinearColor WarmAmber(0.8f, 0.5f, 0.1f, 1.0f);
	return WarmAmber;
}
#undef LOCTEXT_NAMESPACE

View File

@ -0,0 +1,31 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "AnimGraphNode_Base.h"
#include "AnimNode_ElevenLabsFacialExpression.h"
#include "AnimGraphNode_ElevenLabsFacialExpression.generated.h"
/**
* AnimGraph editor node for the ElevenLabs Facial Expression AnimNode.
*
* This node appears in the AnimBP graph editor under the "ElevenLabs" category.
* Place it BEFORE the ElevenLabs Lip Sync node in the MetaHuman Face AnimBP.
* It auto-discovers the ElevenLabsFacialExpressionComponent on the owning Actor
* and injects CTRL_expressions_* curves for emotion-driven facial expressions.
*/
UCLASS()
class UAnimGraphNode_ElevenLabsFacialExpression : public UAnimGraphNode_Base
{
GENERATED_BODY()
// Runtime node instance this editor node wraps; its properties are exposed
// in the node's details panel under "Settings".
UPROPERTY(EditAnywhere, Category = "Settings")
FAnimNode_ElevenLabsFacialExpression Node;
// UAnimGraphNode_Base interface
virtual FText GetNodeTitle(ENodeTitleType::Type TitleType) const override;
virtual FText GetTooltipText() const override;
virtual FString GetNodeCategory() const override;
virtual FLinearColor GetNodeTitleColor() const override;
};