fix crash + posture replication

This commit is contained in:
j.foucher 2026-03-03 10:21:52 +01:00
parent 1c4dbfc402
commit 45ee0c6f7d
6 changed files with 57 additions and 28 deletions

View File

@@ -89,9 +89,12 @@ void FAnimNode_PS_AI_ConvAgent_FacialExpression::Evaluate_AnyThread(FPoseContext
 	// covering eyes, eyebrows, cheeks, nose, and mouth mood.
 	// The downstream Lip Sync node will override mouth-area curves
 	// during speech, while non-mouth emotion curves pass through.
-	for (const auto& Pair : CachedEmotionCurves)
-	{
-		Output.Curve.Set(Pair.Key, Pair.Value);
+	if (Output.Curve.IsValid())
+	{
+		for (const auto& Pair : CachedEmotionCurves)
+		{
+			Output.Curve.Set(Pair.Key, Pair.Value);
+		}
 	}
 }

View File

@@ -89,11 +89,14 @@ void FAnimNode_PS_AI_ConvAgent_LipSync::Evaluate_AnyThread(FPoseContext& Output)
 	// Skip near-zero values so that the upstream Facial Expression node's
 	// emotion curves (eyes, brows, mouth mood) pass through during silence.
 	// During speech, active lip sync curves override emotion's mouth curves.
-	for (const auto& Pair : CachedCurves)
-	{
-		if (FMath::Abs(Pair.Value) > 0.01f)
-		{
-			Output.Curve.Set(Pair.Key, Pair.Value);
-		}
+	if (Output.Curve.IsValid())
+	{
+		for (const auto& Pair : CachedCurves)
+		{
+			if (FMath::Abs(Pair.Value) > 0.01f)
+			{
+				Output.Curve.Set(Pair.Key, Pair.Value);
+			}
+		}
 	}
 }

View File

@@ -379,6 +379,11 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 	// Evaluate the upstream pose (pass-through)
 	BasePose.Evaluate(Output);
 
+	// Guard: in packaged+network builds the curve container may not be
+	// initialized yet (skeleton not fully loaded). All Output.Curve access
+	// must be gated on this flag to avoid null-pointer crashes.
+	const bool bCurveValid = Output.Curve.IsValid();
+
 	// ── Periodic diagnostic (runs for EVERY instance, before any early return) ─
 #if !UE_BUILD_SHIPPING
 	if (++EvalDebugFrameCounter % 300 == 1) // ~every 5 seconds at 60fps
@@ -432,11 +437,14 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 #if ELEVENLABS_EYE_DIAGNOSTIC == 1
 	// MODE 1: CTRL curves → tests CORRECT MetaHuman CTRL naming
 	// Real format: CTRL_expressions_eyeLook{Dir}{L/R} (NOT eyeLookUpLeft!)
-	static const FName ForceCTRL(TEXT("CTRL_expressions_eyeLookUpL"));
-	Output.Curve.Set(ForceCTRL, 1.0f);
-	// Zero ARKit to isolate
-	static const FName ZeroARKit(TEXT("eyeLookUpLeft"));
-	Output.Curve.Set(ZeroARKit, 0.0f);
+	if (bCurveValid)
+	{
+		static const FName ForceCTRL(TEXT("CTRL_expressions_eyeLookUpL"));
+		Output.Curve.Set(ForceCTRL, 1.0f);
+		// Zero ARKit to isolate
+		static const FName ZeroARKit(TEXT("eyeLookUpLeft"));
+		Output.Curve.Set(ZeroARKit, 0.0f);
+	}
 	// Reset eye bone to ref pose to isolate
 	if (LeftEyeBoneIndex.GetInt() != INDEX_NONE
 		&& LeftEyeBoneIndex.GetInt() < Output.Pose.GetNumBones())
@@ -451,11 +459,14 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 #elif ELEVENLABS_EYE_DIAGNOSTIC == 2
 	// MODE 2: ARKit curves → tests if mh_arkit_mapping_pose drives eyes
-	static const FName ForceARKit(TEXT("eyeLookUpLeft"));
-	Output.Curve.Set(ForceARKit, 1.0f);
-	// Zero CTRL to isolate
-	static const FName ZeroCTRL(TEXT("CTRL_expressions_eyeLookUpLeft"));
-	Output.Curve.Set(ZeroCTRL, 0.0f);
+	if (bCurveValid)
+	{
+		static const FName ForceARKit(TEXT("eyeLookUpLeft"));
+		Output.Curve.Set(ForceARKit, 1.0f);
+		// Zero CTRL to isolate
+		static const FName ZeroCTRL(TEXT("CTRL_expressions_eyeLookUpLeft"));
+		Output.Curve.Set(ZeroCTRL, 0.0f);
+	}
 	// Reset eye bone to ref pose to isolate
 	if (LeftEyeBoneIndex.GetInt() != INDEX_NONE
 		&& LeftEyeBoneIndex.GetInt() < Output.Pose.GetNumBones())
@@ -479,10 +490,13 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 			(LookUp * LeftEyeRefPoseRotation).GetNormalized());
 	}
 	// Zero curves to isolate
-	static const FName ZeroARKit(TEXT("eyeLookUpLeft"));
-	static const FName ZeroCTRL(TEXT("CTRL_expressions_eyeLookUpLeft"));
-	Output.Curve.Set(ZeroARKit, 0.0f);
-	Output.Curve.Set(ZeroCTRL, 0.0f);
+	if (bCurveValid)
+	{
+		static const FName ZeroARKit(TEXT("eyeLookUpLeft"));
+		static const FName ZeroCTRL(TEXT("CTRL_expressions_eyeLookUpLeft"));
+		Output.Curve.Set(ZeroARKit, 0.0f);
+		Output.Curve.Set(ZeroCTRL, 0.0f);
+	}
 	if (++EyeDiagLogCounter % 300 == 1)
 	{
 		UE_LOG(LogPS_AI_ConvAgent_PostureAnimNode, Verbose,
@@ -537,6 +551,7 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 	}
 	// (b) Blend CTRL eye curves: read animation's value, lerp with posture
+	if (bCurveValid)
 	{
 		const auto& CTRLMap = GetARKitToCTRLEyeMap();
 		for (const auto& Pair : CachedEyeCurves)
@@ -551,15 +566,15 @@ void FAnimNode_PS_AI_ConvAgent_Posture::Evaluate_AnyThread(FPoseContext& Output)
 				Output.Curve.Set(*CTRLName, BlendedValue);
 			}
 		}
+	}
 	// (c) Zero ARKit eye curves to prevent mh_arkit_mapping_pose
 	// from overwriting our carefully blended CTRL values.
 	// mh_arkit converts ARKit→CTRL additively; zeroing means
 	// it adds nothing for eyes, preserving our blend.
 	for (const auto& Pair : CachedEyeCurves)
 	{
 		Output.Curve.Set(Pair.Key, 0.0f);
-	}
 	}
 }

View File

@@ -337,7 +337,15 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
 	if (FMath::Abs(BodyDelta) > 0.1f)
 	{
 		const float BodyStep = FMath::FInterpTo(0.0f, BodyDelta, SafeDeltaTime, BodyInterpSpeed);
-		Owner->AddActorWorldRotation(FRotator(0.0f, BodyStep, 0.0f));
+		// Only modify actor rotation on the authority (server/standalone).
+		// On clients, the rotation arrives via replication — calling
+		// AddActorWorldRotation here would fight with replicated updates,
+		// causing visible stuttering as the network periodically snaps
+		// the rotation back to the server's value.
+		if (Owner->HasAuthority())
+		{
+			Owner->AddActorWorldRotation(FRotator(0.0f, BodyStep, 0.0f));
+		}
 	}
 }