Compare commits

...

2 Commits

Author SHA1 Message Date
33ec54150f Add bActive with smooth blend and auto-activation to all 3 AnimNode components
- Posture, FacialExpression, LipSync: bActive + ActivationBlendDuration for
  smooth alpha blend in/out (linear interp via FInterpConstantTo).
- Auto-activation: components bind to OnAgentConnected/OnAgentDisconnected,
  starting inactive and blending in when conversation begins.
- Without an agent component, bActive defaults to true (backward compatible).
- Add BlueprintFunctionLibrary with SetPostProcessAnimBlueprint helper
  (wraps UE5.5 SetOverridePostProcessAnimBP for per-instance BP setup).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 15:24:57 +01:00
5fcd98ba73 Add Android platform support to plugin
Whitelist Android in .uplugin Runtime module and handle
read-only APK paths for SSL certificate copy on Android
(ProjectSavedDir fallback). No change to Win64 behavior.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-01 14:14:30 +01:00
10 changed files with 380 additions and 17 deletions

View File

@ -22,7 +22,8 @@
"PlatformAllowList": [ "PlatformAllowList": [
"Win64", "Win64",
"Mac", "Mac",
"Linux" "Linux",
"Android"
] ]
}, },
{ {

View File

@ -46,13 +46,29 @@ void FPS_AI_ConvAgentModule::StartupModule()
void FPS_AI_ConvAgentModule::EnsureSSLCertificates() void FPS_AI_ConvAgentModule::EnsureSSLCertificates()
{ {
const FString ProjectCertPath = FPaths::ProjectContentDir() / TEXT("Certificates") / TEXT("cacert.pem"); const FString CertRelPath = FString(TEXT("Certificates")) / TEXT("cacert.pem");
const FString ProjectCertPath = FPaths::ProjectContentDir() / CertRelPath;
// Check standard location (works on all platforms, including Android if cert was staged).
if (FPlatformFileManager::Get().GetPlatformFile().FileExists(*ProjectCertPath)) if (FPlatformFileManager::Get().GetPlatformFile().FileExists(*ProjectCertPath))
{ {
UE_LOG(LogPS_AI_ConvAgent, Log, TEXT("SSL cacert.pem found at: %s"), *ProjectCertPath); UE_LOG(LogPS_AI_ConvAgent, Log, TEXT("SSL cacert.pem found at: %s"), *ProjectCertPath);
return; return;
} }
#if PLATFORM_ANDROID
// On Android, ProjectContentDir lives inside the APK (read-only).
// Use ProjectSavedDir as a writable fallback for the cert copy.
const FString CopyDestPath = FPaths::ProjectSavedDir() / CertRelPath;
if (FPlatformFileManager::Get().GetPlatformFile().FileExists(*CopyDestPath))
{
UE_LOG(LogPS_AI_ConvAgent, Log, TEXT("SSL cacert.pem found at: %s"), *CopyDestPath);
return;
}
#else
const FString CopyDestPath = ProjectCertPath;
#endif
// Try to auto-copy from the plugin's Resources directory. // Try to auto-copy from the plugin's Resources directory.
TSharedPtr<IPlugin> Plugin = IPluginManager::Get().FindPlugin(TEXT("PS_AI_ConvAgent")); TSharedPtr<IPlugin> Plugin = IPluginManager::Get().FindPlugin(TEXT("PS_AI_ConvAgent"));
if (Plugin.IsValid()) if (Plugin.IsValid())
@ -63,10 +79,10 @@ void FPS_AI_ConvAgentModule::EnsureSSLCertificates()
if (FPlatformFileManager::Get().GetPlatformFile().FileExists(*PluginCertPath)) if (FPlatformFileManager::Get().GetPlatformFile().FileExists(*PluginCertPath))
{ {
FPlatformFileManager::Get().GetPlatformFile().CreateDirectoryTree( FPlatformFileManager::Get().GetPlatformFile().CreateDirectoryTree(
*(FPaths::ProjectContentDir() / TEXT("Certificates"))); *FPaths::GetPath(CopyDestPath));
if (FPlatformFileManager::Get().GetPlatformFile().CopyFile(*ProjectCertPath, *PluginCertPath)) if (FPlatformFileManager::Get().GetPlatformFile().CopyFile(*CopyDestPath, *PluginCertPath))
{ {
UE_LOG(LogPS_AI_ConvAgent, Log, TEXT("Copied SSL cacert.pem from plugin to: %s"), *ProjectCertPath); UE_LOG(LogPS_AI_ConvAgent, Log, TEXT("Copied SSL cacert.pem from plugin to: %s"), *CopyDestPath);
return; return;
} }
} }

View File

@ -0,0 +1,17 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_BlueprintLibrary.h"
#include "Components/SkeletalMeshComponent.h"
/**
 * Assign a Post Process Anim Blueprint to a Skeletal Mesh Component at runtime.
 * Wraps the UE 5.5 per-instance override so the Skeletal Mesh asset itself is
 * left untouched; passing nullptr as AnimBPClass clears the override.
 *
 * @param SkelMeshComp Target Skeletal Mesh Component; logs a warning and does
 *                     nothing when null.
 * @param AnimBPClass  AnimBlueprint class to use as post-process (nullptr to clear).
 */
void UPS_AI_ConvAgent_BlueprintLibrary::SetPostProcessAnimBlueprint(
USkeletalMeshComponent* SkelMeshComp,
TSubclassOf<UAnimInstance> AnimBPClass)
{
if (SkelMeshComp)
{
// Per-instance post-process override (UE 5.5 API).
SkelMeshComp->SetOverridePostProcessAnimBP(AnimBPClass);
}
else
{
UE_LOG(LogTemp, Warning, TEXT("[PS_AI_ConvAgent] SetPostProcessAnimBlueprint: SkelMeshComp is null."));
}
}

View File

@ -41,10 +41,19 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::BeginPlay()
Agent->OnAgentEmotionChanged.AddDynamic( Agent->OnAgentEmotionChanged.AddDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged); this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged);
// Auto-activation: start inactive, activate when conversation connects.
Agent->OnAgentConnected.AddDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected);
Agent->OnAgentDisconnected.AddDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected);
bActive = false;
CurrentActiveAlpha = 0.0f;
if (bDebug) if (bDebug)
{ {
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log, UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("Facial expression bound to agent component on %s."), *Owner->GetName()); TEXT("Facial expression bound to agent on %s. Waiting for conversation."),
*Owner->GetName());
} }
} }
else else
@ -80,6 +89,10 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::EndPlay(const EEndPlayReason::T
{ {
AgentComponent->OnAgentEmotionChanged.RemoveDynamic( AgentComponent->OnAgentEmotionChanged.RemoveDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged); this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged);
AgentComponent->OnAgentConnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected);
AgentComponent->OnAgentDisconnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected);
} }
Super::EndPlay(EndPlayReason); Super::EndPlay(EndPlayReason);
@ -212,6 +225,31 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged(
} }
} }
// ─────────────────────────────────────────────────────────────────────────────
// Auto-activation handlers
// ─────────────────────────────────────────────────────────────────────────────
/** Agent conversation connected — enable the component. TickComponent then
 * blends CurrentActiveAlpha toward 1 over ActivationBlendDuration. */
void UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected(
const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo)
{
// Flip the flag only; the smooth blend-in happens in TickComponent.
bActive = true;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log, TEXT("Conversation connected — facial expression activating."));
}
}
/** Agent conversation ended — disable the component. Outputs blend to zero
 * (passthrough) in TickComponent rather than snapping off.
 * @param StatusCode Disconnect code reported by the agent (logged for debug).
 * @param Reason     Human-readable disconnect reason (unused here). */
void UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected(
int32 StatusCode, const FString& Reason)
{
// Flip the flag only; the smooth blend-out happens in TickComponent.
bActive = false;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,
TEXT("Conversation disconnected (code=%d) — facial expression deactivating."), StatusCode);
}
}
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
// Tick — play emotion animation and crossfade // Tick — play emotion animation and crossfade
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
@ -237,6 +275,21 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent(
} }
} }
// ── Smooth activation blend ──────────────────────────────────────────
{
const float TargetAlpha = bActive ? 1.0f : 0.0f;
if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f))
{
const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f);
CurrentActiveAlpha = FMath::FInterpConstantTo(
CurrentActiveAlpha, TargetAlpha, DeltaTime, BlendSpeed);
}
else
{
CurrentActiveAlpha = TargetAlpha;
}
}
// Nothing to play // Nothing to play
if (!ActiveAnim && !PrevAnim) if (!ActiveAnim && !PrevAnim)
return; return;
@ -312,6 +365,19 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent(
} }
} }
} }
// ── Apply activation alpha to output curves ──────────────────────────
if (CurrentActiveAlpha < 0.001f)
{
CurrentEmotionCurves.Reset();
}
else if (CurrentActiveAlpha < 0.999f)
{
for (auto& Pair : CurrentEmotionCurves)
{
Pair.Value *= CurrentActiveAlpha;
}
}
} }
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────

View File

@ -259,10 +259,19 @@ void UPS_AI_ConvAgent_LipSyncComponent::BeginPlay()
// Enable partial response streaming if not already enabled // Enable partial response streaming if not already enabled
Agent->bEnableAgentPartialResponse = true; Agent->bEnableAgentPartialResponse = true;
// Auto-activation: start inactive, activate when conversation connects.
Agent->OnAgentConnected.AddDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected);
Agent->OnAgentDisconnected.AddDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected);
bActive = false;
CurrentActiveAlpha = 0.0f;
if (bDebug) if (bDebug)
{ {
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Lip sync bound to agent component on %s (audio + text + interruption)."), *Owner->GetName()); TEXT("Lip sync bound to agent on %s. Waiting for conversation."),
*Owner->GetName());
} }
} }
else else
@ -634,6 +643,10 @@ void UPS_AI_ConvAgent_LipSyncComponent::EndPlay(const EEndPlayReason::Type EndPl
this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentInterrupted); this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentInterrupted);
AgentComponent->OnAgentStoppedSpeaking.RemoveDynamic( AgentComponent->OnAgentStoppedSpeaking.RemoveDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentStopped); this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentStopped);
AgentComponent->OnAgentConnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected);
AgentComponent->OnAgentDisconnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected);
} }
AgentComponent.Reset(); AgentComponent.Reset();
SpectrumAnalyzer.Reset(); SpectrumAnalyzer.Reset();
@ -641,6 +654,31 @@ void UPS_AI_ConvAgent_LipSyncComponent::EndPlay(const EEndPlayReason::Type EndPl
Super::EndPlay(EndPlayReason); Super::EndPlay(EndPlayReason);
} }
// ─────────────────────────────────────────────────────────────────────────────
// Auto-activation handlers
// ─────────────────────────────────────────────────────────────────────────────
/** Agent conversation connected — enable lip sync. TickComponent then blends
 * CurrentActiveAlpha toward 1 over ActivationBlendDuration. */
void UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected(
const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo)
{
// Flip the flag only; the smooth blend-in happens in TickComponent.
bActive = true;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, TEXT("Conversation connected — lip sync activating."));
}
}
/** Agent conversation ended — disable lip sync. Blendshape output blends to
 * zero (passthrough) in TickComponent rather than snapping off.
 * @param StatusCode Disconnect code reported by the agent (logged for debug).
 * @param Reason     Human-readable disconnect reason (unused here). */
void UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected(
int32 StatusCode, const FString& Reason)
{
// Flip the flag only; the smooth blend-out happens in TickComponent.
bActive = false;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_LipSync, Log,
TEXT("Conversation disconnected (code=%d) — lip sync deactivating."), StatusCode);
}
}
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
// Tick — smooth visemes and apply morph targets // Tick — smooth visemes and apply morph targets
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
@ -650,6 +688,21 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic
{ {
Super::TickComponent(DeltaTime, TickType, ThisTickFunction); Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
// ── Smooth activation blend ──────────────────────────────────────────
{
const float TargetAlpha = bActive ? 1.0f : 0.0f;
if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f))
{
const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f);
CurrentActiveAlpha = FMath::FInterpConstantTo(
CurrentActiveAlpha, TargetAlpha, DeltaTime, BlendSpeed);
}
else
{
CurrentActiveAlpha = TargetAlpha;
}
}
// ── Lazy binding: in packaged builds, BeginPlay may run before the ──────── // ── Lazy binding: in packaged builds, BeginPlay may run before the ────────
// ElevenLabsComponent is fully initialized. Retry discovery until bound. // ElevenLabsComponent is fully initialized. Retry discovery until bound.
if (!AgentComponent.IsValid()) if (!AgentComponent.IsValid())
@ -977,6 +1030,19 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic
PreviousBlendshapes = CurrentBlendshapes; PreviousBlendshapes = CurrentBlendshapes;
} }
// ── Apply activation alpha to output blendshapes ─────────────────────
if (CurrentActiveAlpha < 0.001f)
{
CurrentBlendshapes.Reset();
}
else if (CurrentActiveAlpha < 0.999f)
{
for (auto& Pair : CurrentBlendshapes)
{
Pair.Value *= CurrentActiveAlpha;
}
}
// Auto-apply morph targets if a target mesh is set // Auto-apply morph targets if a target mesh is set
if (TargetMesh) if (TargetMesh)
{ {

View File

@ -1,6 +1,7 @@
// Copyright ASTERION. All Rights Reserved. // Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_PostureComponent.h" #include "PS_AI_ConvAgent_PostureComponent.h"
#include "PS_AI_ConvAgent_ElevenLabsComponent.h"
#include "Components/SkeletalMeshComponent.h" #include "Components/SkeletalMeshComponent.h"
#include "GameFramework/Actor.h" #include "GameFramework/Actor.h"
#include "Math/UnrealMathUtility.h" #include "Math/UnrealMathUtility.h"
@ -106,6 +107,71 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay()
*Owner->GetName(), MeshForwardYawOffset, OriginalActorYaw, *Owner->GetName(), MeshForwardYawOffset, OriginalActorYaw,
MaxEyeHorizontal, MaxEyeVertical, MaxHeadYaw, MaxHeadPitch); MaxEyeHorizontal, MaxEyeVertical, MaxHeadYaw, MaxHeadPitch);
} }
// Auto-activation: bind to agent conversation lifecycle.
// When an agent component is found, start inactive and wait for conversation.
auto* Agent = Owner->FindComponentByClass<UPS_AI_ConvAgent_ElevenLabsComponent>();
if (Agent)
{
AgentComponent = Agent;
Agent->OnAgentConnected.AddDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected);
Agent->OnAgentDisconnected.AddDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected);
// Start inactive — will activate when conversation connects.
bActive = false;
CurrentActiveAlpha = 0.0f;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
TEXT("Auto-activation bound to agent on %s. Waiting for conversation."),
*Owner->GetName());
}
}
}
// ─────────────────────────────────────────────────────────────────────────────
// EndPlay
// ─────────────────────────────────────────────────────────────────────────────
/** Unbind the auto-activation delegates from the agent component (if it is
 * still alive) before shutdown, so no dangling dynamic bindings remain. */
void UPS_AI_ConvAgent_PostureComponent::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
// AgentComponent is a weak pointer — skip unbinding if the agent was already destroyed.
if (AgentComponent.IsValid())
{
AgentComponent->OnAgentConnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected);
AgentComponent->OnAgentDisconnected.RemoveDynamic(
this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected);
}
Super::EndPlay(EndPlayReason);
}
// ─────────────────────────────────────────────────────────────────────────────
// Auto-activation handlers
// ─────────────────────────────────────────────────────────────────────────────
/** Agent conversation connected — enable posture tracking. TickComponent then
 * blends CurrentActiveAlpha toward 1 over ActivationBlendDuration. */
void UPS_AI_ConvAgent_PostureComponent::OnConversationConnected(
const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo)
{
// Flip the flag only; the smooth blend-in happens in TickComponent.
bActive = true;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log, TEXT("Conversation connected — posture activating."));
}
}
void UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected(
int32 StatusCode, const FString& Reason)
{
bActive = false;
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_Posture, Log,
TEXT("Conversation disconnected (code=%d) — posture deactivating."), StatusCode);
}
} }
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
@ -197,6 +263,21 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent(
// snapping in one frame. // snapping in one frame.
const float SafeDeltaTime = FMath::Min(DeltaTime, 0.05f); const float SafeDeltaTime = FMath::Min(DeltaTime, 0.05f);
// ── Smooth activation blend ──────────────────────────────────────────
{
const float TargetAlpha = bActive ? 1.0f : 0.0f;
if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f))
{
const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f);
CurrentActiveAlpha = FMath::FInterpConstantTo(
CurrentActiveAlpha, TargetAlpha, SafeDeltaTime, BlendSpeed);
}
else
{
CurrentActiveAlpha = TargetAlpha;
}
}
if (TargetActor) if (TargetActor)
{ {
// ── 1. Compute target position and eye origin ────────────────────── // ── 1. Compute target position and eye origin ──────────────────────

View File

@ -0,0 +1,31 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Kismet/BlueprintFunctionLibrary.h"
#include "PS_AI_ConvAgent_BlueprintLibrary.generated.h"
class USkeletalMeshComponent;
class UAnimInstance;
// ─────────────────────────────────────────────────────────────────────────────
// UPS_AI_ConvAgent_BlueprintLibrary
//
// Utility functions exposed to Blueprint for the PS AI ConvAgent plugin.
// ─────────────────────────────────────────────────────────────────────────────
UCLASS()
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_BlueprintLibrary : public UBlueprintFunctionLibrary
{
GENERATED_BODY()
public:
/** Assign a Post Process Anim Blueprint to a Skeletal Mesh Component at runtime.
 * This allows per-instance override without modifying the Skeletal Mesh asset.
 * Logs a warning and is a no-op when SkelMeshComp is null.
 * @param SkelMeshComp The target Skeletal Mesh Component.
 * @param AnimBPClass The AnimBlueprint class to use as post-process (nullptr to clear). */
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Utilities",
meta = (DisplayName = "Set Post Process Anim Blueprint"))
static void SetPostProcessAnimBlueprint(USkeletalMeshComponent* SkelMeshComp,
TSubclassOf<UAnimInstance> AnimBPClass);
};

View File

@ -47,6 +47,17 @@ public:
meta = (ToolTip = "Dedicated Emotion Pose Map asset.\nRight-click Content Browser → Miscellaneous → PS AI ConvAgent Emotion Pose Map.")) meta = (ToolTip = "Dedicated Emotion Pose Map asset.\nRight-click Content Browser → Miscellaneous → PS AI ConvAgent Emotion Pose Map."))
TObjectPtr<UPS_AI_ConvAgent_EmotionPoseMap> EmotionPoseMap; TObjectPtr<UPS_AI_ConvAgent_EmotionPoseMap> EmotionPoseMap;
/** When false, emotion curves smoothly blend to zero (passthrough).
* The underlying emotion playback keeps running so reactivation is seamless. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression",
meta = (ToolTip = "Enable facial expressions.\nWhen false, outputs blend to zero (passthrough)."))
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression",
meta = (ClampMin = "0.05", ClampMax = "3.0"))
float ActivationBlendDuration = 0.5f;
/** Emotion crossfade duration in seconds. */ /** Emotion crossfade duration in seconds. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression", UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression",
meta = (ClampMin = "0.1", ClampMax = "3.0", meta = (ClampMin = "0.1", ClampMax = "3.0",
@ -97,6 +108,14 @@ private:
UFUNCTION() UFUNCTION()
void OnEmotionChanged(EPS_AI_ConvAgent_Emotion Emotion, EPS_AI_ConvAgent_EmotionIntensity Intensity); void OnEmotionChanged(EPS_AI_ConvAgent_Emotion Emotion, EPS_AI_ConvAgent_EmotionIntensity Intensity);
/** Automatically activate when conversation connects. */
UFUNCTION()
void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo);
/** Automatically deactivate when conversation disconnects. */
UFUNCTION()
void OnConversationDisconnected(int32 StatusCode, const FString& Reason);
// ── Helpers ─────────────────────────────────────────────────────────────── // ── Helpers ───────────────────────────────────────────────────────────────
/** Validate PoseMap emotion entries at BeginPlay. */ /** Validate PoseMap emotion entries at BeginPlay. */
@ -130,6 +149,9 @@ private:
/** Current blended emotion curves (evaluated each tick from playing anims). */ /** Current blended emotion curves (evaluated each tick from playing anims). */
TMap<FName, float> CurrentEmotionCurves; TMap<FName, float> CurrentEmotionCurves;
/** Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). */
float CurrentActiveAlpha = 1.0f;
/** Active emotion (for change detection). */ /** Active emotion (for change detection). */
EPS_AI_ConvAgent_Emotion ActiveEmotion = EPS_AI_ConvAgent_Emotion::Neutral; EPS_AI_ConvAgent_Emotion ActiveEmotion = EPS_AI_ConvAgent_Emotion::Neutral;
EPS_AI_ConvAgent_EmotionIntensity ActiveEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium; EPS_AI_ConvAgent_EmotionIntensity ActiveEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium;

View File

@ -5,6 +5,7 @@
#include "CoreMinimal.h" #include "CoreMinimal.h"
#include "Components/ActorComponent.h" #include "Components/ActorComponent.h"
#include "DSP/SpectrumAnalyzer.h" #include "DSP/SpectrumAnalyzer.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_LipSyncComponent.generated.h" #include "PS_AI_ConvAgent_LipSyncComponent.generated.h"
class UPS_AI_ConvAgent_ElevenLabsComponent; class UPS_AI_ConvAgent_ElevenLabsComponent;
@ -56,6 +57,17 @@ public:
meta = (ToolTip = "Skeletal mesh to drive morph targets on.\nLeave empty to read values manually via GetCurrentBlendshapes().")) meta = (ToolTip = "Skeletal mesh to drive morph targets on.\nLeave empty to read values manually via GetCurrentBlendshapes()."))
TObjectPtr<USkeletalMeshComponent> TargetMesh; TObjectPtr<USkeletalMeshComponent> TargetMesh;
/** When false, lip sync blendshapes smoothly blend to zero (passthrough).
* The underlying audio analysis keeps running so reactivation is seamless. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync",
meta = (ToolTip = "Enable lip sync.\nWhen false, outputs blend to zero (passthrough)."))
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync",
meta = (ClampMin = "0.05", ClampMax = "3.0"))
float ActivationBlendDuration = 0.5f;
/** Overall mouth movement intensity multiplier. */ /** Overall mouth movement intensity multiplier. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync", UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync",
meta = (ClampMin = "0.0", ClampMax = "3.0", meta = (ClampMin = "0.0", ClampMax = "3.0",
@ -178,6 +190,14 @@ private:
UFUNCTION() UFUNCTION()
void OnAgentStopped(); void OnAgentStopped();
/** Automatically activate when conversation connects. */
UFUNCTION()
void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo);
/** Automatically deactivate when conversation disconnects. */
UFUNCTION()
void OnConversationDisconnected(int32 StatusCode, const FString& Reason);
/** Clear all lip sync queues and reset mouth to neutral pose. */ /** Clear all lip sync queues and reset mouth to neutral pose. */
void ResetToNeutral(); void ResetToNeutral();
@ -234,6 +254,9 @@ private:
// to create continuous motion instead of 32ms step-wise jumps // to create continuous motion instead of 32ms step-wise jumps
TMap<FName, float> LastConsumedVisemes; TMap<FName, float> LastConsumedVisemes;
// Current blend alpha (0 = fully inactive/passthrough, 1 = fully active).
float CurrentActiveAlpha = 1.0f;
// MetaHuman mode: Face mesh has no morph targets, use animation curves instead. // MetaHuman mode: Face mesh has no morph targets, use animation curves instead.
// Set automatically in BeginPlay when TargetMesh has 0 morph targets. // Set automatically in BeginPlay when TargetMesh has 0 morph targets.
bool bUseCurveMode = false; bool bUseCurveMode = false;

View File

@ -5,8 +5,10 @@
#include "CoreMinimal.h" #include "CoreMinimal.h"
#include "Components/ActorComponent.h" #include "Components/ActorComponent.h"
#include "HAL/CriticalSection.h" #include "HAL/CriticalSection.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_PostureComponent.generated.h" #include "PS_AI_ConvAgent_PostureComponent.generated.h"
class UPS_AI_ConvAgent_ElevenLabsComponent;
class USkeletalMeshComponent; class USkeletalMeshComponent;
DECLARE_LOG_CATEGORY_EXTERN(LogPS_AI_ConvAgent_Posture, Log, All); DECLARE_LOG_CATEGORY_EXTERN(LogPS_AI_ConvAgent_Posture, Log, All);
@ -73,6 +75,18 @@ public:
meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral.")) meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral."))
TObjectPtr<AActor> TargetActor; TObjectPtr<AActor> TargetActor;
/** When false, all posture outputs smoothly blend to neutral (passthrough).
* The underlying tracking keeps running so reactivation is seamless.
* Controlled automatically by the conversation state, or manually. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
meta = (ToolTip = "Enable posture system.\nWhen false, outputs blend to neutral (passthrough)."))
bool bActive = true;
/** How long (seconds) to blend in/out when bActive changes. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
meta = (ClampMin = "0.05", ClampMax = "3.0"))
float ActivationBlendDuration = 0.5f;
/** When false, body rotation is frozen — only head and eyes track the target. /** When false, body rotation is frozen — only head and eyes track the target.
* Useful to have the agent notice the player (eyes+head) before fully engaging (body). */ * Useful to have the agent notice the player (eyes+head) before fully engaging (body). */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture", UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture",
@ -218,21 +232,27 @@ public:
// ── Getters (read by AnimNode) ─────────────────────────────────────────── // ── Getters (read by AnimNode) ───────────────────────────────────────────
/** Get current eye gaze curves (8 ARKit eye look curves). /** Get current eye gaze curves (8 ARKit eye look curves).
* Returns a COPY safe to call from any thread. */ * Returns a COPY scaled by activation alpha safe to call from any thread. */
UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Posture") UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Posture")
TMap<FName, float> GetCurrentEyeCurves() const TMap<FName, float> GetCurrentEyeCurves() const
{ {
FScopeLock Lock(&PostureDataLock); FScopeLock Lock(&PostureDataLock);
return CurrentEyeCurves; if (CurrentActiveAlpha < 0.001f) return TMap<FName, float>();
if (CurrentActiveAlpha >= 0.999f) return CurrentEyeCurves;
TMap<FName, float> Scaled = CurrentEyeCurves;
for (auto& Pair : Scaled) { Pair.Value *= CurrentActiveAlpha; }
return Scaled;
} }
/** Get current head rotation offset (applied by AnimNode as FQuat to avoid /** Get current head rotation offset (applied by AnimNode as FQuat to avoid
* Euler round-trip that reintroduces parasitic tilt on diagonals). * Euler round-trip that reintroduces parasitic tilt on diagonals).
* Thread-safe copy. */ * Thread-safe copy, blended by activation alpha. */
FQuat GetCurrentHeadRotation() const FQuat GetCurrentHeadRotation() const
{ {
FScopeLock Lock(&PostureDataLock); FScopeLock Lock(&PostureDataLock);
return CurrentHeadRotation; if (CurrentActiveAlpha < 0.001f) return FQuat::Identity;
if (CurrentActiveAlpha >= 0.999f) return CurrentHeadRotation;
return FQuat::Slerp(FQuat::Identity, CurrentHeadRotation, CurrentActiveAlpha);
} }
/** Get the head bone name (used by AnimNode to resolve bone index). */ /** Get the head bone name (used by AnimNode to resolve bone index). */
@ -241,21 +261,35 @@ public:
/** Get the neck bone chain (used by AnimNode to resolve bone indices). */ /** Get the neck bone chain (used by AnimNode to resolve bone indices). */
const TArray<FPS_AI_ConvAgent_NeckBoneEntry>& GetNeckBoneChain() const { return NeckBoneChain; } const TArray<FPS_AI_ConvAgent_NeckBoneEntry>& GetNeckBoneChain() const { return NeckBoneChain; }
/** Get head animation compensation factor (0 = additive, 1 = full override). */ /** Get head animation compensation factor (0 = additive, 1 = full override).
float GetHeadAnimationCompensation() const { return HeadAnimationCompensation; } * Scaled by activation alpha for smooth passthrough when inactive. */
float GetHeadAnimationCompensation() const { return HeadAnimationCompensation * CurrentActiveAlpha; }
/** Get eye animation compensation factor (0 = additive, 1 = full override). */ /** Get eye animation compensation factor (0 = additive, 1 = full override).
float GetEyeAnimationCompensation() const { return EyeAnimationCompensation; } * Scaled by activation alpha for smooth passthrough when inactive. */
float GetEyeAnimationCompensation() const { return EyeAnimationCompensation * CurrentActiveAlpha; }
/** Get body drift compensation factor (0 = none, 1 = full). */ /** Get body drift compensation factor (0 = none, 1 = full).
float GetBodyDriftCompensation() const { return BodyDriftCompensation; } * Scaled by activation alpha for smooth passthrough when inactive. */
float GetBodyDriftCompensation() const { return BodyDriftCompensation * CurrentActiveAlpha; }
// ── UActorComponent overrides ──────────────────────────────────────────── // ── UActorComponent overrides ────────────────────────────────────────────
virtual void BeginPlay() override; virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;
virtual void TickComponent(float DeltaTime, ELevelTick TickType, virtual void TickComponent(float DeltaTime, ELevelTick TickType,
FActorComponentTickFunction* ThisTickFunction) override; FActorComponentTickFunction* ThisTickFunction) override;
private: private:
// ── Event handlers ────────────────────────────────────────────────────────
/** Automatically activate when conversation connects. */
UFUNCTION()
void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo);
/** Automatically deactivate when conversation disconnects. */
UFUNCTION()
void OnConversationDisconnected(int32 StatusCode, const FString& Reason);
// ── Internals ──────────────────────────────────────────────────────────── // ── Internals ────────────────────────────────────────────────────────────
/** Map eye yaw/pitch angles to 8 ARKit eye curves. */ /** Map eye yaw/pitch angles to 8 ARKit eye curves. */
@ -263,6 +297,9 @@ private:
// ── Smoothed current values (head + eyes, body is actor yaw) ──────────── // ── Smoothed current values (head + eyes, body is actor yaw) ────────────
/** Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). */
float CurrentActiveAlpha = 1.0f;
float CurrentHeadYaw = 0.0f; float CurrentHeadYaw = 0.0f;
float CurrentHeadPitch = 0.0f; float CurrentHeadPitch = 0.0f;
float CurrentEyeYaw = 0.0f; float CurrentEyeYaw = 0.0f;
@ -302,6 +339,9 @@ private:
/** Cached Face skeletal mesh component (for eye bone transforms). */ /** Cached Face skeletal mesh component (for eye bone transforms). */
TWeakObjectPtr<USkeletalMeshComponent> CachedFaceMesh; TWeakObjectPtr<USkeletalMeshComponent> CachedFaceMesh;
/** Cached reference to the agent component for auto-activation. */
TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> AgentComponent;
#if !UE_BUILD_SHIPPING #if !UE_BUILD_SHIPPING
/** Frame counter for periodic debug logging. */ /** Frame counter for periodic debug logging. */
int32 DebugFrameCounter = 0; int32 DebugFrameCounter = 0;