diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_BlueprintLibrary.cpp b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_BlueprintLibrary.cpp new file mode 100644 index 0000000..5ca4d05 --- /dev/null +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_BlueprintLibrary.cpp @@ -0,0 +1,17 @@ +// Copyright ASTERION. All Rights Reserved. + +#include "PS_AI_ConvAgent_BlueprintLibrary.h" +#include "Components/SkeletalMeshComponent.h" + +void UPS_AI_ConvAgent_BlueprintLibrary::SetPostProcessAnimBlueprint( + USkeletalMeshComponent* SkelMeshComp, + TSubclassOf<UAnimInstance> AnimBPClass) +{ + if (!SkelMeshComp) + { + UE_LOG(LogTemp, Warning, TEXT("[PS_AI_ConvAgent] SetPostProcessAnimBlueprint: SkelMeshComp is null.")); + return; + } + + SkelMeshComp->SetOverridePostProcessAnimBP(AnimBPClass); +} diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_FacialExpressionComponent.cpp b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_FacialExpressionComponent.cpp index f83af87..f81a67b 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_FacialExpressionComponent.cpp +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_FacialExpressionComponent.cpp @@ -41,10 +41,19 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::BeginPlay() Agent->OnAgentEmotionChanged.AddDynamic( this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged); + // Auto-activation: start inactive, activate when conversation connects. 
+ Agent->OnAgentConnected.AddDynamic( + this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected); + Agent->OnAgentDisconnected.AddDynamic( + this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected); + bActive = false; + CurrentActiveAlpha = 0.0f; + if (bDebug) { UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log, - TEXT("Facial expression bound to agent component on %s."), *Owner->GetName()); + TEXT("Facial expression bound to agent on %s. Waiting for conversation."), + *Owner->GetName()); } } else @@ -80,6 +89,10 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::EndPlay(const EEndPlayReason::T { AgentComponent->OnAgentEmotionChanged.RemoveDynamic( this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged); + AgentComponent->OnAgentConnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected); + AgentComponent->OnAgentDisconnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected); } Super::EndPlay(EndPlayReason); @@ -212,6 +225,31 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged( } } +// ───────────────────────────────────────────────────────────────────────────── +// Auto-activation handlers +// ───────────────────────────────────────────────────────────────────────────── + +void UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationConnected( + const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo) +{ + bActive = true; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log, TEXT("Conversation connected — facial expression activating.")); + } +} + +void UPS_AI_ConvAgent_FacialExpressionComponent::OnConversationDisconnected( + int32 StatusCode, const FString& Reason) +{ + bActive = false; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log, + TEXT("Conversation disconnected (code=%d) — facial expression deactivating."), StatusCode); + } +} + // 
───────────────────────────────────────────────────────────────────────────── // Tick — play emotion animation and crossfade // ───────────────────────────────────────────────────────────────────────────── @@ -237,6 +275,21 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent( } } + // ── Smooth activation blend ────────────────────────────────────────── + { + const float TargetAlpha = bActive ? 1.0f : 0.0f; + if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f)) + { + const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f); + CurrentActiveAlpha = FMath::FInterpConstantTo( + CurrentActiveAlpha, TargetAlpha, DeltaTime, BlendSpeed); + } + else + { + CurrentActiveAlpha = TargetAlpha; + } + } + // Nothing to play if (!ActiveAnim && !PrevAnim) return; @@ -312,6 +365,19 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::TickComponent( } } } + + // ── Apply activation alpha to output curves ────────────────────────── + if (CurrentActiveAlpha < 0.001f) + { + CurrentEmotionCurves.Reset(); + } + else if (CurrentActiveAlpha < 0.999f) + { + for (auto& Pair : CurrentEmotionCurves) + { + Pair.Value *= CurrentActiveAlpha; + } + } } // ───────────────────────────────────────────────────────────────────────────── diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_LipSyncComponent.cpp b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_LipSyncComponent.cpp index 6030436..f77014d 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_LipSyncComponent.cpp +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_LipSyncComponent.cpp @@ -259,10 +259,19 @@ void UPS_AI_ConvAgent_LipSyncComponent::BeginPlay() // Enable partial response streaming if not already enabled Agent->bEnableAgentPartialResponse = true; + // Auto-activation: start inactive, activate when 
conversation connects. + Agent->OnAgentConnected.AddDynamic( + this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected); + Agent->OnAgentDisconnected.AddDynamic( + this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected); + bActive = false; + CurrentActiveAlpha = 0.0f; + if (bDebug) { UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, - TEXT("Lip sync bound to agent component on %s (audio + text + interruption)."), *Owner->GetName()); + TEXT("Lip sync bound to agent on %s. Waiting for conversation."), + *Owner->GetName()); } } else @@ -634,6 +643,10 @@ void UPS_AI_ConvAgent_LipSyncComponent::EndPlay(const EEndPlayReason::Type EndPl this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentInterrupted); AgentComponent->OnAgentStoppedSpeaking.RemoveDynamic( this, &UPS_AI_ConvAgent_LipSyncComponent::OnAgentStopped); + AgentComponent->OnAgentConnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected); + AgentComponent->OnAgentDisconnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected); } AgentComponent.Reset(); SpectrumAnalyzer.Reset(); @@ -641,6 +654,31 @@ void UPS_AI_ConvAgent_LipSyncComponent::EndPlay(const EEndPlayReason::Type EndPl Super::EndPlay(EndPlayReason); } +// ───────────────────────────────────────────────────────────────────────────── +// Auto-activation handlers +// ───────────────────────────────────────────────────────────────────────────── + +void UPS_AI_ConvAgent_LipSyncComponent::OnConversationConnected( + const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo) +{ + bActive = true; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, TEXT("Conversation connected — lip sync activating.")); + } +} + +void UPS_AI_ConvAgent_LipSyncComponent::OnConversationDisconnected( + int32 StatusCode, const FString& Reason) +{ + bActive = false; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_LipSync, Log, + TEXT("Conversation disconnected (code=%d) — lip sync 
deactivating."), StatusCode); + } +} + // ───────────────────────────────────────────────────────────────────────────── // Tick — smooth visemes and apply morph targets // ───────────────────────────────────────────────────────────────────────────── @@ -650,6 +688,21 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic { Super::TickComponent(DeltaTime, TickType, ThisTickFunction); + // ── Smooth activation blend ────────────────────────────────────────── + { + const float TargetAlpha = bActive ? 1.0f : 0.0f; + if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f)) + { + const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f); + CurrentActiveAlpha = FMath::FInterpConstantTo( + CurrentActiveAlpha, TargetAlpha, DeltaTime, BlendSpeed); + } + else + { + CurrentActiveAlpha = TargetAlpha; + } + } + // ── Lazy binding: in packaged builds, BeginPlay may run before the ──────── // ElevenLabsComponent is fully initialized. Retry discovery until bound. 
if (!AgentComponent.IsValid()) @@ -977,6 +1030,19 @@ void UPS_AI_ConvAgent_LipSyncComponent::TickComponent(float DeltaTime, ELevelTic PreviousBlendshapes = CurrentBlendshapes; } + // ── Apply activation alpha to output blendshapes ───────────────────── + if (CurrentActiveAlpha < 0.001f) + { + CurrentBlendshapes.Reset(); + } + else if (CurrentActiveAlpha < 0.999f) + { + for (auto& Pair : CurrentBlendshapes) + { + Pair.Value *= CurrentActiveAlpha; + } + } + // Auto-apply morph targets if a target mesh is set if (TargetMesh) { diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_PostureComponent.cpp b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_PostureComponent.cpp index 830c3db..bb44dfb 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_PostureComponent.cpp +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Private/PS_AI_ConvAgent_PostureComponent.cpp @@ -1,6 +1,7 @@ // Copyright ASTERION. All Rights Reserved. #include "PS_AI_ConvAgent_PostureComponent.h" +#include "PS_AI_ConvAgent_ElevenLabsComponent.h" #include "Components/SkeletalMeshComponent.h" #include "GameFramework/Actor.h" #include "Math/UnrealMathUtility.h" @@ -106,6 +107,71 @@ void UPS_AI_ConvAgent_PostureComponent::BeginPlay() *Owner->GetName(), MeshForwardYawOffset, OriginalActorYaw, MaxEyeHorizontal, MaxEyeVertical, MaxHeadYaw, MaxHeadPitch); } + + // Auto-activation: bind to agent conversation lifecycle. + // When an agent component is found, start inactive and wait for conversation. 
+ auto* Agent = Owner->FindComponentByClass<UPS_AI_ConvAgent_ElevenLabsComponent>(); + if (Agent) + { + AgentComponent = Agent; + Agent->OnAgentConnected.AddDynamic( + this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected); + Agent->OnAgentDisconnected.AddDynamic( + this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected); + + // Start inactive — will activate when conversation connects. + bActive = false; + CurrentActiveAlpha = 0.0f; + + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_Posture, Log, + TEXT("Auto-activation bound to agent on %s. Waiting for conversation."), + *Owner->GetName()); + } + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// EndPlay +// ───────────────────────────────────────────────────────────────────────────── + +void UPS_AI_ConvAgent_PostureComponent::EndPlay(const EEndPlayReason::Type EndPlayReason) +{ + if (AgentComponent.IsValid()) + { + AgentComponent->OnAgentConnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_PostureComponent::OnConversationConnected); + AgentComponent->OnAgentDisconnected.RemoveDynamic( + this, &UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected); + } + + Super::EndPlay(EndPlayReason); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Auto-activation handlers +// ───────────────────────────────────────────────────────────────────────────── + +void UPS_AI_ConvAgent_PostureComponent::OnConversationConnected( + const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo) +{ + bActive = true; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_Posture, Log, TEXT("Conversation connected — posture activating.")); + } +} + +void UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected( + int32 StatusCode, const FString& Reason) +{ + bActive = false; + if (bDebug) + { + UE_LOG(LogPS_AI_ConvAgent_Posture, Log, + TEXT("Conversation disconnected (code=%d) — posture deactivating."), StatusCode); + } } // 
───────────────────────────────────────────────────────────────────────────── @@ -197,6 +263,21 @@ void UPS_AI_ConvAgent_PostureComponent::TickComponent( // snapping in one frame. const float SafeDeltaTime = FMath::Min(DeltaTime, 0.05f); + // ── Smooth activation blend ────────────────────────────────────────── + { + const float TargetAlpha = bActive ? 1.0f : 0.0f; + if (!FMath::IsNearlyEqual(CurrentActiveAlpha, TargetAlpha, 0.001f)) + { + const float BlendSpeed = 1.0f / FMath::Max(ActivationBlendDuration, 0.01f); + CurrentActiveAlpha = FMath::FInterpConstantTo( + CurrentActiveAlpha, TargetAlpha, SafeDeltaTime, BlendSpeed); + } + else + { + CurrentActiveAlpha = TargetAlpha; + } + } + if (TargetActor) { // ── 1. Compute target position and eye origin ────────────────────── diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_BlueprintLibrary.h b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_BlueprintLibrary.h new file mode 100644 index 0000000..fbe67e7 --- /dev/null +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_BlueprintLibrary.h @@ -0,0 +1,31 @@ +// Copyright ASTERION. All Rights Reserved. + +#pragma once + +#include "CoreMinimal.h" +#include "Kismet/BlueprintFunctionLibrary.h" +#include "PS_AI_ConvAgent_BlueprintLibrary.generated.h" + +class USkeletalMeshComponent; +class UAnimInstance; + +// ───────────────────────────────────────────────────────────────────────────── +// UPS_AI_ConvAgent_BlueprintLibrary +// +// Utility functions exposed to Blueprint for the PS AI ConvAgent plugin. +// ───────────────────────────────────────────────────────────────────────────── +UCLASS() +class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_BlueprintLibrary : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + +public: + /** Assign a Post Process Anim Blueprint to a Skeletal Mesh Component at runtime. 
+ * This allows per-instance override without modifying the Skeletal Mesh asset. + * @param SkelMeshComp The target Skeletal Mesh Component. + * @param AnimBPClass The AnimBlueprint class to use as post-process (nullptr to clear). */ + UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Utilities", + meta = (DisplayName = "Set Post Process Anim Blueprint")) + static void SetPostProcessAnimBlueprint(USkeletalMeshComponent* SkelMeshComp, + TSubclassOf<UAnimInstance> AnimBPClass); +}; diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_FacialExpressionComponent.h b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_FacialExpressionComponent.h index d10ff6a..4d5e1c9 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_FacialExpressionComponent.h +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_FacialExpressionComponent.h @@ -47,6 +47,17 @@ public: meta = (ToolTip = "Dedicated Emotion Pose Map asset.\nRight-click Content Browser → Miscellaneous → PS AI ConvAgent Emotion Pose Map.")) TObjectPtr<UPS_AI_ConvAgent_EmotionPoseMap> EmotionPoseMap; + /** When false, emotion curves smoothly blend to zero (passthrough). + * The underlying emotion playback keeps running so reactivation is seamless. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression", + meta = (ToolTip = "Enable facial expressions.\nWhen false, outputs blend to zero (passthrough).")) + bool bActive = true; + + /** How long (seconds) to blend in/out when bActive changes. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression", + meta = (ClampMin = "0.05", ClampMax = "3.0")) + float ActivationBlendDuration = 0.5f; + /** Emotion crossfade duration in seconds. 
*/ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|FacialExpression", meta = (ClampMin = "0.1", ClampMax = "3.0", @@ -97,6 +108,14 @@ private: UFUNCTION() void OnEmotionChanged(EPS_AI_ConvAgent_Emotion Emotion, EPS_AI_ConvAgent_EmotionIntensity Intensity); + /** Automatically activate when conversation connects. */ + UFUNCTION() + void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo); + + /** Automatically deactivate when conversation disconnects. */ + UFUNCTION() + void OnConversationDisconnected(int32 StatusCode, const FString& Reason); + // ── Helpers ─────────────────────────────────────────────────────────────── /** Validate PoseMap emotion entries at BeginPlay. */ @@ -130,6 +149,9 @@ private: /** Current blended emotion curves (evaluated each tick from playing anims). */ TMap<FName, float> CurrentEmotionCurves; + /** Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). */ + float CurrentActiveAlpha = 1.0f; + /** Active emotion (for change detection). 
*/ EPS_AI_ConvAgent_Emotion ActiveEmotion = EPS_AI_ConvAgent_Emotion::Neutral; EPS_AI_ConvAgent_EmotionIntensity ActiveEmotionIntensity = EPS_AI_ConvAgent_EmotionIntensity::Medium; diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_LipSyncComponent.h b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_LipSyncComponent.h index f3d33ca..a91461e 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_LipSyncComponent.h +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_LipSyncComponent.h @@ -5,6 +5,7 @@ #include "CoreMinimal.h" #include "Components/ActorComponent.h" #include "DSP/SpectrumAnalyzer.h" +#include "PS_AI_ConvAgent_Definitions.h" #include "PS_AI_ConvAgent_LipSyncComponent.generated.h" class UPS_AI_ConvAgent_ElevenLabsComponent; @@ -56,6 +57,17 @@ public: meta = (ToolTip = "Skeletal mesh to drive morph targets on.\nLeave empty to read values manually via GetCurrentBlendshapes().")) TObjectPtr<USkeletalMeshComponent> TargetMesh; + /** When false, lip sync blendshapes smoothly blend to zero (passthrough). + * The underlying audio analysis keeps running so reactivation is seamless. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync", + meta = (ToolTip = "Enable lip sync.\nWhen false, outputs blend to zero (passthrough).")) + bool bActive = true; + + /** How long (seconds) to blend in/out when bActive changes. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync", + meta = (ClampMin = "0.05", ClampMax = "3.0")) + float ActivationBlendDuration = 0.5f; + /** Overall mouth movement intensity multiplier. 
*/ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|LipSync", meta = (ClampMin = "0.0", ClampMax = "3.0", @@ -178,6 +190,14 @@ private: UFUNCTION() void OnAgentStopped(); + /** Automatically activate when conversation connects. */ + UFUNCTION() + void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo); + + /** Automatically deactivate when conversation disconnects. */ + UFUNCTION() + void OnConversationDisconnected(int32 StatusCode, const FString& Reason); + /** Clear all lip sync queues and reset mouth to neutral pose. */ void ResetToNeutral(); @@ -234,6 +254,9 @@ private: // to create continuous motion instead of 32ms step-wise jumps TMap<FName, float> LastConsumedVisemes; + // Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). + float CurrentActiveAlpha = 1.0f; + // MetaHuman mode: Face mesh has no morph targets, use animation curves instead. // Set automatically in BeginPlay when TargetMesh has 0 morph targets. 
bool bUseCurveMode = false; diff --git a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_PostureComponent.h b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_PostureComponent.h index e5728d2..cba86be 100644 --- a/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_PostureComponent.h +++ b/Unreal/PS_AI_Agent/Plugins/PS_AI_ConvAgent/Source/PS_AI_ConvAgent/Public/PS_AI_ConvAgent_PostureComponent.h @@ -5,8 +5,10 @@ #include "CoreMinimal.h" #include "Components/ActorComponent.h" #include "HAL/CriticalSection.h" +#include "PS_AI_ConvAgent_Definitions.h" #include "PS_AI_ConvAgent_PostureComponent.generated.h" +class UPS_AI_ConvAgent_ElevenLabsComponent; class USkeletalMeshComponent; DECLARE_LOG_CATEGORY_EXTERN(LogPS_AI_ConvAgent_Posture, Log, All); @@ -73,6 +75,18 @@ public: meta = (ToolTip = "Target actor to look at.\nSet to null to return to neutral.")) TObjectPtr<AActor> TargetActor; + /** When false, all posture outputs smoothly blend to neutral (passthrough). + * The underlying tracking keeps running so reactivation is seamless. + * Controlled automatically by the conversation state, or manually. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture", + meta = (ToolTip = "Enable posture system.\nWhen false, outputs blend to neutral (passthrough).")) + bool bActive = true; + + /** How long (seconds) to blend in/out when bActive changes. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture", + meta = (ClampMin = "0.05", ClampMax = "3.0")) + float ActivationBlendDuration = 0.5f; + /** When false, body rotation is frozen — only head and eyes track the target. * Useful to have the agent notice the player (eyes+head) before fully engaging (body). 
*/ UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|Posture", @@ -218,21 +232,27 @@ public: // ── Getters (read by AnimNode) ─────────────────────────────────────────── /** Get current eye gaze curves (8 ARKit eye look curves). - * Returns a COPY — safe to call from any thread. */ + * Returns a COPY scaled by activation alpha — safe to call from any thread. */ UFUNCTION(BlueprintCallable, Category = "PS AI ConvAgent|Posture") TMap<FName, float> GetCurrentEyeCurves() const { FScopeLock Lock(&PostureDataLock); - return CurrentEyeCurves; + if (CurrentActiveAlpha < 0.001f) return TMap<FName, float>(); + if (CurrentActiveAlpha >= 0.999f) return CurrentEyeCurves; + TMap<FName, float> Scaled = CurrentEyeCurves; + for (auto& Pair : Scaled) { Pair.Value *= CurrentActiveAlpha; } + return Scaled; } /** Get current head rotation offset (applied by AnimNode as FQuat to avoid * Euler round-trip that reintroduces parasitic tilt on diagonals). - * Thread-safe copy. */ + * Thread-safe copy, blended by activation alpha. */ FQuat GetCurrentHeadRotation() const { FScopeLock Lock(&PostureDataLock); - return CurrentHeadRotation; + if (CurrentActiveAlpha < 0.001f) return FQuat::Identity; + if (CurrentActiveAlpha >= 0.999f) return CurrentHeadRotation; + return FQuat::Slerp(FQuat::Identity, CurrentHeadRotation, CurrentActiveAlpha); } /** Get the head bone name (used by AnimNode to resolve bone index). */ @@ -241,21 +261,35 @@ public: /** Get the neck bone chain (used by AnimNode to resolve bone indices). */ const TArray<FName>& GetNeckBoneChain() const { return NeckBoneChain; } - /** Get head animation compensation factor (0 = additive, 1 = full override). */ - float GetHeadAnimationCompensation() const { return HeadAnimationCompensation; } + /** Get head animation compensation factor (0 = additive, 1 = full override). + * Scaled by activation alpha for smooth passthrough when inactive. 
*/ + float GetHeadAnimationCompensation() const { return HeadAnimationCompensation * CurrentActiveAlpha; } - /** Get eye animation compensation factor (0 = additive, 1 = full override). */ - float GetEyeAnimationCompensation() const { return EyeAnimationCompensation; } + /** Get eye animation compensation factor (0 = additive, 1 = full override). + * Scaled by activation alpha for smooth passthrough when inactive. */ + float GetEyeAnimationCompensation() const { return EyeAnimationCompensation * CurrentActiveAlpha; } - /** Get body drift compensation factor (0 = none, 1 = full). */ - float GetBodyDriftCompensation() const { return BodyDriftCompensation; } + /** Get body drift compensation factor (0 = none, 1 = full). + * Scaled by activation alpha for smooth passthrough when inactive. */ + float GetBodyDriftCompensation() const { return BodyDriftCompensation * CurrentActiveAlpha; } // ── UActorComponent overrides ──────────────────────────────────────────── virtual void BeginPlay() override; + virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override; virtual void TickComponent(float DeltaTime, ELevelTick TickType, FActorComponentTickFunction* ThisTickFunction) override; private: + // ── Event handlers ──────────────────────────────────────────────────────── + + /** Automatically activate when conversation connects. */ + UFUNCTION() + void OnConversationConnected(const FPS_AI_ConvAgent_ConversationInfo_ElevenLabs& ConversationInfo); + + /** Automatically deactivate when conversation disconnects. */ + UFUNCTION() + void OnConversationDisconnected(int32 StatusCode, const FString& Reason); + // ── Internals ──────────────────────────────────────────────────────────── /** Map eye yaw/pitch angles to 8 ARKit eye curves. */ @@ -263,6 +297,9 @@ private: // ── Smoothed current values (head + eyes, body is actor yaw) ──────────── + /** Current blend alpha (0 = fully inactive/passthrough, 1 = fully active). 
*/ + float CurrentActiveAlpha = 1.0f; + float CurrentHeadYaw = 0.0f; float CurrentHeadPitch = 0.0f; float CurrentEyeYaw = 0.0f; @@ -302,6 +339,9 @@ private: /** Cached Face skeletal mesh component (for eye bone transforms). */ TWeakObjectPtr<USkeletalMeshComponent> CachedFaceMesh; + /** Cached reference to the agent component for auto-activation. */ + TWeakObjectPtr<UPS_AI_ConvAgent_ElevenLabsComponent> AgentComponent; + #if !UE_BUILD_SHIPPING /** Frame counter for periodic debug logging. */ int32 DebugFrameCounter = 0;