Fix posture body-tracking reset on interaction re-entry, broadcast an OnExpressionChanged event from the facial-expression component, and make language-fragment stripping in the ElevenLabs agent-fetch handler tolerant of normalised newlines

This commit is contained in:
j.foucher 2026-03-02 12:37:28 +01:00
parent 259a77f9f6
commit 82b134bcc3
17 changed files with 126 additions and 26 deletions

View File

@ -167,3 +167,6 @@ ConnectionType=USBOnly
bUseManualIPAddress=False
ManualIPAddress=
[/Script/PS_AI_ConvAgent.PS_AI_ConvAgent_Settings_ElevenLabs]
; SECURITY(review): a plaintext API key is committed to version control here.
; Rotate this key immediately, purge it from history, and load it at runtime
; from an environment variable or an untracked local config file instead.
API_Key=7b73c4244ccbec394cc010aaab01b0ec59ce0b11fc636ce4828354f675ca14a5

View File

@ -216,6 +216,9 @@ void UPS_AI_ConvAgent_FacialExpressionComponent::OnEmotionChanged(
// Begin crossfade
CrossfadeAlpha = 0.0f;
// Notify listeners (Blueprints, other components).
OnExpressionChanged.Broadcast(Emotion, Intensity);
if (bDebug)
{
UE_LOG(LogPS_AI_ConvAgent_FacialExpr, Log,

View File

@ -416,8 +416,19 @@ void UPS_AI_ConvAgent_InteractionComponent::AttachPostureTarget(
if (UPS_AI_ConvAgent_PostureComponent* Posture = FindPostureOnAgent(AgentPtr))
{
Posture->TargetActor = GetOwner();
// Eyes+head only at first — body tracking is enabled when listening starts.
Posture->bEnableBodyTracking = false;
// Reset the body target to the actor's current facing so body tracking
// starts fresh on re-entry. Without this, TargetBodyWorldYaw retains
// the stale value from the previous interaction and the body never moves
// (BodyDelta ≈ 0 because the actor is already at the old target yaw).
Posture->ResetBodyTarget();
// If the agent is already in an active conversation (re-entry),
// enable body tracking immediately — the conversation is already engaged,
// so HandleAgentResponseStarted won't fire again until the player speaks.
// On first interaction the agent isn't connected yet, so we start with
// eyes+head only and let HandleAgentResponseStarted enable body later.
Posture->bEnableBodyTracking = AgentPtr->IsConnected();
if (bDebug)
{

View File

@ -174,6 +174,14 @@ void UPS_AI_ConvAgent_PostureComponent::OnConversationDisconnected(
}
}
void UPS_AI_ConvAgent_PostureComponent::ResetBodyTarget()
{
if (AActor* Owner = GetOwner())
{
TargetBodyWorldYaw = Owner->GetActorRotation().Yaw;
}
}
// ─────────────────────────────────────────────────────────────────────────────
// Map eye angles to 8 ARKit eye curves
// ─────────────────────────────────────────────────────────────────────────────

View File

@ -11,6 +11,10 @@ class UPS_AI_ConvAgent_ElevenLabsComponent;
class UPS_AI_ConvAgent_EmotionPoseMap;
class UAnimSequence;
DECLARE_DYNAMIC_MULTICAST_DELEGATE_TwoParams(FOnExpressionChanged,
EPS_AI_ConvAgent_Emotion, Emotion,
EPS_AI_ConvAgent_EmotionIntensity, Intensity);
// ─────────────────────────────────────────────────────────────────────────────
// UPS_AI_ConvAgent_FacialExpressionComponent
//
@ -95,6 +99,15 @@ public:
UFUNCTION(BlueprintPure, Category = "PS AI ConvAgent|FacialExpression")
static bool IsMouthCurve(const FName& CurveName);
// ── Events ───────────────────────────────────────────────────────────────
/** Fired when the facial expression changes (emotion + intensity).
* Subscribe in Blueprints to react to expression changes (trigger animations,
* particles, sounds, etc.). Only fires when the emotion actually differs
* from the previous one. */
UPROPERTY(BlueprintAssignable, Category = "PS AI ConvAgent|FacialExpression|Events")
FOnExpressionChanged OnExpressionChanged;
// ── UActorComponent overrides ─────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -273,6 +273,11 @@ public:
* Scaled by activation alpha for smooth passthrough when inactive. */
float GetBodyDriftCompensation() const { return BodyDriftCompensation * CurrentActiveAlpha; }
/** Reset the persistent body yaw target to the actor's current facing.
* Call this when re-attaching a posture target so body tracking starts
* fresh instead of chasing a stale yaw from the previous interaction. */
void ResetBodyTarget();
// ── UActorComponent overrides ────────────────────────────────────────────
virtual void BeginPlay() override;
virtual void EndPlay(const EEndPlayReason::Type EndPlayReason) override;

View File

@ -58,6 +58,29 @@ static FString GetLLMLatencyHint(const FString& ModelID)
return FString();
}
// Maps an ISO 639-1 language code to its English display name. Shared by
// BuildAgentPayload (to substitute the {Language} placeholder) and by the
// fetch handler (to locate and strip the resolved fragment again).
// Unknown codes fall back to the code itself.
static FString GetLanguageDisplayName(const FString& LangCode)
{
	// Keyed lookup table, sorted alphabetically by code for easy maintenance.
	static const TMap<FString, FString> CodeToDisplayName = {
		{TEXT("ar"), TEXT("Arabic")},     {TEXT("cs"), TEXT("Czech")},
		{TEXT("da"), TEXT("Danish")},     {TEXT("de"), TEXT("German")},
		{TEXT("el"), TEXT("Greek")},      {TEXT("es"), TEXT("Spanish")},
		{TEXT("fi"), TEXT("Finnish")},    {TEXT("fr"), TEXT("French")},
		{TEXT("hi"), TEXT("Hindi")},      {TEXT("hu"), TEXT("Hungarian")},
		{TEXT("id"), TEXT("Indonesian")}, {TEXT("it"), TEXT("Italian")},
		{TEXT("ja"), TEXT("Japanese")},   {TEXT("ko"), TEXT("Korean")},
		{TEXT("nl"), TEXT("Dutch")},      {TEXT("no"), TEXT("Norwegian")},
		{TEXT("pl"), TEXT("Polish")},     {TEXT("pt"), TEXT("Portuguese")},
		{TEXT("ro"), TEXT("Romanian")},   {TEXT("ru"), TEXT("Russian")},
		{TEXT("sv"), TEXT("Swedish")},    {TEXT("tr"), TEXT("Turkish")},
		{TEXT("uk"), TEXT("Ukrainian")},  {TEXT("vi"), TEXT("Vietnamese")},
		{TEXT("zh"), TEXT("Chinese")},
	};
	if (const FString* DisplayName = CodeToDisplayName.Find(LangCode))
	{
		return *DisplayName;
	}
	return LangCode;
}
// ─────────────────────────────────────────────────────────────────────────────
// Factory
// ─────────────────────────────────────────────────────────────────────────────
@ -309,7 +332,6 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::CustomizeDetails(
.Font(IDetailLayoutBuilder::GetDetailFont())
]
.ValueContent()
.MaxDesiredWidth(600.f)
[
SNew(SBox)
.MinDesiredHeight(200.f)
@ -1091,13 +1113,65 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
// to avoid doubling them on next Update.
// Order matters: strip from earliest marker to preserve CharacterPrompt.
// 1. Language instruction marker
// 1. Language instruction — try exact fragment first, then marker fallback.
// Mirrors the emotion-tool approach: ElevenLabs may normalise
// double-newlines, so the "\n\n## …" marker alone can fail.
{
const FString LangMarker = TEXT("\n\n## Language");
int32 Idx = Prompt.Find(LangMarker, ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
bool bLangStripped = false;
// (a) Exact multilingual fragment match
if (!bLangStripped && !Asset->MultilingualPromptFragment.IsEmpty())
{
Prompt.LeftInline(Idx);
int32 Idx = Prompt.Find(Asset->MultilingualPromptFragment,
ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
bLangStripped = true;
}
}
// (b) Exact fixed-language fragment match (resolve {Language} placeholder)
if (!bLangStripped && !Asset->LanguagePromptFragment.IsEmpty())
{
// Extract language from JSON so we can resolve the placeholder.
FString FetchedLang;
(*AgentObj)->TryGetStringField(TEXT("language"), FetchedLang);
if (!FetchedLang.IsEmpty())
{
FString Resolved = Asset->LanguagePromptFragment;
Resolved.ReplaceInline(TEXT("{Language}"),
*GetLanguageDisplayName(FetchedLang));
int32 Idx = Prompt.Find(Resolved, ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
bLangStripped = true;
}
}
}
// (c) Marker fallback — double newline
if (!bLangStripped)
{
const FString LangMarker = TEXT("\n\n## Language");
int32 Idx = Prompt.Find(LangMarker, ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
bLangStripped = true;
}
}
// (d) Marker fallback — single newline (ElevenLabs may collapse \n\n)
if (!bLangStripped)
{
const FString LangMarkerAlt = TEXT("\n## Language");
int32 Idx = Prompt.Find(LangMarkerAlt, ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
}
}
}
@ -1394,24 +1468,7 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
{
// Fixed-language mode: force the LLM to always respond in one language.
// Replace {Language} placeholder with the actual language display name.
static const TMap<FString, FString> LangNames = {
{TEXT("fr"), TEXT("French")}, {TEXT("de"), TEXT("German")},
{TEXT("es"), TEXT("Spanish")}, {TEXT("it"), TEXT("Italian")},
{TEXT("pt"), TEXT("Portuguese")}, {TEXT("ja"), TEXT("Japanese")},
{TEXT("ko"), TEXT("Korean")}, {TEXT("zh"), TEXT("Chinese")},
{TEXT("nl"), TEXT("Dutch")}, {TEXT("pl"), TEXT("Polish")},
{TEXT("ru"), TEXT("Russian")}, {TEXT("sv"), TEXT("Swedish")},
{TEXT("tr"), TEXT("Turkish")}, {TEXT("hi"), TEXT("Hindi")},
{TEXT("cs"), TEXT("Czech")}, {TEXT("ar"), TEXT("Arabic")},
{TEXT("id"), TEXT("Indonesian")}, {TEXT("fi"), TEXT("Finnish")},
{TEXT("da"), TEXT("Danish")}, {TEXT("el"), TEXT("Greek")},
{TEXT("hu"), TEXT("Hungarian")}, {TEXT("no"), TEXT("Norwegian")},
{TEXT("ro"), TEXT("Romanian")}, {TEXT("uk"), TEXT("Ukrainian")},
{TEXT("vi"), TEXT("Vietnamese")},
};
const FString* LangName = LangNames.Find(Asset->Language);
const FString DisplayLang = LangName ? *LangName : Asset->Language;
const FString DisplayLang = GetLanguageDisplayName(Asset->Language);
FString LangFragment = Asset->LanguagePromptFragment;
LangFragment.ReplaceInline(TEXT("{Language}"), *DisplayLang);
FullPrompt += TEXT("\n\n");