Compare commits

...

2 Commits

Author SHA1 Message Date
66171cc0bd Add ElevenLabs Tool system, update ConvAgent binaries and cleanup patches
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 19:27:58 +02:00
c1afddc8b7 Add Scripted state and bAutoStartBehavior for external NPC control
Allow NPCs to start without a Behavior Tree and be controlled externally
via Blueprint, Level Sequences, or triggers. Adds Scripted enum state that
prevents BT services from overriding state, plus StartBehavior/StopBehavior
BlueprintCallable functions to toggle BT execution at runtime.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-03 19:26:26 +02:00
60 changed files with 1544 additions and 563 deletions

View File

@ -25,6 +25,9 @@ void UPS_AI_Behavior_BTService_EvaluateReaction::TickNode(
APS_AI_Behavior_AIController* AIC = Cast<APS_AI_Behavior_AIController>(OwnerComp.GetAIOwner()); APS_AI_Behavior_AIController* AIC = Cast<APS_AI_Behavior_AIController>(OwnerComp.GetAIOwner());
if (!AIC) { UE_LOG(LogPS_AI_Behavior, Warning, TEXT("EvaluateReaction: no AIC!")); return; } if (!AIC) { UE_LOG(LogPS_AI_Behavior, Warning, TEXT("EvaluateReaction: no AIC!")); return; }
// Scripted state: external control — don't touch the state
if (AIC->GetBehaviorState() == EPS_AI_Behavior_State::Scripted) { return; }
UPS_AI_Behavior_PersonalityComponent* Personality = AIC->GetPersonalityComponent(); UPS_AI_Behavior_PersonalityComponent* Personality = AIC->GetPersonalityComponent();
if (!Personality) { UE_LOG(LogPS_AI_Behavior, Warning, TEXT("[%s] EvaluateReaction: no PersonalityComponent!"), *AIC->GetName()); return; } if (!Personality) { UE_LOG(LogPS_AI_Behavior, Warning, TEXT("[%s] EvaluateReaction: no PersonalityComponent!"), *AIC->GetName()); return; }

View File

@ -25,6 +25,9 @@ void UPS_AI_Behavior_BTService_UpdateThreat::TickNode(
APS_AI_Behavior_AIController* AIC = Cast<APS_AI_Behavior_AIController>(OwnerComp.GetAIOwner()); APS_AI_Behavior_AIController* AIC = Cast<APS_AI_Behavior_AIController>(OwnerComp.GetAIOwner());
if (!AIC) return; if (!AIC) return;
// Scripted state: external control — don't accumulate threat
if (AIC->GetBehaviorState() == EPS_AI_Behavior_State::Scripted) { return; }
UBlackboardComponent* BB = OwnerComp.GetBlackboardComponent(); UBlackboardComponent* BB = OwnerComp.GetBlackboardComponent();
if (!BB) return; if (!BB) return;

View File

@ -80,12 +80,24 @@ void APS_AI_Behavior_AIController::OnPossess(APawn* InPawn)
} }
SetupBlackboard(); SetupBlackboard();
if (bAutoStartBehavior)
{
StartBehavior(); StartBehavior();
}
else
{
// Stay in Scripted state — no BT, no services, controlled externally
SetBehaviorState(EPS_AI_Behavior_State::Scripted);
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] bAutoStartBehavior=false — entering Scripted state."),
*GetName());
}
TryBindConversationAgent(); TryBindConversationAgent();
TryBindGazeComponent(); TryBindGazeComponent();
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] Possessed Pawn '%s' — BT started, TeamId=%d."), UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] Possessed Pawn '%s' — TeamId=%d, AutoStart=%d."),
*GetName(), *InPawn->GetName(), TeamId); *GetName(), *InPawn->GetName(), TeamId, (int32)bAutoStartBehavior);
} }
void APS_AI_Behavior_AIController::OnUnPossess() void APS_AI_Behavior_AIController::OnUnPossess()
@ -227,24 +239,84 @@ void APS_AI_Behavior_AIController::SetupBlackboard()
void APS_AI_Behavior_AIController::StartBehavior() void APS_AI_Behavior_AIController::StartBehavior()
{ {
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] StartBehavior called. Pawn=%s, HasBB=%d"),
*GetName(),
GetPawn() ? *GetPawn()->GetName() : TEXT("null"),
Blackboard != nullptr);
// Check if a BT was previously loaded and stopped (StopBehavior case)
UBehaviorTreeComponent* BTComp = Cast<UBehaviorTreeComponent>(GetBrainComponent());
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] BrainComponent=%s, BTComp=%s, CurrentTree=%s, IsRunning=%d"),
*GetName(),
GetBrainComponent() ? *GetBrainComponent()->GetName() : TEXT("null"),
BTComp ? *BTComp->GetName() : TEXT("null"),
(BTComp && BTComp->GetCurrentTree()) ? *BTComp->GetCurrentTree()->GetName() : TEXT("null"),
BTComp ? (int32)BTComp->IsRunning() : -1);
if (BTComp && BTComp->GetCurrentTree() && !BTComp->IsRunning())
{
BTComp->RestartLogic();
if (GetBehaviorState() == EPS_AI_Behavior_State::Scripted)
{
SetBehaviorState(EPS_AI_Behavior_State::Idle);
}
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] StartBehavior — BT restarted (RestartLogic). IsRunning=%d"),
*GetName(), (int32)BTComp->IsRunning());
return;
}
// First-time start: load and run the BT
UBehaviorTree* BTToRun = BehaviorTreeAsset; UBehaviorTree* BTToRun = BehaviorTreeAsset;
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] BehaviorTreeAsset=%s"),
*GetName(), BTToRun ? *BTToRun->GetName() : TEXT("null"));
// Fallback: get from personality profile // Fallback: get from personality profile
if (!BTToRun && PersonalityComp && PersonalityComp->Profile) if (!BTToRun && PersonalityComp && PersonalityComp->Profile)
{ {
BTToRun = PersonalityComp->Profile->DefaultBehaviorTree.LoadSynchronous(); BTToRun = PersonalityComp->Profile->DefaultBehaviorTree.LoadSynchronous();
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] Loaded from Profile: %s"),
*GetName(), BTToRun ? *BTToRun->GetName() : TEXT("null"));
} }
if (BTToRun) if (!BTToRun)
{
RunBehaviorTree(BTToRun);
}
else
{ {
UE_LOG(LogPS_AI_Behavior, Warning, UE_LOG(LogPS_AI_Behavior, Warning,
TEXT("[%s] No BehaviorTree assigned and none in PersonalityProfile — NPC will be inert."), TEXT("[%s] StartBehavior FAILED — No BehaviorTree assigned and none in PersonalityProfile."),
*GetName()); *GetName());
return;
} }
const bool bSuccess = RunBehaviorTree(BTToRun);
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] RunBehaviorTree('%s') returned %d"),
*GetName(), *BTToRun->GetName(), (int32)bSuccess);
// Check post-run state
BTComp = Cast<UBehaviorTreeComponent>(GetBrainComponent());
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] Post-run: BTComp=%s, IsRunning=%d, BB=%s"),
*GetName(),
BTComp ? *BTComp->GetName() : TEXT("null"),
BTComp ? (int32)BTComp->IsRunning() : -1,
Blackboard ? TEXT("valid") : TEXT("null"));
if (bSuccess && GetBehaviorState() == EPS_AI_Behavior_State::Scripted)
{
SetBehaviorState(EPS_AI_Behavior_State::Idle);
}
}
void APS_AI_Behavior_AIController::StopBehavior()
{
if (UBrainComponent* Brain = GetBrainComponent())
{
Brain->StopLogic(TEXT("Scripted"));
}
StopMovement();
SetBehaviorState(EPS_AI_Behavior_State::Scripted);
UE_LOG(LogPS_AI_Behavior, Log, TEXT("[%s] StopBehavior — BT stopped, entering Scripted state."), *GetName());
} }
void APS_AI_Behavior_AIController::SetBehaviorState(EPS_AI_Behavior_State NewState) void APS_AI_Behavior_AIController::SetBehaviorState(EPS_AI_Behavior_State NewState)
@ -274,6 +346,12 @@ void APS_AI_Behavior_AIController::SetBehaviorState(EPS_AI_Behavior_State NewSta
} }
} }
// ─── Scripted: stop movement, NPC stays alive ──────────────
if (NewState == EPS_AI_Behavior_State::Scripted)
{
StopMovement();
}
// ─── Dead: shut down all AI systems ───────────────────────── // ─── Dead: shut down all AI systems ─────────────────────────
if (NewState == EPS_AI_Behavior_State::Dead) if (NewState == EPS_AI_Behavior_State::Dead)
{ {

View File

@ -72,6 +72,11 @@ EPS_AI_Behavior_State UPS_AI_Behavior_PersonalityComponent::EvaluateReaction() c
return EPS_AI_Behavior_State::Dead; return EPS_AI_Behavior_State::Dead;
} }
if (CurrentState == EPS_AI_Behavior_State::Scripted)
{
return EPS_AI_Behavior_State::Scripted;
}
const float Courage = GetTrait(EPS_AI_Behavior_TraitAxis::Courage); const float Courage = GetTrait(EPS_AI_Behavior_TraitAxis::Courage);
const float Aggressivity = GetTrait(EPS_AI_Behavior_TraitAxis::Aggressivity); const float Aggressivity = GetTrait(EPS_AI_Behavior_TraitAxis::Aggressivity);
const float Caution = GetTrait(EPS_AI_Behavior_TraitAxis::Caution); const float Caution = GetTrait(EPS_AI_Behavior_TraitAxis::Caution);

View File

@ -59,6 +59,10 @@ public:
// ─── Configuration ────────────────────────────────────────────────── // ─── Configuration ──────────────────────────────────────────────────
/** If false, the Behavior Tree does NOT start on possess. Call StartBehavior() manually. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Behavior")
bool bAutoStartBehavior = true;
/** Behavior Tree to run. If null, uses the Profile's DefaultBehaviorTree. */ /** Behavior Tree to run. If null, uses the Profile's DefaultBehaviorTree. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Behavior") UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Behavior")
TObjectPtr<UBehaviorTree> BehaviorTreeAsset; TObjectPtr<UBehaviorTree> BehaviorTreeAsset;
@ -107,6 +111,14 @@ public:
UFUNCTION(BlueprintCallable, Category = "PS AI Behavior|Blackboard") UFUNCTION(BlueprintCallable, Category = "PS AI Behavior|Blackboard")
EPS_AI_Behavior_State GetBehaviorState() const; EPS_AI_Behavior_State GetBehaviorState() const;
/** Start (or restart) the Behavior Tree. If state is Scripted, transitions to Idle. */
UFUNCTION(BlueprintCallable, Category = "PS AI Behavior")
void StartBehavior();
/** Stop the Behavior Tree and enter Scripted state. NPC stays alive and perceptible. */
UFUNCTION(BlueprintCallable, Category = "PS AI Behavior")
void StopBehavior();
protected: protected:
virtual void OnPossess(APawn* InPawn) override; virtual void OnPossess(APawn* InPawn) override;
virtual void OnUnPossess() override; virtual void OnUnPossess() override;
@ -126,9 +138,6 @@ private:
/** Initialize Blackboard with required keys. */ /** Initialize Blackboard with required keys. */
void SetupBlackboard(); void SetupBlackboard();
/** Start the Behavior Tree (from asset or profile). */
void StartBehavior();
/** /**
* Attempt to bind to PS_AI_ConvAgent_ElevenLabsComponent if present on the Pawn. * Attempt to bind to PS_AI_ConvAgent_ElevenLabsComponent if present on the Pawn.
* Uses UObject reflection — no compile-time dependency on PS_AI_ConvAgent. * Uses UObject reflection — no compile-time dependency on PS_AI_ConvAgent.

View File

@ -43,6 +43,7 @@ enum class EPS_AI_Behavior_State : uint8
Fleeing UMETA(DisplayName = "Fleeing"), Fleeing UMETA(DisplayName = "Fleeing"),
TakingCover UMETA(DisplayName = "Taking Cover"), TakingCover UMETA(DisplayName = "Taking Cover"),
Dead UMETA(DisplayName = "Dead"), Dead UMETA(DisplayName = "Dead"),
Scripted UMETA(DisplayName = "Scripted"),
}; };
/** /**

View File

@ -5,7 +5,7 @@
#include "CoreMinimal.h" #include "CoreMinimal.h"
#include "Engine/DataAsset.h" #include "Engine/DataAsset.h"
#include "PS_AI_ConvAgent_Definitions.h" #include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h" #include "PS_AI_ConvAgent_Tool_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.generated.h" #include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.generated.h"
/** /**
@ -206,36 +206,18 @@ public:
meta = (ToolTip = "Start generating a response before confirming end-of-speech.\nReduces latency but may cause occasional false starts.\nDisable if the agent interrupts the user too often.")) meta = (ToolTip = "Start generating a response before confirming end-of-speech.\nReduces latency but may cause occasional false starts.\nDisable if the agent interrupts the user too often."))
bool bSpeculativeTurn = false; bool bSpeculativeTurn = false;
// ── Emotion Tool ───────────────────────────────────────────────────────── // ── Tools ────────────────────────────────────────────────────────────────
/** Include the built-in "set_emotion" client tool in the agent configuration. /** Standalone tools assigned to this agent.
* Allows the LLM to set facial expressions (Joy, Sadness, Anger, etc.) * Each tool is a global resource on ElevenLabs (managed via its own Data Asset).
* that drive the FacialExpression component in real-time. */ * The tool's PromptFragment is appended to CharacterPrompt on Create/Update.
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Emotion Tool", * The tool's ToolID is sent in prompt.tool_ids to the ElevenLabs API.
meta = (ToolTip = "Include the set_emotion client tool.\nAllows the LLM to drive facial expressions.")) *
bool bIncludeEmotionTool = true; * Drag Tool Data Assets here (e.g. set_emotion, perform_action, custom tools).
* Create tools in Content Browser: Miscellaneous > Data Asset > PS AI ConvAgent Tool. */
/** System prompt fragment appended to CharacterPrompt when bIncludeEmotionTool is true. UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Tools",
* Pre-filled with the standard emotion instruction. Editable for customization. */ meta = (ToolTip = "Standalone tools assigned to this agent.\nDrag Tool Data Assets here.\nEach tool's PromptFragment is appended to the system prompt."))
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Emotion Tool", TArray<TObjectPtr<UPS_AI_ConvAgent_Tool_ElevenLabs>> Tools;
meta = (MultiLine = "true", EditCondition = "bIncludeEmotionTool",
ToolTip = "Prompt instructions for the emotion tool.\nAppended to CharacterPrompt when creating/updating the agent."))
FString EmotionToolPromptFragment = TEXT(
"## Facial Expressions\n"
"You have a set_emotion tool to control your facial expression. "
"Use it whenever the emotional context changes:\n"
"- Call set_emotion with emotion=\"joy\" when happy, laughing, or excited\n"
"- Call set_emotion with emotion=\"sadness\" when empathetic or discussing sad topics\n"
"- Call set_emotion with emotion=\"anger\" when frustrated or discussing injustice\n"
"- Call set_emotion with emotion=\"surprise\" when reacting to unexpected information\n"
"- Call set_emotion with emotion=\"fear\" when discussing scary or worrying topics\n"
"- Call set_emotion with emotion=\"disgust\" when reacting to unpleasant things\n"
"- Call set_emotion with emotion=\"neutral\" to return to a calm expression\n\n"
"Use intensity to match the strength of the emotion:\n"
"- \"low\" for subtle hints (slight smile, mild concern)\n"
"- \"medium\" for normal expression (default)\n"
"- \"high\" for strong reactions (big laugh, deep sadness, shock)\n\n"
"Always return to neutral when the emotional moment passes.");
// ── Expressive Mode (V3 Conversational) ───────────────────────────────── // ── Expressive Mode (V3 Conversational) ─────────────────────────────────
@ -266,24 +248,6 @@ public:
"Example: \"That's great to hear! [laughs] I'm glad we could sort that out for you.\"\n\n" "Example: \"That's great to hear! [laughs] I'm glad we could sort that out for you.\"\n\n"
"Use these tags naturally and sparingly to enhance expressiveness without overusing them."); "Use these tags naturally and sparingly to enhance expressiveness without overusing them.");
// ── Action Tool ─────────────────────────────────────────────────────────
/** Include a configurable "perform_action" client tool in the agent configuration.
* Allows the LLM to trigger physical actions defined in the referenced ActionSet.
* Actions are handled via the OnAgentActionRequested event in Blueprint. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Action Tool",
meta = (ToolTip = "Include the perform_action client tool.\nRequires an ActionSet Data Asset with at least one action."))
bool bIncludeActionTool = false;
/** Reference to a reusable Action Set Data Asset.
* Create one in Content Browser (Miscellaneous > Data Asset > PS AI ConvAgent Action Set),
* define your actions there, then drag it here.
* The same ActionSet can be shared by multiple agents. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Action Tool",
meta = (EditCondition = "bIncludeActionTool",
ToolTip = "Drag an ActionSet Data Asset here.\nDefines which actions the agent can trigger."))
TObjectPtr<UPS_AI_ConvAgent_ActionSet_ElevenLabs> ActionSet;
// ── Dynamic Variables ──────────────────────────────────────────────────── // ── Dynamic Variables ────────────────────────────────────────────────────
/** Key-value pairs sent as dynamic_variables at conversation start. /** Key-value pairs sent as dynamic_variables at conversation start.

View File

@ -168,7 +168,7 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_ClientToolCall_ElevenLabs
}; };
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
// Agent action definition (used by ActionSet Data Asset) // Agent action definition (used by ActionSet Data Asset — deprecated, kept for compat)
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
/** Defines a single action that an agent can perform during conversation. */ /** Defines a single action that an agent can perform during conversation. */
USTRUCT(BlueprintType) USTRUCT(BlueprintType)
@ -184,3 +184,57 @@ struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_AgentAction_ElevenLabs
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs") UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs")
FString Description; FString Description;
}; };
// ─────────────────────────────────────────────────────────────────────────────
// Tool parameter data type (maps to JSON Schema "type")
// ─────────────────────────────────────────────────────────────────────────────
UENUM(BlueprintType)
enum class EPS_AI_ConvAgent_ToolParamType : uint8
{
String UMETA(DisplayName = "String"),
Integer UMETA(DisplayName = "Integer"),
Number UMETA(DisplayName = "Number"),
Boolean UMETA(DisplayName = "Boolean"),
};
// ─────────────────────────────────────────────────────────────────────────────
// Tool parameter definition (used by Tool Data Asset)
// ─────────────────────────────────────────────────────────────────────────────
/** Defines a single parameter for a standalone ElevenLabs tool.
* Maps directly to the ElevenLabs tool parameter UI:
* Identifier, Data type, Required, Description (LLM prompt), Enum Values. */
USTRUCT(BlueprintType)
struct PS_AI_CONVAGENT_API FPS_AI_ConvAgent_ToolParameter_ElevenLabs
{
GENERATED_BODY()
/** Parameter identifier (snake_case). Must match the name expected by the client code.
* Examples: "emotion", "intensity", "action", "target_name" */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (ToolTip = "Parameter name (snake_case).\nThe key sent in the tool call JSON."))
FString Name;
/** Data type for this parameter. Maps to JSON Schema "type". */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (ToolTip = "Data type: String, Integer, Number, or Boolean."))
EPS_AI_ConvAgent_ToolParamType Type = EPS_AI_ConvAgent_ToolParamType::String;
/** Whether this parameter is required in the tool call. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (ToolTip = "If true, the LLM must provide this parameter."))
bool bRequired = true;
/** Description passed to the LLM explaining what this parameter is
* and how to extract or determine its value from the conversation. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (MultiLine = "true",
ToolTip = "LLM prompt describing this parameter.\nGuides the LLM on what value to provide."))
FString Description;
/** Optional predefined values the LLM can choose from.
* If empty, the LLM can use any value of the specified type.
* Examples: ["joy","sadness","anger"] for emotion, ["low","medium","high"] for intensity. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "PS AI ConvAgent|ElevenLabs",
meta = (ToolTip = "Predefined enum values (optional).\nIf set, the LLM must choose from these."))
TArray<FString> EnumValues;
};

View File

@ -0,0 +1,90 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Engine/DataAsset.h"
#include "PS_AI_ConvAgent_Definitions.h"
#include "PS_AI_ConvAgent_Tool_ElevenLabs.generated.h"
/**
* Standalone tool definition for ElevenLabs Conversational AI agents.
*
* Each tool is a global resource on ElevenLabs, created once and shared
* by any number of agents. Assign tools to agents via the Tools array
* in the Agent Config data asset.
*
* Create via Content Browser > Miscellaneous > Data Asset >
* PS AI ConvAgent Tool (ElevenLabs).
*
* Examples:
* - set_emotion: drives facial expressions (emotion + intensity params)
* - perform_action: triggers physical actions (flee, draw_weapon, etc.)
* - Custom tools: any client-side tool the LLM can invoke
*/
UCLASS(BlueprintType, Blueprintable,
DisplayName = "PS AI ConvAgent Tool (ElevenLabs)")
class PS_AI_CONVAGENT_API UPS_AI_ConvAgent_Tool_ElevenLabs : public UPrimaryDataAsset
{
GENERATED_BODY()
public:
// ── Identity ────────────────────────────────────────────────────────────
/** Tool name (snake_case). Must match the name used in the LLM tool call.
* Examples: "set_emotion", "perform_action", "open_door" */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Tool",
meta = (ToolTip = "Tool name sent to/from the LLM (snake_case)."))
FString ToolName;
/** ElevenLabs tool ID. Auto-populated when you click Create Tool.
* Leave empty to create a new tool; set to update an existing one. */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Tool",
meta = (ToolTip = "ElevenLabs tool ID.\nAuto-populated on Create. Used for Update/Fetch."))
FString ToolID;
/** Short description of what this tool does and when the LLM should use it.
* Sent as the tool's 'description' field to ElevenLabs. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Tool",
meta = (MultiLine = "true",
ToolTip = "Tool description for the LLM.\nExplains when and how to invoke this tool."))
FString ToolDescription;
// ── Prompt Fragment ─────────────────────────────────────────────────────
/** System prompt fragment appended to the agent's CharacterPrompt
* when this tool is assigned. Provides detailed instructions
* for the LLM on how to use this tool in context. */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Prompt",
meta = (MultiLine = "true",
ToolTip = "Prompt instructions appended to CharacterPrompt.\nDescribes when and how the agent should use this tool."))
FString PromptFragment;
// ── Parameters ──────────────────────────────────────────────────────────
/** Tool parameters sent to the LLM.
* Each parameter defines an input the LLM must provide when calling this tool.
* Maps to the ElevenLabs tool parameter editor (Identifier, Data type,
* Required, Description, Enum Values).
*
* Examples:
* - set_emotion: param "emotion" (enum: joy,sadness,...) + param "intensity" (enum: low,medium,high)
* - perform_action: param "action" (enum: flee,draw_weapon,...) */
UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Parameters",
meta = (TitleProperty = "Name",
ToolTip = "Tool parameters.\nEach one is an input the LLM provides when calling this tool."))
TArray<FPS_AI_ConvAgent_ToolParameter_ElevenLabs> Parameters;
// ── Sync metadata ───────────────────────────────────────────────────────
/** ISO 8601 timestamp of the last successful sync with ElevenLabs. */
UPROPERTY(VisibleAnywhere, BlueprintReadOnly, Category = "Tool",
meta = (ToolTip = "Last sync timestamp (UTC)."))
FString LastSyncTimestamp;
// UPrimaryDataAsset interface
virtual FPrimaryAssetId GetPrimaryAssetId() const override
{
return FPrimaryAssetId(TEXT("Tool_ElevenLabs"), GetFName());
}
};

View File

@ -6,6 +6,8 @@
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h" #include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h" #include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h" #include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_Tool_ElevenLabs.h"
#include "PS_AI_ConvAgent_ToolCustomization_ElevenLabs.h"
/** /**
* Editor module for PS_AI_ConvAgent plugin. * Editor module for PS_AI_ConvAgent plugin.
@ -28,6 +30,11 @@ public:
UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName(), UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName(),
FOnGetDetailCustomizationInstance::CreateStatic( FOnGetDetailCustomizationInstance::CreateStatic(
&FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::MakeInstance)); &FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::MakeInstance));
PropertyModule.RegisterCustomClassLayout(
UPS_AI_ConvAgent_Tool_ElevenLabs::StaticClass()->GetFName(),
FOnGetDetailCustomizationInstance::CreateStatic(
&FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::MakeInstance));
} }
virtual void ShutdownModule() override virtual void ShutdownModule() override
@ -42,6 +49,9 @@ public:
PropertyModule.UnregisterCustomClassLayout( PropertyModule.UnregisterCustomClassLayout(
UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName()); UPS_AI_ConvAgent_ActionSet_ElevenLabs::StaticClass()->GetFName());
PropertyModule.UnregisterCustomClassLayout(
UPS_AI_ConvAgent_Tool_ElevenLabs::StaticClass()->GetFName());
} }
} }
}; };

View File

@ -2,24 +2,13 @@
#include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h" #include "PS_AI_ConvAgent_ActionSetCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h" #include "PS_AI_ConvAgent_ActionSet_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent.h" #include "PS_AI_ConvAgent.h"
#include "DetailLayoutBuilder.h" #include "DetailLayoutBuilder.h"
#include "DetailCategoryBuilder.h" #include "DetailCategoryBuilder.h"
#include "DetailWidgetRow.h" #include "DetailWidgetRow.h"
#include "Widgets/Input/SButton.h"
#include "Widgets/Text/STextBlock.h" #include "Widgets/Text/STextBlock.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "HttpModule.h"
#include "Interfaces/IHttpRequest.h"
#include "Interfaces/IHttpResponse.h"
#include "Dom/JsonObject.h"
#include "Serialization/JsonWriter.h"
#include "Serialization/JsonSerializer.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ActionSetEditor, Log, All); DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ActionSetEditor, Log, All);
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
@ -38,200 +27,37 @@ void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::CustomizeDetails(
{ {
DetailBuilder.GetObjectsBeingCustomized(SelectedObjects); DetailBuilder.GetObjectsBeingCustomized(SelectedObjects);
// ── Agent Sync category ───────────────────────────────────────────────── // ── Deprecation notice ──────────────────────────────────────────────────
IDetailCategoryBuilder& SyncCat = DetailBuilder.EditCategory( IDetailCategoryBuilder& DeprecatedCat = DetailBuilder.EditCategory(
TEXT("Agent Sync"), TEXT("Deprecated"),
FText::FromString(TEXT("Agent Sync")), FText::FromString(TEXT("Deprecated")),
ECategoryPriority::Important); ECategoryPriority::Important);
SyncCat.AddCustomRow(FText::FromString(TEXT("Update All Agents"))) DeprecatedCat.AddCustomRow(FText::FromString(TEXT("Deprecation Notice")))
.WholeRowContent() .WholeRowContent()
[ [
SNew(SVerticalBox) SNew(SVerticalBox)
+ SVerticalBox::Slot() + SVerticalBox::Slot()
.AutoHeight() .AutoHeight()
.Padding(0, 4) .Padding(0, 4)
[
SNew(SButton)
.Text(FText::FromString(TEXT("Update All Agents")))
.ToolTipText(FText::FromString(
TEXT("PATCH all AgentConfig assets that reference this ActionSet.")))
.OnClicked_Lambda([this]()
{
OnUpdateAllAgentsClicked();
return FReply::Handled();
})
]
+ SVerticalBox::Slot()
.AutoHeight()
.Padding(0, 2)
[ [
SAssignNew(StatusTextBlock, STextBlock) SAssignNew(StatusTextBlock, STextBlock)
.Text(FText::GetEmpty()) .Text(FText::FromString(
TEXT("ActionSet is deprecated. Use Tool Data Assets instead.\n"
"Create a Tool (Miscellaneous > Data Asset > PS AI ConvAgent Tool),\n"
"add your actions there, then assign it to agents via the Tools array.")))
.Font(IDetailLayoutBuilder::GetDetailFont()) .Font(IDetailLayoutBuilder::GetDetailFont())
.ColorAndOpacity(FSlateColor(FLinearColor(0.3f, 0.7f, 1.0f))) .ColorAndOpacity(FSlateColor(FLinearColor(1.0f, 0.7f, 0.2f))) // orange/warning
] ]
]; ];
} }
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
// Update All Agents // OnUpdateAllAgentsClicked — Deprecated, no-op
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::OnUpdateAllAgentsClicked() void FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs::OnUpdateAllAgentsClicked()
{ {
const UPS_AI_ConvAgent_ActionSet_ElevenLabs* ActionSetAsset = GetEditedAsset(); SetStatusError(TEXT("ActionSet is deprecated. Use Tool Data Assets instead."));
if (!ActionSetAsset)
{
SetStatusError(TEXT("No ActionSet asset selected."));
return;
}
const FString APIKey = GetAPIKey();
if (APIKey.IsEmpty())
{
SetStatusError(TEXT("API Key not set in Project Settings > PS AI ConvAgent - ElevenLabs."));
return;
}
// ── Scan all AgentConfig assets via Asset Registry ───────────────────────
FAssetRegistryModule& ARModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>("AssetRegistry");
IAssetRegistry& AssetRegistry = ARModule.Get();
TArray<FAssetData> AllAgentConfigs;
AssetRegistry.GetAssetsByClass(
UPS_AI_ConvAgent_AgentConfig_ElevenLabs::StaticClass()->GetClassPathName(),
AllAgentConfigs, true);
// ── Filter: bIncludeActionTool && ActionSet == this asset && AgentID not empty ─
TArray<UPS_AI_ConvAgent_AgentConfig_ElevenLabs*> MatchingConfigs;
for (const FAssetData& AD : AllAgentConfigs)
{
UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config =
Cast<UPS_AI_ConvAgent_AgentConfig_ElevenLabs>(AD.GetAsset());
if (!Config) continue;
if (!Config->bIncludeActionTool) continue;
if (Config->ActionSet != ActionSetAsset) continue;
if (Config->AgentID.IsEmpty()) continue;
MatchingConfigs.Add(Config);
}
if (MatchingConfigs.Num() == 0)
{
SetStatusError(TEXT("No AgentConfig assets reference this ActionSet (with AgentID set)."));
return;
}
SetStatusText(FString::Printf(TEXT("Updating %d agent(s)..."), MatchingConfigs.Num()));
// ── Shared counter for async completion tracking ─────────────────────────
struct FBatchState
{
int32 Total = 0;
FThreadSafeCounter Succeeded;
FThreadSafeCounter Failed;
TArray<FString> Errors;
FCriticalSection ErrorLock;
};
TSharedPtr<FBatchState> State = MakeShareable(new FBatchState());
State->Total = MatchingConfigs.Num();
TWeakPtr<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs> WeakSelf =
StaticCastSharedRef<FPS_AI_ConvAgent_ActionSetCustomization_ElevenLabs>(this->AsShared());
for (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config : MatchingConfigs)
{
TSharedPtr<FJsonObject> Payload =
FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildAgentPayloadForAsset(Config);
FString PayloadStr;
TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
FJsonSerializer::Serialize(Payload.ToSharedRef(), Writer);
const FString URL = FString::Printf(
TEXT("https://api.elevenlabs.io/v1/convai/agents/%s"), *Config->AgentID);
TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
Request->SetURL(URL);
Request->SetVerb(TEXT("PATCH"));
Request->SetHeader(TEXT("xi-api-key"), APIKey);
Request->SetHeader(TEXT("Content-Type"), TEXT("application/json"));
Request->SetContentAsString(PayloadStr);
// Capture Config as weak pointer for safety.
TWeakObjectPtr<UPS_AI_ConvAgent_AgentConfig_ElevenLabs> WeakConfig(Config);
FString AgentName = Config->AgentName.IsEmpty() ? Config->AgentID : Config->AgentName;
Request->OnProcessRequestComplete().BindLambda(
[WeakSelf, State, WeakConfig, AgentName]
(FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
{
bool bSuccess = false;
FString ErrorMsg;
if (!bConnected || !Resp.IsValid())
{
ErrorMsg = FString::Printf(TEXT("%s: connection failed"), *AgentName);
}
else if (Resp->GetResponseCode() != 200)
{
ErrorMsg = FString::Printf(TEXT("%s: HTTP %d"),
*AgentName, Resp->GetResponseCode());
}
else
{
bSuccess = true;
// Update LastSyncTimestamp on the asset.
if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Cfg = WeakConfig.Get())
{
Cfg->Modify();
Cfg->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
}
}
if (bSuccess)
{
State->Succeeded.Increment();
}
else
{
State->Failed.Increment();
FScopeLock Lock(&State->ErrorLock);
State->Errors.Add(ErrorMsg);
}
// Check if all requests are done.
const int32 Done = State->Succeeded.GetValue() + State->Failed.GetValue();
if (Done >= State->Total)
{
auto Pinned = WeakSelf.Pin();
if (!Pinned.IsValid()) return;
if (State->Failed.GetValue() == 0)
{
Pinned->SetStatusSuccess(FString::Printf(
TEXT("Updated %d/%d agents successfully."),
State->Succeeded.GetValue(), State->Total));
}
else
{
FString AllErrors;
{
FScopeLock Lock(&State->ErrorLock);
AllErrors = FString::Join(State->Errors, TEXT(", "));
}
Pinned->SetStatusError(FString::Printf(
TEXT("Updated %d/%d agents. Failures: %s"),
State->Succeeded.GetValue(), State->Total, *AllErrors));
}
}
});
Request->ProcessRequest();
UE_LOG(LogPS_AI_ActionSetEditor, Log,
TEXT(" → PATCH agent '%s' (ID: %s)"), *AgentName, *Config->AgentID);
}
} }
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────

View File

@ -2,8 +2,11 @@
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h" #include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h" #include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_Tool_ElevenLabs.h"
#include "PS_AI_ConvAgent.h" #include "PS_AI_ConvAgent.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "DetailLayoutBuilder.h" #include "DetailLayoutBuilder.h"
#include "DetailCategoryBuilder.h" #include "DetailCategoryBuilder.h"
#include "DetailWidgetRow.h" #include "DetailWidgetRow.h"
@ -540,108 +543,59 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnVoiceSelected(
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchModelsClicked() void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchModelsClicked()
{ {
const FString APIKey = GetAPIKey(); // Conversational AI agents support a fixed set of TTS models.
if (APIKey.IsEmpty()) // These are NOT returned by the general /v1/models endpoint
// (which lists speech-synthesis models, not agent TTS models).
// Source: ElevenLabs ConvAI API — PATCH /v1/convai/agents/{id} model_id enum.
struct FConvAIModel
{ {
SetStatusError(TEXT("API Key not set in Project Settings > PS AI ConvAgent - ElevenLabs.")); const TCHAR* ID;
return; const TCHAR* DisplayName;
};
static const FConvAIModel ConvAIModels[] = {
{ TEXT("eleven_v3_conversational"), TEXT("V3 Conversational (eleven_v3_conversational)") },
{ TEXT("eleven_turbo_v2_5"), TEXT("Turbo v2.5 (eleven_turbo_v2_5)") },
{ TEXT("eleven_flash_v2_5"), TEXT("Flash v2.5 (eleven_flash_v2_5)") },
{ TEXT("eleven_turbo_v2"), TEXT("Turbo v2 (eleven_turbo_v2)") },
{ TEXT("eleven_flash_v2"), TEXT("Flash v2 (eleven_flash_v2)") },
{ TEXT("eleven_multilingual_v2"), TEXT("Multilingual v2 (eleven_multilingual_v2)") },
};
ModelDisplayNames.Reset();
ModelIDs.Reset();
for (const auto& M : ConvAIModels)
{
ModelDisplayNames.Add(MakeShareable(new FString(M.DisplayName)));
ModelIDs.Add(M.ID);
} }
SetStatusText(TEXT("Fetching models...")); // Pre-select the currently set TTSModelID.
if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = GetEditedAsset())
TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
Request->SetURL(TEXT("https://api.elevenlabs.io/v1/models"));
Request->SetVerb(TEXT("GET"));
Request->SetHeader(TEXT("xi-api-key"), APIKey);
Request->SetHeader(TEXT("Accept"), TEXT("application/json"));
TWeakPtr<FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs> WeakSelf =
StaticCastSharedRef<FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs>(this->AsShared());
Request->OnProcessRequestComplete().BindLambda(
[WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
{ {
auto Pinned = WeakSelf.Pin(); int32 Idx = ModelIDs.IndexOfByKey(Asset->TTSModelID);
if (!Pinned.IsValid()) return;
if (!bConnected || !Resp.IsValid()) // Inject the asset's current model if it's not in our known list.
{
Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API."));
return;
}
if (Resp->GetResponseCode() != 200)
{
Pinned->SetStatusError(ParseAPIError(
Resp->GetResponseCode(), Resp->GetContentAsString()));
return;
}
// Response is a JSON array of model objects.
TArray<TSharedPtr<FJsonValue>> Models;
if (!FJsonSerializer::Deserialize(
TJsonReaderFactory<>::Create(Resp->GetContentAsString()), Models))
{
Pinned->SetStatusError(TEXT("Failed to parse models JSON."));
return;
}
Pinned->ModelDisplayNames.Reset();
Pinned->ModelIDs.Reset();
for (const auto& ModelVal : Models)
{
const TSharedPtr<FJsonObject>* ModelObj = nullptr;
if (!ModelVal->TryGetObject(ModelObj)) continue;
FString Name, ID;
(*ModelObj)->TryGetStringField(TEXT("name"), Name);
(*ModelObj)->TryGetStringField(TEXT("model_id"), ID);
// Only show TTS-capable models.
bool bCanTTS = false;
(*ModelObj)->TryGetBoolField(TEXT("can_do_text_to_speech"), bCanTTS);
if (!bCanTTS) continue;
if (!ID.IsEmpty())
{
FString DisplayStr = FString::Printf(TEXT("%s (%s)"), *Name, *ID);
Pinned->ModelDisplayNames.Add(MakeShareable(new FString(DisplayStr)));
Pinned->ModelIDs.Add(ID);
}
}
// Pre-select the currently set TTSModelID if it exists in the list.
if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = Pinned->GetEditedAsset())
{
int32 Idx = Pinned->ModelIDs.IndexOfByKey(Asset->TTSModelID);
// Agent-only models (e.g. eleven_v3_conversational) may not appear
// in the general /v1/models list. Inject the asset's current model
// so the combo always reflects the actual value.
if (Idx == INDEX_NONE && !Asset->TTSModelID.IsEmpty()) if (Idx == INDEX_NONE && !Asset->TTSModelID.IsEmpty())
{ {
FString DisplayStr = FString::Printf(TEXT("%s"), *Asset->TTSModelID); ModelDisplayNames.Add(MakeShareable(new FString(Asset->TTSModelID)));
Pinned->ModelDisplayNames.Add(MakeShareable(new FString(DisplayStr))); ModelIDs.Add(Asset->TTSModelID);
Pinned->ModelIDs.Add(Asset->TTSModelID); Idx = ModelIDs.Num() - 1;
Idx = Pinned->ModelIDs.Num() - 1;
} }
if (Idx != INDEX_NONE && Pinned->ModelComboBox.IsValid()) if (Idx != INDEX_NONE && ModelComboBox.IsValid())
{ {
Pinned->ModelComboBox->SetSelectedItem(Pinned->ModelDisplayNames[Idx]); ModelComboBox->SetSelectedItem(ModelDisplayNames[Idx]);
} }
} }
if (Pinned->ModelComboBox.IsValid()) if (ModelComboBox.IsValid())
{ {
Pinned->ModelComboBox->RefreshOptions(); ModelComboBox->RefreshOptions();
} }
Pinned->SetStatusSuccess(FString::Printf(TEXT("Fetched %d TTS models."), Pinned->ModelIDs.Num())); SetStatusSuccess(FString::Printf(TEXT("Loaded %d ConvAI TTS models."), ModelIDs.Num()));
});
Request->ProcessRequest();
} }
void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnModelSelected( void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnModelSelected(
@ -656,6 +610,16 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnModelSelected(
{ {
Asset->Modify(); Asset->Modify();
Asset->TTSModelID = ModelIDs[Idx]; Asset->TTSModelID = ModelIDs[Idx];
// Auto-sync Expressive Mode: V3 Conversational requires it, other models don't support it.
const bool bIsV3 = Asset->TTSModelID == TEXT("eleven_v3_conversational");
if (Asset->bExpressiveMode != bIsV3)
{
Asset->bExpressiveMode = bIsV3;
UE_LOG(LogPS_AI_AgentConfigEditor, Log,
TEXT("TTS model changed to '%s' — %s Expressive Mode."),
*Asset->TTSModelID, bIsV3 ? TEXT("enabling") : TEXT("disabling"));
}
} }
} }
@ -937,7 +901,7 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnLanguageSelected(
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked() void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked()
{ {
const UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = GetEditedAsset(); UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = GetEditedAsset();
if (!Asset) if (!Asset)
{ {
SetStatusError(TEXT("No asset selected.")); SetStatusError(TEXT("No asset selected."));
@ -959,6 +923,18 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked(
SetStatusText(TEXT("Creating agent...")); SetStatusText(TEXT("Creating agent..."));
// Validate that all assigned tools have been created on ElevenLabs
for (const auto& Tool : Asset->Tools)
{
if (Tool && Tool->ToolID.IsEmpty())
{
SetStatusError(FString::Printf(
TEXT("Tool '%s' has no ToolID. Create it first via the Tool editor."),
*Tool->ToolName));
return;
}
}
TSharedPtr<FJsonObject> Payload = BuildAgentPayload(); TSharedPtr<FJsonObject> Payload = BuildAgentPayload();
FString PayloadStr; FString PayloadStr;
TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr); TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
@ -977,18 +953,18 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked(
Request->OnProcessRequestComplete().BindLambda( Request->OnProcessRequestComplete().BindLambda(
[WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected) [WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
{ {
auto Pinned = WeakSelf.Pin(); auto P = WeakSelf.Pin();
if (!Pinned.IsValid()) return; if (!P.IsValid()) return;
if (!bConnected || !Resp.IsValid()) if (!bConnected || !Resp.IsValid())
{ {
Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API.")); P->SetStatusError(TEXT("Could not reach ElevenLabs API."));
return; return;
} }
if (Resp->GetResponseCode() != 200 && Resp->GetResponseCode() != 201) if (Resp->GetResponseCode() != 200 && Resp->GetResponseCode() != 201)
{ {
Pinned->SetStatusError(ParseAPIError( P->SetStatusError(ParseAPIError(
Resp->GetResponseCode(), Resp->GetContentAsString())); Resp->GetResponseCode(), Resp->GetContentAsString()));
return; return;
} }
@ -997,26 +973,26 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked(
if (!FJsonSerializer::Deserialize( if (!FJsonSerializer::Deserialize(
TJsonReaderFactory<>::Create(Resp->GetContentAsString()), Root) || !Root.IsValid()) TJsonReaderFactory<>::Create(Resp->GetContentAsString()), Root) || !Root.IsValid())
{ {
Pinned->SetStatusError(TEXT("Failed to parse response.")); P->SetStatusError(TEXT("Failed to parse response."));
return; return;
} }
FString NewAgentID; FString NewAgentID;
if (!Root->TryGetStringField(TEXT("agent_id"), NewAgentID)) if (!Root->TryGetStringField(TEXT("agent_id"), NewAgentID))
{ {
Pinned->SetStatusError(TEXT("No 'agent_id' in response.")); P->SetStatusError(TEXT("No 'agent_id' in response."));
return; return;
} }
if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = Pinned->GetEditedAsset()) if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* A = P->GetEditedAsset())
{ {
Asset->Modify(); A->Modify();
Asset->AgentID = NewAgentID; A->AgentID = NewAgentID;
Asset->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601(); A->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
Asset->PostEditChange(); A->PostEditChange();
} }
Pinned->SetStatusSuccess(FString::Printf(TEXT("Agent created: %s"), *NewAgentID)); P->SetStatusSuccess(FString::Printf(TEXT("Agent created: %s"), *NewAgentID));
}); });
Request->ProcessRequest(); Request->ProcessRequest();
@ -1027,7 +1003,7 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnCreateAgentClicked(
// ───────────────────────────────────────────────────────────────────────────── // ─────────────────────────────────────────────────────────────────────────────
void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnUpdateAgentClicked() void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnUpdateAgentClicked()
{ {
const UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = GetEditedAsset(); UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = GetEditedAsset();
if (!Asset) if (!Asset)
{ {
SetStatusError(TEXT("No asset selected.")); SetStatusError(TEXT("No asset selected."));
@ -1047,6 +1023,18 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnUpdateAgentClicked(
return; return;
} }
// Validate that all assigned tools have been created on ElevenLabs
for (const auto& Tool : Asset->Tools)
{
if (Tool && Tool->ToolID.IsEmpty())
{
SetStatusError(FString::Printf(
TEXT("Tool '%s' has no ToolID. Create it first via the Tool editor."),
*Tool->ToolName));
return;
}
}
SetStatusText(TEXT("Updating agent...")); SetStatusText(TEXT("Updating agent..."));
TSharedPtr<FJsonObject> Payload = BuildAgentPayload(); TSharedPtr<FJsonObject> Payload = BuildAgentPayload();
@ -1070,29 +1058,29 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnUpdateAgentClicked(
Request->OnProcessRequestComplete().BindLambda( Request->OnProcessRequestComplete().BindLambda(
[WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected) [WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
{ {
auto Pinned = WeakSelf.Pin(); auto P = WeakSelf.Pin();
if (!Pinned.IsValid()) return; if (!P.IsValid()) return;
if (!bConnected || !Resp.IsValid()) if (!bConnected || !Resp.IsValid())
{ {
Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API.")); P->SetStatusError(TEXT("Could not reach ElevenLabs API."));
return; return;
} }
if (Resp->GetResponseCode() != 200) if (Resp->GetResponseCode() != 200)
{ {
Pinned->SetStatusError(ParseAPIError( P->SetStatusError(ParseAPIError(
Resp->GetResponseCode(), Resp->GetContentAsString())); Resp->GetResponseCode(), Resp->GetContentAsString()));
return; return;
} }
if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset = Pinned->GetEditedAsset()) if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* A = P->GetEditedAsset())
{ {
Asset->Modify(); A->Modify();
Asset->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601(); A->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
} }
Pinned->SetStatusSuccess(TEXT("Agent updated successfully.")); P->SetStatusSuccess(TEXT("Agent updated successfully."));
}); });
Request->ProcessRequest(); Request->ProcessRequest();
@ -1287,46 +1275,16 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
} }
} }
// 3. Emotion tool fragment // 3. Tool PromptFragments — strip each assigned tool's fragment
if (!Asset->EmotionToolPromptFragment.IsEmpty()) for (const auto& Tool : Asset->Tools)
{ {
int32 Idx = Prompt.Find(Asset->EmotionToolPromptFragment, if (!Tool || Tool->PromptFragment.IsEmpty()) continue;
int32 Idx = Prompt.Find(Tool->PromptFragment,
ESearchCase::CaseSensitive); ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE) if (Idx != INDEX_NONE)
{ {
Prompt.LeftInline(Idx); Prompt.LeftInline(Idx);
} break; // First match truncates everything after
else
{
const FString EmotionMarker = TEXT("\n\n## Facial Expressions");
int32 MarkerIdx = Prompt.Find(EmotionMarker,
ESearchCase::CaseSensitive);
if (MarkerIdx != INDEX_NONE)
{
Prompt.LeftInline(MarkerIdx);
}
}
}
// 4. Action tool fragment (from ActionSet)
if (Asset->ActionSet && !Asset->ActionSet->ActionToolPromptFragment.IsEmpty())
{
int32 Idx = Prompt.Find(Asset->ActionSet->ActionToolPromptFragment,
ESearchCase::CaseSensitive);
if (Idx != INDEX_NONE)
{
Prompt.LeftInline(Idx);
}
else
{
// Fallback: strip by marker
const FString ActionMarker = TEXT("\n\n## Physical Actions");
int32 MarkerIdx = Prompt.Find(ActionMarker,
ESearchCase::CaseSensitive);
if (MarkerIdx != INDEX_NONE)
{
Prompt.LeftInline(MarkerIdx);
}
} }
} }
@ -1388,7 +1346,7 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
Asset->MaxTurns = MaxTurns; Asset->MaxTurns = MaxTurns;
} }
// expressive_mode (V3 Conversational) // expressive_mode — legacy location (agent level), kept for backwards compat
bool bExpressive = false; bool bExpressive = false;
if ((*AgentObj)->TryGetBoolField(TEXT("expressive_mode"), bExpressive)) if ((*AgentObj)->TryGetBoolField(TEXT("expressive_mode"), bExpressive))
{ {
@ -1411,11 +1369,15 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
{ {
Asset->TTSModelID = ModelID; Asset->TTSModelID = ModelID;
// Auto-detect Expressive Mode from V3 Conversational model // Auto-sync Expressive Mode from model selection
if (ModelID == TEXT("eleven_v3_conversational")) Asset->bExpressiveMode = (ModelID == TEXT("eleven_v3_conversational"));
{
Asset->bExpressiveMode = true;
} }
// expressive_mode from tts block (authoritative, overrides agent-level)
bool bTTSExpressive = false;
if ((*TTSObj)->TryGetBoolField(TEXT("expressive_mode"), bTTSExpressive))
{
Asset->bExpressiveMode = bTTSExpressive;
} }
double Stability = 0.5; double Stability = 0.5;
@ -1452,6 +1414,74 @@ void FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::OnFetchAgentClicked()
} }
} }
// Resolve tool_ids to Tool DataAssets via Asset Registry lookup
{
const TArray<TSharedPtr<FJsonValue>>* ToolIDsArray = nullptr;
if (!Root->TryGetArrayField(TEXT("use_tool_ids"), ToolIDsArray))
{
Root->TryGetArrayField(TEXT("tool_ids"), ToolIDsArray);
}
if (ToolIDsArray && ToolIDsArray->Num() > 0)
{
// Gather all tool IDs from the agent
TArray<FString> FetchedToolIDs;
for (const auto& Val : *ToolIDsArray)
{
FString TID;
if (Val->TryGetString(TID) && !TID.IsEmpty())
{
FetchedToolIDs.Add(TID);
}
}
// Search all Tool DataAssets in the project by ToolID
FAssetRegistryModule& ARModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
"AssetRegistry");
IAssetRegistry& AssetRegistry = ARModule.Get();
TArray<FAssetData> AllToolAssets;
AssetRegistry.GetAssetsByClass(
UPS_AI_ConvAgent_Tool_ElevenLabs::StaticClass()->GetClassPathName(),
AllToolAssets, true);
TArray<UPS_AI_ConvAgent_Tool_ElevenLabs*> ResolvedTools;
for (const FString& TID : FetchedToolIDs)
{
bool bFound = false;
for (const FAssetData& AD : AllToolAssets)
{
UPS_AI_ConvAgent_Tool_ElevenLabs* ToolAsset =
Cast<UPS_AI_ConvAgent_Tool_ElevenLabs>(AD.GetAsset());
if (ToolAsset && ToolAsset->ToolID == TID)
{
ResolvedTools.Add(ToolAsset);
bFound = true;
UE_LOG(LogPS_AI_AgentConfigEditor, Log,
TEXT(" -> Resolved tool_id '%s' -> '%s'"),
*TID, *ToolAsset->ToolName);
break;
}
}
if (!bFound)
{
UE_LOG(LogPS_AI_AgentConfigEditor, Warning,
TEXT(" -> tool_id '%s' not found in any Tool DataAsset"), *TID);
}
}
// Update agent's Tools array with resolved assets
if (ResolvedTools.Num() > 0)
{
Asset->Tools.Empty();
for (auto* T : ResolvedTools)
{
Asset->Tools.Add(T);
}
}
}
}
Asset->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601(); Asset->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
// Refresh Language combo (static list, instant) // Refresh Language combo (static list, instant)
@ -1615,14 +1645,14 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
UE_LOG(LogPS_AI_AgentConfigEditor, Log, UE_LOG(LogPS_AI_AgentConfigEditor, Log,
TEXT("BuildAgentPayload: CharacterPrompt=%d chars, bMultilingual=%d, bAutoLangInstr=%d, Language='%s', " TEXT("BuildAgentPayload: CharacterPrompt=%d chars, bMultilingual=%d, bAutoLangInstr=%d, Language='%s', "
"LangFragment=%d chars, MultiFragment=%d chars, bEmotionTool=%d, bExpressiveMode=%d"), "LangFragment=%d chars, MultiFragment=%d chars, Tools=%d, bExpressiveMode=%d"),
Asset->CharacterPrompt.Len(), Asset->CharacterPrompt.Len(),
Asset->bMultilingual, Asset->bMultilingual,
Asset->bAutoLanguageInstruction, Asset->bAutoLanguageInstruction,
*Asset->Language, *Asset->Language,
Asset->LanguagePromptFragment.Len(), Asset->LanguagePromptFragment.Len(),
Asset->MultilingualPromptFragment.Len(), Asset->MultilingualPromptFragment.Len(),
Asset->bIncludeEmotionTool, Asset->Tools.Num(),
Asset->bExpressiveMode); Asset->bExpressiveMode);
// Language handling: multilingual mode vs fixed-language mode. // Language handling: multilingual mode vs fixed-language mode.
@ -1653,22 +1683,16 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended LanguagePromptFragment for '%s'"), *DisplayLang); UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended LanguagePromptFragment for '%s'"), *DisplayLang);
} }
// Append emotion tool instructions. // Append each assigned tool's PromptFragment.
if (Asset->bIncludeEmotionTool && !Asset->EmotionToolPromptFragment.IsEmpty()) for (const auto& Tool : Asset->Tools)
{
if (Tool && !Tool->PromptFragment.IsEmpty())
{ {
FullPrompt += TEXT("\n\n"); FullPrompt += TEXT("\n\n");
FullPrompt += Asset->EmotionToolPromptFragment; FullPrompt += Tool->PromptFragment;
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended EmotionToolPromptFragment")); UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" -> Appended PromptFragment from tool '%s'"),
*Tool->ToolName);
} }
// Append action tool instructions from ActionSet.
if (Asset->bIncludeActionTool && Asset->ActionSet
&& Asset->ActionSet->Actions.Num() > 0
&& !Asset->ActionSet->ActionToolPromptFragment.IsEmpty())
{
FullPrompt += TEXT("\n\n");
FullPrompt += Asset->ActionSet->ActionToolPromptFragment;
UE_LOG(LogPS_AI_AgentConfigEditor, Log, TEXT(" → Appended ActionToolPromptFragment from ActionSet"));
} }
// Append expressive mode instructions (V3 Conversational audio tags). // Append expressive mode instructions (V3 Conversational audio tags).
@ -1689,26 +1713,19 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
PromptObj->SetStringField(TEXT("llm"), Asset->LLMModel); PromptObj->SetStringField(TEXT("llm"), Asset->LLMModel);
} }
// Build tools array: emotion tool + action tool (API path: conversation_config.agent.prompt.tools) // Tools are standalone resources managed via /v1/convai/tools.
TArray<TSharedPtr<FJsonValue>> Tools; // Reference them by ID in prompt.tool_ids.
if (Asset->bIncludeEmotionTool) // Each tool's ToolID is stored on its own Tool DataAsset.
TArray<TSharedPtr<FJsonValue>> ToolIDs;
for (const auto& Tool : Asset->Tools)
{ {
TSharedPtr<FJsonObject> EmotionTool = BuildEmotionToolDefinition(); if (Tool && !Tool->ToolID.IsEmpty())
Tools.Add(MakeShareable(new FJsonValueObject(EmotionTool)));
}
if (Asset->bIncludeActionTool && Asset->ActionSet
&& Asset->ActionSet->Actions.Num() > 0)
{ {
TSharedPtr<FJsonObject> ActionTool = BuildActionToolDefinition(Asset); ToolIDs.Add(MakeShareable(new FJsonValueString(Tool->ToolID)));
if (ActionTool)
{
Tools.Add(MakeShareable(new FJsonValueObject(ActionTool)));
} }
} }
if (Tools.Num() > 0) // Always set the array (empty = clear all tools from agent)
{ PromptObj->SetArrayField(TEXT("tool_ids"), ToolIDs);
PromptObj->SetArrayField(TEXT("tools"), Tools);
}
// agent // agent
TSharedPtr<FJsonObject> AgentObj = MakeShareable(new FJsonObject()); TSharedPtr<FJsonObject> AgentObj = MakeShareable(new FJsonObject());
@ -1725,11 +1742,6 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
{ {
AgentObj->SetNumberField(TEXT("max_tokens"), Asset->MaxTurns); AgentObj->SetNumberField(TEXT("max_tokens"), Asset->MaxTurns);
} }
if (Asset->bExpressiveMode)
{
AgentObj->SetBoolField(TEXT("expressive_mode"), true);
}
// tts // tts
TSharedPtr<FJsonObject> TTSObj = MakeShareable(new FJsonObject()); TSharedPtr<FJsonObject> TTSObj = MakeShareable(new FJsonObject());
if (!Asset->VoiceID.IsEmpty()) if (!Asset->VoiceID.IsEmpty())
@ -1737,24 +1749,17 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
TTSObj->SetStringField(TEXT("voice_id"), Asset->VoiceID); TTSObj->SetStringField(TEXT("voice_id"), Asset->VoiceID);
} }
// Expressive mode lives in the TTS block per the ElevenLabs API.
// The model dropdown auto-syncs bExpressiveMode (V3 ↔ expressive),
// so no forced override is needed here.
TTSObj->SetBoolField(TEXT("expressive_mode"), Asset->bExpressiveMode);
// Resolve TTS model. // Resolve TTS model.
// Multilingual and non-English agents require a multilingual-capable model: // Multilingual and non-English agents require a multilingual-capable model:
// eleven_multilingual_v2, eleven_turbo_v2_5, eleven_flash_v2_5 // eleven_multilingual_v2, eleven_turbo_v2_5, eleven_flash_v2_5
// Monolingual models (e.g. eleven_monolingual_v1) only support English. // Monolingual models (e.g. eleven_monolingual_v1) only support English.
FString ResolvedModelID = Asset->TTSModelID; FString ResolvedModelID = Asset->TTSModelID;
// Expressive mode requires V3 Conversational — override if needed.
if (Asset->bExpressiveMode)
{
if (ResolvedModelID != TEXT("eleven_v3_conversational"))
{
UE_LOG(LogPS_AI_AgentConfigEditor, Warning,
TEXT("Expressive mode: overriding TTS model '%s' → eleven_v3_conversational (required for audio tags)."),
*ResolvedModelID);
ResolvedModelID = TEXT("eleven_v3_conversational");
}
}
auto IsMultilingualModel = [](const FString& ModelID) -> bool auto IsMultilingualModel = [](const FString& ModelID) -> bool
{ {
return ModelID.Contains(TEXT("multilingual")) return ModelID.Contains(TEXT("multilingual"))
@ -1825,108 +1830,3 @@ TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::Bu
return Root; return Root;
} }
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildEmotionToolDefinition()
{
// Build the set_emotion client tool definition.
// Parameters: emotion (enum), intensity (enum).
// emotion parameter
TSharedPtr<FJsonObject> EmotionParam = MakeShareable(new FJsonObject());
EmotionParam->SetStringField(TEXT("type"), TEXT("string"));
EmotionParam->SetStringField(TEXT("description"), TEXT("The emotion to display."));
TArray<TSharedPtr<FJsonValue>> EmotionEnum;
for (const FString& E : {TEXT("joy"), TEXT("sadness"), TEXT("anger"), TEXT("surprise"),
TEXT("fear"), TEXT("disgust"), TEXT("neutral")})
{
EmotionEnum.Add(MakeShareable(new FJsonValueString(E)));
}
EmotionParam->SetArrayField(TEXT("enum"), EmotionEnum);
// intensity parameter
TSharedPtr<FJsonObject> IntensityParam = MakeShareable(new FJsonObject());
IntensityParam->SetStringField(TEXT("type"), TEXT("string"));
IntensityParam->SetStringField(TEXT("description"), TEXT("The intensity of the emotion."));
TArray<TSharedPtr<FJsonValue>> IntensityEnum;
for (const FString& I : {TEXT("low"), TEXT("medium"), TEXT("high")})
{
IntensityEnum.Add(MakeShareable(new FJsonValueString(I)));
}
IntensityParam->SetArrayField(TEXT("enum"), IntensityEnum);
// properties
TSharedPtr<FJsonObject> Properties = MakeShareable(new FJsonObject());
Properties->SetObjectField(TEXT("emotion"), EmotionParam);
Properties->SetObjectField(TEXT("intensity"), IntensityParam);
// required
TArray<TSharedPtr<FJsonValue>> Required;
Required.Add(MakeShareable(new FJsonValueString(TEXT("emotion"))));
Required.Add(MakeShareable(new FJsonValueString(TEXT("intensity"))));
// parameters
TSharedPtr<FJsonObject> Parameters = MakeShareable(new FJsonObject());
Parameters->SetStringField(TEXT("type"), TEXT("object"));
Parameters->SetObjectField(TEXT("properties"), Properties);
Parameters->SetArrayField(TEXT("required"), Required);
// Tool definition
TSharedPtr<FJsonObject> Tool = MakeShareable(new FJsonObject());
Tool->SetStringField(TEXT("type"), TEXT("client"));
Tool->SetStringField(TEXT("name"), TEXT("set_emotion"));
Tool->SetStringField(TEXT("description"),
TEXT("Set the character's facial expression emotion and intensity."));
Tool->SetObjectField(TEXT("parameters"), Parameters);
return Tool;
}
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildActionToolDefinition(
const UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset)
{
if (!Asset || !Asset->ActionSet || Asset->ActionSet->Actions.Num() == 0)
return nullptr;
// Build description with action list from the referenced ActionSet.
FString ParamDesc = TEXT("The action to perform. Available actions:");
TArray<TSharedPtr<FJsonValue>> ActionEnum;
for (const auto& Action : Asset->ActionSet->Actions)
{
if (!Action.Name.IsEmpty())
{
ActionEnum.Add(MakeShareable(new FJsonValueString(Action.Name)));
if (!Action.Description.IsEmpty())
{
ParamDesc += FString::Printf(TEXT("\n- %s: %s"),
*Action.Name, *Action.Description);
}
}
}
if (ActionEnum.Num() == 0) return nullptr;
// action parameter
TSharedPtr<FJsonObject> ActionParam = MakeShareable(new FJsonObject());
ActionParam->SetStringField(TEXT("type"), TEXT("string"));
ActionParam->SetStringField(TEXT("description"), ParamDesc);
ActionParam->SetArrayField(TEXT("enum"), ActionEnum);
// properties + required
TSharedPtr<FJsonObject> Properties = MakeShareable(new FJsonObject());
Properties->SetObjectField(TEXT("action"), ActionParam);
TArray<TSharedPtr<FJsonValue>> Required;
Required.Add(MakeShareable(new FJsonValueString(TEXT("action"))));
TSharedPtr<FJsonObject> Parameters = MakeShareable(new FJsonObject());
Parameters->SetStringField(TEXT("type"), TEXT("object"));
Parameters->SetObjectField(TEXT("properties"), Properties);
Parameters->SetArrayField(TEXT("required"), Required);
// Tool definition
TSharedPtr<FJsonObject> Tool = MakeShareable(new FJsonObject());
Tool->SetStringField(TEXT("type"), TEXT("client"));
Tool->SetStringField(TEXT("name"), TEXT("perform_action"));
Tool->SetStringField(TEXT("description"),
TEXT("Trigger a physical action or reaction for the character."));
Tool->SetObjectField(TEXT("parameters"), Parameters);
return Tool;
}

View File

@ -53,12 +53,9 @@ private:
public: public:
/** Build the full ElevenLabs API payload for any AgentConfig asset. /** Build the full ElevenLabs API payload for any AgentConfig asset.
* Static so it can be reused from other customizations (e.g. ActionSet batch update). */ * Static so it can be reused from other customizations (e.g. Tool batch agent update). */
static TSharedPtr<FJsonObject> BuildAgentPayloadForAsset( static TSharedPtr<FJsonObject> BuildAgentPayloadForAsset(
const class UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset); const class UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset);
static TSharedPtr<FJsonObject> BuildEmotionToolDefinition();
static TSharedPtr<FJsonObject> BuildActionToolDefinition(
const class UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Asset);
private: private:

View File

@ -0,0 +1,899 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_ToolCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent_Tool_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfig_ElevenLabs.h"
#include "PS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs.h"
#include "PS_AI_ConvAgent.h"
#include "DetailLayoutBuilder.h"
#include "DetailCategoryBuilder.h"
#include "DetailWidgetRow.h"
#include "Widgets/Input/SButton.h"
#include "Widgets/Text/STextBlock.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "HttpModule.h"
#include "Interfaces/IHttpRequest.h"
#include "Interfaces/IHttpResponse.h"
#include "Dom/JsonObject.h"
#include "Serialization/JsonReader.h"
#include "Serialization/JsonWriter.h"
#include "Serialization/JsonSerializer.h"
DEFINE_LOG_CATEGORY_STATIC(LogPS_AI_ToolEditor, Log, All);
// ─────────────────────────────────────────────────────────────────────────────
// Factory
// ─────────────────────────────────────────────────────────────────────────────
TSharedRef<IDetailCustomization> FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::MakeInstance()
{
	// Factory for the details-panel customization. The property editor module
	// calls this each time a Tool DataAsset is opened, so every editor tab
	// gets its own customization instance (no shared mutable state).
	TSharedRef<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs> NewCustomization =
		MakeShareable(new FPS_AI_ConvAgent_ToolCustomization_ElevenLabs());
	return NewCustomization;
}
// ─────────────────────────────────────────────────────────────────────────────
// CustomizeDetails
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Builds the "Tool Management" section of the details panel for Tool assets:
 * a row of Create / Update / Fetch buttons, an "Update All Agents" button,
 * and a colored status line that the async HTTP handlers write into.
 * Also caches the selected objects for later retrieval via GetEditedAsset().
 */
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::CustomizeDetails(
    IDetailLayoutBuilder& DetailBuilder)
{
    DetailBuilder.GetObjectsBeingCustomized(SelectedObjects);
    // ── Tool Management category ────────────────────────────────────────────
    IDetailCategoryBuilder& ToolCat = DetailBuilder.EditCategory(
        TEXT("Tool Management"),
        FText::FromString(TEXT("Tool Management")),
        ECategoryPriority::Important);
    // NOTE(review): the button lambdas capture `this` raw — assumes the
    // customization instance outlives the row widgets (the usual details-panel
    // lifetime); confirm the panel cannot keep widgets alive past teardown.
    ToolCat.AddCustomRow(FText::FromString(TEXT("Tool Actions")))
    .WholeRowContent()
    [
        SNew(SVerticalBox)
        // Create / Update / Fetch buttons on one row
        + SVerticalBox::Slot()
        .AutoHeight()
        .Padding(0, 4)
        [
            SNew(SHorizontalBox)
            + SHorizontalBox::Slot()
            .AutoWidth()
            .Padding(0, 0, 4, 0)
            [
                SNew(SButton)
                .Text(FText::FromString(TEXT("Create Tool")))
                .ToolTipText(FText::FromString(
                    TEXT("POST a new standalone tool to ElevenLabs.\nPopulates ToolID on success.")))
                .OnClicked_Lambda([this]()
                {
                    OnCreateToolClicked();
                    return FReply::Handled();
                })
            ]
            + SHorizontalBox::Slot()
            .AutoWidth()
            .Padding(0, 0, 4, 0)
            [
                SNew(SButton)
                .Text(FText::FromString(TEXT("Update Tool")))
                .ToolTipText(FText::FromString(
                    TEXT("PATCH the existing tool on ElevenLabs with current settings.\nRequires a ToolID.")))
                .OnClicked_Lambda([this]()
                {
                    OnUpdateToolClicked();
                    return FReply::Handled();
                })
            ]
            + SHorizontalBox::Slot()
            .AutoWidth()
            [
                SNew(SButton)
                .Text(FText::FromString(TEXT("Fetch Tool")))
                .ToolTipText(FText::FromString(
                    TEXT("GET the tool definition from ElevenLabs and populate fields.\nRequires a ToolID.")))
                .OnClicked_Lambda([this]()
                {
                    OnFetchToolClicked();
                    return FReply::Handled();
                })
            ]
        ]
        // Update All Agents button
        + SVerticalBox::Slot()
        .AutoHeight()
        .Padding(0, 4)
        [
            SNew(SButton)
            .Text(FText::FromString(TEXT("Update All Agents")))
            .ToolTipText(FText::FromString(
                TEXT("Re-PATCH all AgentConfig assets that reference this tool.\n"
                "Use after changing the PromptFragment to update agent system prompts.")))
            .OnClicked_Lambda([this]()
            {
                OnUpdateAllAgentsClicked();
                return FReply::Handled();
            })
        ]
        // Status text — written by SetStatusText / SetStatusError / SetStatusSuccess
        + SVerticalBox::Slot()
        .AutoHeight()
        .Padding(0, 2)
        [
            SAssignNew(StatusTextBlock, STextBlock)
            .Text(FText::GetEmpty())
            .Font(IDetailLayoutBuilder::GetDetailFont())
            .ColorAndOpacity(FSlateColor(FLinearColor(0.3f, 0.7f, 1.0f)))
        ]
    ];
}
// ─────────────────────────────────────────────────────────────────────────────
// BuildToolPayload — Generate the ElevenLabs tool definition JSON
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Serialize a Tool data asset into the ElevenLabs tool definition JSON.
 * Returns the inner object (name, description, parameters); the caller adds
 * the "type": "client" discriminator and wraps it in {"tool_config": ...}.
 * Returns nullptr when the asset is null or has no ToolName.
 */
TSharedPtr<FJsonObject> FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::BuildToolPayload(
    const UPS_AI_ConvAgent_Tool_ElevenLabs* Tool)
{
    // A nameless tool cannot be serialized.
    if (!Tool || Tool->ToolName.IsEmpty())
    {
        return nullptr;
    }

    TSharedPtr<FJsonObject> Definition = MakeShareable(new FJsonObject());
    Definition->SetStringField(TEXT("name"), Tool->ToolName);

    // Fall back to a generated description when the asset provides none.
    const FString EffectiveDescription = Tool->ToolDescription.IsEmpty()
        ? FString::Printf(TEXT("Client tool: %s"), *Tool->ToolName)
        : Tool->ToolDescription;
    Definition->SetStringField(TEXT("description"), EffectiveDescription);

    // No parameters declared: emit only name + description.
    if (Tool->Parameters.Num() == 0)
    {
        return Definition;
    }

    // Map the parameter type enum onto its JSON Schema type string.
    auto TypeToString = [](EPS_AI_ConvAgent_ToolParamType InType) -> FString
    {
        switch (InType)
        {
        case EPS_AI_ConvAgent_ToolParamType::Integer: return TEXT("integer");
        case EPS_AI_ConvAgent_ToolParamType::Number:  return TEXT("number");
        case EPS_AI_ConvAgent_ToolParamType::Boolean: return TEXT("boolean");
        case EPS_AI_ConvAgent_ToolParamType::String:
        default:                                      return TEXT("string");
        }
    };

    // Standard JSON Schema layout:
    //   "parameters": { "type": "object", "properties": {...}, "required": [...] }
    TSharedPtr<FJsonObject> PropertiesObj = MakeShareable(new FJsonObject());
    TArray<TSharedPtr<FJsonValue>> RequiredNames;

    for (const auto& Param : Tool->Parameters)
    {
        // Unnamed parameters are silently skipped.
        if (Param.Name.IsEmpty())
        {
            continue;
        }

        TSharedPtr<FJsonObject> PropertyObj = MakeShareable(new FJsonObject());
        PropertyObj->SetStringField(TEXT("type"), TypeToString(Param.Type));
        if (!Param.Description.IsEmpty())
        {
            PropertyObj->SetStringField(TEXT("description"), Param.Description);
        }

        // Optional enum constraint: only non-empty values are emitted.
        TArray<TSharedPtr<FJsonValue>> EnumValues;
        for (const FString& EnumValue : Param.EnumValues)
        {
            if (!EnumValue.IsEmpty())
            {
                EnumValues.Add(MakeShareable(new FJsonValueString(EnumValue)));
            }
        }
        if (EnumValues.Num() > 0)
        {
            PropertyObj->SetArrayField(TEXT("enum"), EnumValues);
        }

        PropertiesObj->SetObjectField(Param.Name, PropertyObj);
        if (Param.bRequired)
        {
            RequiredNames.Add(MakeShareable(new FJsonValueString(Param.Name)));
        }
    }

    TSharedPtr<FJsonObject> Schema = MakeShareable(new FJsonObject());
    Schema->SetStringField(TEXT("type"), TEXT("object"));
    Schema->SetObjectField(TEXT("properties"), PropertiesObj);
    if (RequiredNames.Num() > 0)
    {
        Schema->SetArrayField(TEXT("required"), RequiredNames);
    }
    Definition->SetObjectField(TEXT("parameters"), Schema);

    return Definition;
}
// ─────────────────────────────────────────────────────────────────────────────
// Create Tool
// ─────────────────────────────────────────────────────────────────────────────
/**
 * POST the asset's tool definition to ElevenLabs as a new standalone tool
 * (/v1/convai/tools). On success the returned tool_id is written back into
 * the asset's ToolID and LastSyncTimestamp is stamped.
 * Preconditions: a ToolName, no existing ToolID, and a configured API key.
 */
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::OnCreateToolClicked()
{
    UPS_AI_ConvAgent_Tool_ElevenLabs* Asset = GetEditedAsset();
    if (!Asset)
    {
        SetStatusError(TEXT("No tool asset selected."));
        return;
    }
    if (Asset->ToolName.IsEmpty())
    {
        SetStatusError(TEXT("ToolName is required."));
        return;
    }
    // A non-empty ToolID means the tool already exists remotely.
    if (!Asset->ToolID.IsEmpty())
    {
        SetStatusError(TEXT("Tool already has an ID. Use Update instead."));
        return;
    }
    const FString APIKey = GetAPIKey();
    if (APIKey.IsEmpty())
    {
        SetStatusError(TEXT("API Key not set in Project Settings."));
        return;
    }
    TSharedPtr<FJsonObject> ToolPayload = BuildToolPayload(Asset);
    if (!ToolPayload)
    {
        SetStatusError(TEXT("Failed to build tool payload."));
        return;
    }
    SetStatusText(TEXT("Creating tool..."));
    // Wrap as: { "tool_config": { "type": "client", ...payload... } }
    // The "type" field is a discriminator inside tool_config (not a nested key).
    ToolPayload->SetStringField(TEXT("type"), TEXT("client"));
    TSharedPtr<FJsonObject> Wrapped = MakeShareable(new FJsonObject());
    Wrapped->SetObjectField(TEXT("tool_config"), ToolPayload);
    FString PayloadStr;
    TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
    FJsonSerializer::Serialize(Wrapped.ToSharedRef(), Writer);
    UE_LOG(LogPS_AI_ToolEditor, Log, TEXT("Creating tool '%s': POST /v1/convai/tools"),
        *Asset->ToolName);
    UE_LOG(LogPS_AI_ToolEditor, Log, TEXT(" Payload: %s"), *PayloadStr);
    TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
    Request->SetURL(TEXT("https://api.elevenlabs.io/v1/convai/tools"));
    Request->SetVerb(TEXT("POST"));
    Request->SetHeader(TEXT("xi-api-key"), APIKey);
    Request->SetHeader(TEXT("Content-Type"), TEXT("application/json"));
    Request->SetContentAsString(PayloadStr);
    // Weak self: the details panel may be destroyed before the response arrives.
    TWeakPtr<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs> WeakSelf =
        StaticCastSharedRef<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs>(this->AsShared());
    Request->OnProcessRequestComplete().BindLambda(
        [WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
        {
            auto Pinned = WeakSelf.Pin();
            if (!Pinned.IsValid()) return;
            if (!bConnected || !Resp.IsValid())
            {
                Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API."));
                return;
            }
            const int32 Code = Resp->GetResponseCode();
            // Both 200 OK and 201 Created count as success.
            if (Code != 200 && Code != 201)
            {
                Pinned->SetStatusError(ParseAPIError(Code, Resp->GetContentAsString()));
                return;
            }
            // Parse response for tool_id
            TSharedPtr<FJsonObject> Root;
            if (!FJsonSerializer::Deserialize(
                TJsonReaderFactory<>::Create(Resp->GetContentAsString()), Root) || !Root.IsValid())
            {
                Pinned->SetStatusError(TEXT("Failed to parse response."));
                return;
            }
            // Prefer "tool_id"; fall back to "id" when absent.
            FString NewToolID;
            if (!Root->TryGetStringField(TEXT("tool_id"), NewToolID))
            {
                Root->TryGetStringField(TEXT("id"), NewToolID);
            }
            if (NewToolID.IsEmpty())
            {
                Pinned->SetStatusError(TEXT("No tool_id in response."));
                return;
            }
            // Re-resolve the asset: the selection may have changed meanwhile.
            if (UPS_AI_ConvAgent_Tool_ElevenLabs* A = Pinned->GetEditedAsset())
            {
                A->Modify();
                A->ToolID = NewToolID;
                A->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
                A->PostEditChange();
            }
            Pinned->SetStatusSuccess(FString::Printf(TEXT("Tool created: %s"), *NewToolID));
        });
    Request->ProcessRequest();
}
// ─────────────────────────────────────────────────────────────────────────────
// Update Tool
// ─────────────────────────────────────────────────────────────────────────────
/**
 * PATCH the tool definition currently stored in the asset onto ElevenLabs
 * (/v1/convai/tools/{tool_id}). Requires the asset to already have a ToolID
 * (i.e. Create Tool was used before). On success, LastSyncTimestamp is
 * stamped on the asset.
 */
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::OnUpdateToolClicked()
{
    UPS_AI_ConvAgent_Tool_ElevenLabs* Asset = GetEditedAsset();
    if (!Asset)
    {
        SetStatusError(TEXT("No tool asset selected."));
        return;
    }
    if (Asset->ToolID.IsEmpty())
    {
        SetStatusError(TEXT("No ToolID set. Use Create first."));
        return;
    }
    const FString APIKey = GetAPIKey();
    if (APIKey.IsEmpty())
    {
        SetStatusError(TEXT("API Key not set in Project Settings."));
        return;
    }
    TSharedPtr<FJsonObject> ToolPayload = BuildToolPayload(Asset);
    if (!ToolPayload)
    {
        SetStatusError(TEXT("Failed to build tool payload."));
        return;
    }
    SetStatusText(TEXT("Updating tool..."));
    // Add discriminator field and wrap: { "tool_config": { "type": "client", ... } }
    ToolPayload->SetStringField(TEXT("type"), TEXT("client"));
    TSharedPtr<FJsonObject> Wrapped = MakeShareable(new FJsonObject());
    Wrapped->SetObjectField(TEXT("tool_config"), ToolPayload);
    FString PayloadStr;
    TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
    FJsonSerializer::Serialize(Wrapped.ToSharedRef(), Writer);
    const FString URL = FString::Printf(
        TEXT("https://api.elevenlabs.io/v1/convai/tools/%s"), *Asset->ToolID);
    UE_LOG(LogPS_AI_ToolEditor, Log, TEXT("Updating tool '%s': PATCH %s"),
        *Asset->ToolName, *URL);
    UE_LOG(LogPS_AI_ToolEditor, Log, TEXT(" Payload: %s"), *PayloadStr);
    TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
    Request->SetURL(URL);
    Request->SetVerb(TEXT("PATCH"));
    Request->SetHeader(TEXT("xi-api-key"), APIKey);
    Request->SetHeader(TEXT("Content-Type"), TEXT("application/json"));
    Request->SetContentAsString(PayloadStr);
    // Weak self: the details panel may be destroyed before the response arrives.
    TWeakPtr<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs> WeakSelf =
        StaticCastSharedRef<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs>(this->AsShared());
    Request->OnProcessRequestComplete().BindLambda(
        [WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
        {
            auto Pinned = WeakSelf.Pin();
            if (!Pinned.IsValid()) return;
            if (!bConnected || !Resp.IsValid())
            {
                Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API."));
                return;
            }
            if (Resp->GetResponseCode() != 200)
            {
                Pinned->SetStatusError(ParseAPIError(
                    Resp->GetResponseCode(), Resp->GetContentAsString()));
                return;
            }
            if (UPS_AI_ConvAgent_Tool_ElevenLabs* A = Pinned->GetEditedAsset())
            {
                A->Modify();
                A->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
                // Fix: notify the editor of the property change — the Create and
                // Fetch paths both do this; without it the details panel does not
                // refresh the updated timestamp.
                A->PostEditChange();
            }
            Pinned->SetStatusSuccess(TEXT("Tool updated successfully."));
        });
    Request->ProcessRequest();
}
// ─────────────────────────────────────────────────────────────────────────────
// Fetch Tool
// ─────────────────────────────────────────────────────────────────────────────
/**
 * GET the tool definition from ElevenLabs (/v1/convai/tools/{tool_id}) and
 * overwrite the asset's ToolName, ToolDescription, and Parameters with it.
 * Requires a ToolID and a configured API key. Handles both the nested
 * ElevenLabs response layout (tool_config / tool_config.client) and a
 * root-level definition, and both parameter formats (type-as-key and
 * standard JSON Schema).
 */
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::OnFetchToolClicked()
{
    UPS_AI_ConvAgent_Tool_ElevenLabs* Asset = GetEditedAsset();
    if (!Asset)
    {
        SetStatusError(TEXT("No tool asset selected."));
        return;
    }
    if (Asset->ToolID.IsEmpty())
    {
        SetStatusError(TEXT("No ToolID set. Enter one first or use Create."));
        return;
    }
    const FString APIKey = GetAPIKey();
    if (APIKey.IsEmpty())
    {
        SetStatusError(TEXT("API Key not set in Project Settings."));
        return;
    }
    SetStatusText(TEXT("Fetching tool..."));
    const FString URL = FString::Printf(
        TEXT("https://api.elevenlabs.io/v1/convai/tools/%s"), *Asset->ToolID);
    TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
    Request->SetURL(URL);
    Request->SetVerb(TEXT("GET"));
    Request->SetHeader(TEXT("xi-api-key"), APIKey);
    Request->SetHeader(TEXT("Accept"), TEXT("application/json"));
    // Weak self: the details panel may be destroyed before the response arrives.
    TWeakPtr<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs> WeakSelf =
        StaticCastSharedRef<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs>(this->AsShared());
    Request->OnProcessRequestComplete().BindLambda(
        [WeakSelf](FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
        {
            auto Pinned = WeakSelf.Pin();
            if (!Pinned.IsValid()) return;
            if (!bConnected || !Resp.IsValid())
            {
                Pinned->SetStatusError(TEXT("Could not reach ElevenLabs API."));
                return;
            }
            if (Resp->GetResponseCode() != 200)
            {
                Pinned->SetStatusError(ParseAPIError(
                    Resp->GetResponseCode(), Resp->GetContentAsString()));
                return;
            }
            TSharedPtr<FJsonObject> Root;
            if (!FJsonSerializer::Deserialize(
                TJsonReaderFactory<>::Create(Resp->GetContentAsString()), Root) || !Root.IsValid())
            {
                Pinned->SetStatusError(TEXT("Failed to parse response."));
                return;
            }
            // Re-resolve the asset: the selection may have changed while waiting.
            UPS_AI_ConvAgent_Tool_ElevenLabs* Asset = Pinned->GetEditedAsset();
            if (!Asset) return;
            Asset->Modify();
            // The tool definition may be at root, inside "tool_config",
            // or inside "tool_config.client" (ElevenLabs nested format).
            TSharedPtr<FJsonObject> ToolDef = Root;
            {
                const TSharedPtr<FJsonObject>* ToolConfig = nullptr;
                if (Root->TryGetObjectField(TEXT("tool_config"), ToolConfig))
                {
                    const TSharedPtr<FJsonObject>* ClientObj = nullptr;
                    if ((*ToolConfig)->TryGetObjectField(TEXT("client"), ClientObj))
                    {
                        ToolDef = *ClientObj;
                    }
                    else
                    {
                        ToolDef = *ToolConfig;
                    }
                }
            }
            // Populate from response (fields missing in the response leave the
            // asset's current value untouched).
            FString Name;
            if (ToolDef->TryGetStringField(TEXT("name"), Name))
            {
                Asset->ToolName = Name;
            }
            FString Description;
            if (ToolDef->TryGetStringField(TEXT("description"), Description))
            {
                Asset->ToolDescription = Description;
            }
            // Parse parameters from the tool definition
            const TSharedPtr<FJsonObject>* Params = nullptr;
            if (ToolDef->TryGetObjectField(TEXT("parameters"), Params))
            {
                const TSharedPtr<FJsonObject>* Props = nullptr;
                if ((*Params)->TryGetObjectField(TEXT("properties"), Props))
                {
                    // Get required array for bRequired flag
                    TSet<FString> RequiredSet;
                    const TArray<TSharedPtr<FJsonValue>>* RequiredArr = nullptr;
                    if ((*Params)->TryGetArrayField(TEXT("required"), RequiredArr))
                    {
                        for (const auto& Val : *RequiredArr)
                        {
                            FString ReqName;
                            if (Val->TryGetString(ReqName))
                            {
                                RequiredSet.Add(ReqName);
                            }
                        }
                    }
                    // NOTE: existing parameters are discarded only once a
                    // "properties" object was found in the response.
                    Asset->Parameters.Empty();
                    // Iterate all properties.
                    // ElevenLabs format: type is a KEY inside the property:
                    //   "emotion": { "string": { "description": "...", "enum": [...] } }
                    // Also handle JSON Schema format as fallback:
                    //   "emotion": { "type": "string", "description": "..." }
                    for (const auto& Pair : (*Props)->Values)
                    {
                        const TSharedPtr<FJsonObject>* PropObj = nullptr;
                        if (!Pair.Value->TryGetObject(PropObj)) continue;
                        FPS_AI_ConvAgent_ToolParameter_ElevenLabs Param;
                        Param.Name = Pair.Key;
                        Param.bRequired = RequiredSet.Contains(Pair.Key);
                        // Try ElevenLabs format: type-as-key
                        static const TArray<TPair<FString, EPS_AI_ConvAgent_ToolParamType>> TypeKeys = {
                            {TEXT("string"), EPS_AI_ConvAgent_ToolParamType::String},
                            {TEXT("integer"), EPS_AI_ConvAgent_ToolParamType::Integer},
                            {TEXT("number"), EPS_AI_ConvAgent_ToolParamType::Number},
                            {TEXT("boolean"), EPS_AI_ConvAgent_ToolParamType::Boolean},
                        };
                        bool bFoundTypeKey = false;
                        for (const auto& TK : TypeKeys)
                        {
                            const TSharedPtr<FJsonObject>* TypeInner = nullptr;
                            if ((*PropObj)->TryGetObjectField(TK.Key, TypeInner))
                            {
                                Param.Type = TK.Value;
                                (*TypeInner)->TryGetStringField(TEXT("description"), Param.Description);
                                const TArray<TSharedPtr<FJsonValue>>* EnumArr = nullptr;
                                if ((*TypeInner)->TryGetArrayField(TEXT("enum"), EnumArr))
                                {
                                    for (const auto& EVal : *EnumArr)
                                    {
                                        FString EnumStr;
                                        if (EVal->TryGetString(EnumStr))
                                        {
                                            Param.EnumValues.Add(EnumStr);
                                        }
                                    }
                                }
                                bFoundTypeKey = true;
                                break;
                            }
                        }
                        // Fallback: JSON Schema format
                        if (!bFoundTypeKey)
                        {
                            FString TypeStr;
                            if ((*PropObj)->TryGetStringField(TEXT("type"), TypeStr))
                            {
                                if (TypeStr == TEXT("integer"))
                                    Param.Type = EPS_AI_ConvAgent_ToolParamType::Integer;
                                else if (TypeStr == TEXT("number"))
                                    Param.Type = EPS_AI_ConvAgent_ToolParamType::Number;
                                else if (TypeStr == TEXT("boolean"))
                                    Param.Type = EPS_AI_ConvAgent_ToolParamType::Boolean;
                                else
                                    Param.Type = EPS_AI_ConvAgent_ToolParamType::String;
                            }
                            (*PropObj)->TryGetStringField(TEXT("description"), Param.Description);
                            const TArray<TSharedPtr<FJsonValue>>* EnumArr = nullptr;
                            if ((*PropObj)->TryGetArrayField(TEXT("enum"), EnumArr))
                            {
                                for (const auto& EVal : *EnumArr)
                                {
                                    FString EnumStr;
                                    if (EVal->TryGetString(EnumStr))
                                    {
                                        Param.EnumValues.Add(EnumStr);
                                    }
                                }
                            }
                        }
                        Asset->Parameters.Add(Param);
                    }
                }
            }
            Asset->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
            Asset->PostEditChange();
            Pinned->SetStatusSuccess(FString::Printf(TEXT("Tool fetched: %s (%s)"),
                *Asset->ToolName, *Asset->ToolID));
        });
    Request->ProcessRequest();
}
// ─────────────────────────────────────────────────────────────────────────────
// Update All Agents — re-PATCH agents that reference this tool
// ─────────────────────────────────────────────────────────────────────────────
/**
 * Re-PATCH every AgentConfig asset that references this tool and has an
 * AgentID set. Used after changing the tool's prompt text, since that text is
 * baked into each agent's configuration on the ElevenLabs side.
 * All PATCH requests fire in parallel; a shared batch state tracks
 * completion and the status line reports the final success/failure tally.
 */
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::OnUpdateAllAgentsClicked()
{
    const UPS_AI_ConvAgent_Tool_ElevenLabs* ToolAsset = GetEditedAsset();
    if (!ToolAsset)
    {
        SetStatusError(TEXT("No tool asset selected."));
        return;
    }
    const FString APIKey = GetAPIKey();
    if (APIKey.IsEmpty())
    {
        SetStatusError(TEXT("API Key not set in Project Settings > PS AI ConvAgent - ElevenLabs."));
        return;
    }
    // ── Scan all AgentConfig assets that reference this tool ─────────────────
    FAssetRegistryModule& ARModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(
        "AssetRegistry");
    IAssetRegistry& AssetRegistry = ARModule.Get();
    TArray<FAssetData> AllAgentConfigs;
    AssetRegistry.GetAssetsByClass(
        UPS_AI_ConvAgent_AgentConfig_ElevenLabs::StaticClass()->GetClassPathName(),
        AllAgentConfigs, true);
    TArray<UPS_AI_ConvAgent_AgentConfig_ElevenLabs*> MatchingConfigs;
    for (const FAssetData& AD : AllAgentConfigs)
    {
        UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config =
            Cast<UPS_AI_ConvAgent_AgentConfig_ElevenLabs>(AD.GetAsset());
        if (!Config) continue;
        // Agents never created remotely cannot be PATCHed.
        if (Config->AgentID.IsEmpty()) continue;
        // Check if this agent's Tools array contains our tool
        bool bReferencesTool = false;
        for (const auto& T : Config->Tools)
        {
            if (T == ToolAsset)
            {
                bReferencesTool = true;
                break;
            }
        }
        if (bReferencesTool)
        {
            MatchingConfigs.Add(Config);
        }
    }
    if (MatchingConfigs.Num() == 0)
    {
        SetStatusError(TEXT("No AgentConfig assets reference this tool (with AgentID set)."));
        return;
    }
    SetStatusText(FString::Printf(TEXT("Updating %d agent(s)..."), MatchingConfigs.Num()));
    // ── Shared counter for async completion tracking ─────────────────────────
    struct FBatchState
    {
        int32 Total = 0;
        FThreadSafeCounter Succeeded;
        FThreadSafeCounter Failed;
        TArray<FString> Errors;
        FCriticalSection ErrorLock;
    };
    TSharedPtr<FBatchState> State = MakeShareable(new FBatchState());
    State->Total = MatchingConfigs.Num();
    // Weak self: the details panel may be destroyed before responses arrive.
    TWeakPtr<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs> WeakSelf =
        StaticCastSharedRef<FPS_AI_ConvAgent_ToolCustomization_ElevenLabs>(this->AsShared());
    // Writes the final status once every agent has been accounted for. Shared
    // by the HTTP callbacks and the synchronous payload-failure path so the
    // report also fires when no request was ever sent.
    auto ReportIfComplete = [WeakSelf, State]()
    {
        const int32 Done = State->Succeeded.GetValue() + State->Failed.GetValue();
        if (Done < State->Total) return;
        auto Pinned = WeakSelf.Pin();
        if (!Pinned.IsValid()) return;
        if (State->Failed.GetValue() == 0)
        {
            Pinned->SetStatusSuccess(FString::Printf(
                TEXT("Updated %d/%d agents successfully."),
                State->Succeeded.GetValue(), State->Total));
        }
        else
        {
            FString AllErrors;
            {
                FScopeLock Lock(&State->ErrorLock);
                AllErrors = FString::Join(State->Errors, TEXT(", "));
            }
            Pinned->SetStatusError(FString::Printf(
                TEXT("Updated %d/%d agents. Failures: %s"),
                State->Succeeded.GetValue(), State->Total, *AllErrors));
        }
    };
    for (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Config : MatchingConfigs)
    {
        TWeakObjectPtr<UPS_AI_ConvAgent_AgentConfig_ElevenLabs> WeakConfig(Config);
        FString AgentName = Config->AgentName.IsEmpty() ? Config->AgentID : Config->AgentName;
        FString ConfigAgentID = Config->AgentID;
        UE_LOG(LogPS_AI_ToolEditor, Log,
            TEXT(" -> Updating agent '%s' (ID: %s)"), *AgentName, *ConfigAgentID);
        // Build payload and PATCH agent
        TSharedPtr<FJsonObject> Payload =
            FPS_AI_ConvAgent_AgentConfigCustomization_ElevenLabs::BuildAgentPayloadForAsset(Config);
        // Fix: guard against a null payload — calling ToSharedRef() on an
        // invalid shared pointer asserts. Count it as a failure and continue.
        if (!Payload.IsValid())
        {
            State->Failed.Increment();
            {
                FScopeLock Lock(&State->ErrorLock);
                State->Errors.Add(FString::Printf(TEXT("%s: failed to build payload"), *AgentName));
            }
            ReportIfComplete();
            continue;
        }
        FString PayloadStr;
        TSharedRef<TJsonWriter<>> Writer = TJsonWriterFactory<>::Create(&PayloadStr);
        FJsonSerializer::Serialize(Payload.ToSharedRef(), Writer);
        const FString URL = FString::Printf(
            TEXT("https://api.elevenlabs.io/v1/convai/agents/%s"), *ConfigAgentID);
        TSharedRef<IHttpRequest, ESPMode::ThreadSafe> Request = FHttpModule::Get().CreateRequest();
        Request->SetURL(URL);
        Request->SetVerb(TEXT("PATCH"));
        Request->SetHeader(TEXT("xi-api-key"), APIKey);
        Request->SetHeader(TEXT("Content-Type"), TEXT("application/json"));
        Request->SetContentAsString(PayloadStr);
        Request->OnProcessRequestComplete().BindLambda(
            [State, WeakConfig, AgentName, ReportIfComplete]
            (FHttpRequestPtr Req, FHttpResponsePtr Resp, bool bConnected)
            {
                bool bSuccess = false;
                FString ErrorMsg;
                if (!bConnected || !Resp.IsValid())
                {
                    ErrorMsg = FString::Printf(TEXT("%s: connection failed"), *AgentName);
                }
                else if (Resp->GetResponseCode() != 200)
                {
                    ErrorMsg = FString::Printf(TEXT("%s: HTTP %d"),
                        *AgentName, Resp->GetResponseCode());
                }
                else
                {
                    bSuccess = true;
                    // Stamp the sync time on the asset if it is still loaded.
                    if (UPS_AI_ConvAgent_AgentConfig_ElevenLabs* Cfg = WeakConfig.Get())
                    {
                        Cfg->Modify();
                        Cfg->LastSyncTimestamp = FDateTime::UtcNow().ToIso8601();
                    }
                }
                if (bSuccess)
                {
                    State->Succeeded.Increment();
                }
                else
                {
                    State->Failed.Increment();
                    FScopeLock Lock(&State->ErrorLock);
                    State->Errors.Add(ErrorMsg);
                }
                ReportIfComplete();
            });
        Request->ProcessRequest();
    }
}
// ─────────────────────────────────────────────────────────────────────────────
// Helpers
// ─────────────────────────────────────────────────────────────────────────────
/** Read the ElevenLabs API key from the runtime module's settings; empty when
 *  the module is not loaded or no settings object is available. */
FString FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::GetAPIKey() const
{
    if (!FPS_AI_ConvAgentModule::IsAvailable())
    {
        return FString();
    }
    const UPS_AI_ConvAgent_Settings_ElevenLabs* Settings =
        FPS_AI_ConvAgentModule::Get().GetSettings();
    return Settings ? Settings->API_Key : FString();
}
/** Return the first selected object that is a Tool asset; nullptr when the
 *  selection contains none (or the weak pointers have gone stale). */
UPS_AI_ConvAgent_Tool_ElevenLabs* FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::GetEditedAsset() const
{
    for (int32 Index = 0; Index < SelectedObjects.Num(); ++Index)
    {
        UObject* Raw = SelectedObjects[Index].Get();
        UPS_AI_ConvAgent_Tool_ElevenLabs* Tool = Cast<UPS_AI_ConvAgent_Tool_ElevenLabs>(Raw);
        if (Tool != nullptr)
        {
            return Tool;
        }
    }
    return nullptr;
}
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::SetStatusText(const FString& Text)
{
UE_LOG(LogPS_AI_ToolEditor, Log, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(0.3f, 0.7f, 1.0f)));
}
}
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::SetStatusError(const FString& Text)
{
UE_LOG(LogPS_AI_ToolEditor, Error, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(1.0f, 0.25f, 0.25f)));
}
}
void FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::SetStatusSuccess(const FString& Text)
{
UE_LOG(LogPS_AI_ToolEditor, Log, TEXT("%s"), *Text);
if (StatusTextBlock.IsValid())
{
StatusTextBlock->SetText(FText::FromString(Text));
StatusTextBlock->SetColorAndOpacity(FSlateColor(FLinearColor(0.2f, 0.9f, 0.3f)));
}
}
/** Turn an ElevenLabs error response into a short human-readable message.
 *  Tries {"detail":{"message":...}} first, then {"detail":"..."}, and falls
 *  back to the (truncated) raw body when the JSON does not match. */
FString FPS_AI_ConvAgent_ToolCustomization_ElevenLabs::ParseAPIError(
    int32 HttpCode, const FString& ResponseBody)
{
    TSharedPtr<FJsonObject> Root;
    const bool bParsed =
        FJsonSerializer::Deserialize(TJsonReaderFactory<>::Create(ResponseBody), Root)
        && Root.IsValid();
    if (bParsed)
    {
        // Shape 1: { "detail": { "message": "..." } }
        const TSharedPtr<FJsonObject>* DetailObj = nullptr;
        FString Message;
        if (Root->TryGetObjectField(TEXT("detail"), DetailObj)
            && (*DetailObj)->TryGetStringField(TEXT("message"), Message))
        {
            return FString::Printf(TEXT("HTTP %d: %s"), HttpCode, *Message);
        }
        // Shape 2: { "detail": "..." }
        FString DetailStr;
        if (Root->TryGetStringField(TEXT("detail"), DetailStr))
        {
            return FString::Printf(TEXT("HTTP %d: %s"), HttpCode, *DetailStr);
        }
    }
    // Fallback: raw body, truncated to keep the status line readable.
    return FString::Printf(TEXT("HTTP %d: %s"), HttpCode, *ResponseBody.Left(200));
}

View File

@ -0,0 +1,60 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "IDetailCustomization.h"
class IDetailLayoutBuilder;
/**
* Detail Customization for UPS_AI_ConvAgent_Tool_ElevenLabs data assets.
*
* Provides:
* - Tool Management: "Create Tool" / "Update Tool" / "Fetch Tool" buttons
* Manages standalone tools on ElevenLabs via /v1/convai/tools API.
* - Agent Sync: "Update All Agents" button
* Re-PATCHes all AgentConfig assets that reference this tool
* (needed when PromptFragment changes, since it's baked into agent prompts).
*/
class FPS_AI_ConvAgent_ToolCustomization_ElevenLabs : public IDetailCustomization
{
public:
    /** Factory used when registering this customization with the PropertyEditor module. */
    static TSharedRef<IDetailCustomization> MakeInstance();
    /** Adds the "Tool Management" category (action buttons + status line). */
    virtual void CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) override;
private:
    // ── Tool API ────────────────────────────────────────────────────────────
    // POST a new standalone tool; writes the returned tool_id into the asset.
    void OnCreateToolClicked();
    // PATCH the existing tool definition (requires ToolID).
    void OnUpdateToolClicked();
    // GET the tool definition and repopulate the asset (requires ToolID).
    void OnFetchToolClicked();
    // ── Agent Sync ──────────────────────────────────────────────────────────
    // Re-PATCH all AgentConfig assets (with AgentID) that reference this tool.
    void OnUpdateAllAgentsClicked();
    // ── Helpers ─────────────────────────────────────────────────────────────
    /** Read the ElevenLabs API key from project settings (empty if unset). */
    FString GetAPIKey() const;
    /** Retrieve the Tool data asset being edited (first selected object). */
    class UPS_AI_ConvAgent_Tool_ElevenLabs* GetEditedAsset() const;
    /** Build the ElevenLabs tool definition JSON from a Tool data asset.
     * Returns the inner definition caller wraps in {"tool_config": ...} for POST/PATCH.
     * Handles set_emotion (hardcoded params), action-type tools (Actions array),
     * and generic tools (no parameters).
     * NOTE(review): the implementation builds parameters generically from the
     * Parameters array; the set_emotion/Actions special-casing mentioned above
     * is not visible there — confirm whether this comment is stale. */
    static TSharedPtr<FJsonObject> BuildToolPayload(
        const class UPS_AI_ConvAgent_Tool_ElevenLabs* Tool);
    /** Display a status message. Color: red for errors, green for success, cyan for info. */
    void SetStatusText(const FString& Text);
    void SetStatusError(const FString& Text);
    void SetStatusSuccess(const FString& Text);
    /** Parse ElevenLabs API error JSON and return a human-readable message. */
    static FString ParseAPIError(int32 HttpCode, const FString& ResponseBody);
    // ── Cached state ────────────────────────────────────────────────────────
    // Objects selected in the details panel; populated in CustomizeDetails.
    TArray<TWeakObjectPtr<UObject>> SelectedObjects;
    // Status line written by the SetStatus* helpers; may be invalid after teardown.
    TSharedPtr<class STextBlock> StatusTextBlock;
};

View File

@ -0,0 +1,29 @@
// Copyright ASTERION. All Rights Reserved.
#include "PS_AI_ConvAgent_ToolFactory_ElevenLabs.h"
#include "PS_AI_ConvAgent_Tool_ElevenLabs.h"
#include "AssetTypeCategories.h"
// Configures the factory to produce Tool data assets from the Content Browser.
UPS_AI_ConvAgent_ToolFactory_ElevenLabs::UPS_AI_ConvAgent_ToolFactory_ElevenLabs()
{
    // Asset class this factory produces.
    SupportedClass = UPS_AI_ConvAgent_Tool_ElevenLabs::StaticClass();
    // Expose in the "create new asset" flow...
    bCreateNew = true;
    // ...and open the editor on the asset right after creation.
    bEditAfterNew = true;
}
/** Instantiate a new Tool data asset under the requested outer and name. */
UObject* UPS_AI_ConvAgent_ToolFactory_ElevenLabs::FactoryCreateNew(
    UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags,
    UObject* Context, FFeedbackContext* Warn)
{
    UPS_AI_ConvAgent_Tool_ElevenLabs* NewAsset =
        NewObject<UPS_AI_ConvAgent_Tool_ElevenLabs>(InParent, Class, Name, Flags);
    return NewAsset;
}
/** Label shown in the Content Browser's new-asset menu entry. */
FText UPS_AI_ConvAgent_ToolFactory_ElevenLabs::GetDisplayName() const
{
    static const FString DisplayName(TEXT("PS AI ConvAgent Tool (ElevenLabs)"));
    return FText::FromString(DisplayName);
}
// Places the new-asset entry under "Miscellaneous" in the Content Browser menu.
uint32 UPS_AI_ConvAgent_ToolFactory_ElevenLabs::GetMenuCategories() const
{
    return EAssetTypeCategories::Misc;
}

View File

@ -0,0 +1,27 @@
// Copyright ASTERION. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Factories/Factory.h"
#include "PS_AI_ConvAgent_ToolFactory_ElevenLabs.generated.h"
/**
* Factory that lets users create PS_AI_ConvAgent_Tool_ElevenLabs assets
* directly from the Content Browser (right-click > Miscellaneous).
*/
UCLASS()
class UPS_AI_ConvAgent_ToolFactory_ElevenLabs : public UFactory
{
    GENERATED_BODY()
public:
    UPS_AI_ConvAgent_ToolFactory_ElevenLabs();
    /** Creates a new UPS_AI_ConvAgent_Tool_ElevenLabs instance under InParent. */
    virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent,
        FName Name, EObjectFlags Flags, UObject* Context,
        FFeedbackContext* Warn) override;
    /** Label shown in the Content Browser's new-asset menu. */
    virtual FText GetDisplayName() const override;
    /** Menu category bitmask (Miscellaneous). */
    virtual uint32 GetMenuCategories() const override;
};

26
tools.txt Normal file
View File

@ -0,0 +1,26 @@
## Facial Expressions
You have a set_emotion tool to control your facial expression. Use it whenever the emotional context changes:
- Call set_emotion with emotion="joy" when happy, laughing, or excited
- Call set_emotion with emotion="sadness" when empathetic or discussing sad topics
- Call set_emotion with emotion="anger" when frustrated or discussing injustice
- Call set_emotion with emotion="surprise" when reacting to unexpected information
- Call set_emotion with emotion="fear" when discussing scary or worrying topics
- Call set_emotion with emotion="disgust" when reacting to unpleasant things
- Call set_emotion with emotion="neutral" to return to a calm expression
Use intensity to match the strength of the emotion:
- "low" for subtle hints (slight smile, mild concern)
- "medium" for normal expression (default)
- "high" for strong reactions (big laugh, deep sadness, shock)
Always return to neutral when the emotional moment passes.
## Réactions physiques (perform_action)
Tu as un couteau sur toi. Tu es prêt à t'en servir si tu te sens menacé.
- Le flic te demande tes papiers ou de t'identifier → perform_action draw_weapon immédiatement
- Le flic essaie de te fouiller ou te touche → perform_action draw_weapon immédiatement
- Le flic essaie de vous séparer, toi et ton pote → perform_action flee
- Si le flic sort son arme en premier → perform_action raise_hands