fix: update model in UI when switched in the ADE (#297)

This commit is contained in:
Kian Jones
2025-12-18 15:50:19 -05:00
committed by GitHub
parent 1087ccc2c2
commit fc43e2d6ed
2 changed files with 80 additions and 1 deletions

View File

@@ -433,6 +433,10 @@ export default function App({
| null
>(null);
const [llmConfig, setLlmConfig] = useState<LlmConfig | null>(null);
const llmConfigRef = useRef(llmConfig);
useEffect(() => {
llmConfigRef.current = llmConfig;
}, [llmConfig]);
const [currentModelId, setCurrentModelId] = useState<string | null>(null);
const [agentName, setAgentName] = useState<string | null>(null);
const [agentDescription, setAgentDescription] = useState<string | null>(null);
@@ -831,12 +835,64 @@ export default function App({
agentIdRef.current,
currentInput,
);
// Define callback to sync agent state on first message chunk
// This ensures the UI shows the correct model as early as possible
const syncAgentState = async () => {
try {
const client = await getClient();
const agent = await client.agents.retrieve(agentIdRef.current);
// Check if the model has changed by comparing llm_config
const currentModel = llmConfigRef.current?.model;
const currentEndpoint = llmConfigRef.current?.model_endpoint_type;
const agentModel = agent.llm_config.model;
const agentEndpoint = agent.llm_config.model_endpoint_type;
if (
currentModel !== agentModel ||
currentEndpoint !== agentEndpoint
) {
// Model has changed - update local state
setLlmConfig(agent.llm_config);
// Derive model ID from llm_config for ModelSelector
// Try to find matching model by handle in models.json
const { getModelInfo } = await import("../agent/model");
const agentModelHandle =
agent.llm_config.model_endpoint_type && agent.llm_config.model
? `${agent.llm_config.model_endpoint_type}/${agent.llm_config.model}`
: agent.llm_config.model;
const modelInfo = getModelInfo(agentModelHandle || "");
if (modelInfo) {
setCurrentModelId(modelInfo.id);
} else {
// Model not in models.json (e.g., BYOK model) - use handle as ID
setCurrentModelId(agentModelHandle || null);
}
// Also update agent state if other fields changed
setAgentName(agent.name);
setAgentDescription(agent.description ?? null);
const lastRunCompletion = (
agent as { last_run_completion?: string }
).last_run_completion;
setAgentLastRunAt(lastRunCompletion ?? null);
}
} catch (error) {
// Silently fail - don't interrupt the conversation flow
console.error("Failed to sync agent state:", error);
}
};
const { stopReason, approval, approvals, apiDurationMs, lastRunId } =
await drainStreamWithResume(
stream,
buffersRef.current,
refreshDerivedThrottled,
abortControllerRef.current?.signal,
syncAgentState,
);
// Track API duration

View File

@@ -31,6 +31,7 @@ export async function drainStream(
buffers: ReturnType<typeof createBuffers>,
refresh: () => void,
abortSignal?: AbortSignal,
onFirstMessage?: () => void,
): Promise<DrainResult> {
const startTime = performance.now();
@@ -47,6 +48,7 @@ export async function drainStream(
let stopReason: StopReasonType | null = null;
let lastRunId: string | null = null;
let lastSeqId: number | null = null;
let hasCalledFirstMessage = false;
for await (const chunk of stream) {
// console.log("chunk", chunk);
@@ -71,6 +73,18 @@ export async function drainStream(
if (chunk.message_type === "ping") continue;
// Call onFirstMessage callback on the first agent response chunk
if (
!hasCalledFirstMessage &&
onFirstMessage &&
(chunk.message_type === "reasoning_message" ||
chunk.message_type === "assistant_message")
) {
hasCalledFirstMessage = true;
// Call async in background - don't block stream processing
queueMicrotask(() => onFirstMessage());
}
// Remove tool from pending approvals when it completes (server-side execution finished)
// This means the tool was executed server-side and doesn't need approval
if (chunk.message_type === "tool_return_message") {
@@ -218,6 +232,7 @@ export async function drainStream(
* @param buffers - Buffer to accumulate chunks
* @param refresh - Callback to refresh UI
* @param abortSignal - Optional abort signal for cancellation
* @param onFirstMessage - Optional callback to invoke on first message chunk
* @returns Result with stop_reason, approval info, and timing
*/
export async function drainStreamWithResume(
@@ -225,11 +240,18 @@ export async function drainStreamWithResume(
buffers: ReturnType<typeof createBuffers>,
refresh: () => void,
abortSignal?: AbortSignal,
onFirstMessage?: () => void,
): Promise<DrainResult> {
const overallStartTime = performance.now();
// Attempt initial drain
let result = await drainStream(stream, buffers, refresh, abortSignal);
let result = await drainStream(
stream,
buffers,
refresh,
abortSignal,
onFirstMessage,
);
// If stream ended without proper stop_reason and we have resume info, try once to reconnect
if (
@@ -247,6 +269,7 @@ export async function drainStreamWithResume(
});
// Continue draining from where we left off
// Note: Don't pass onFirstMessage again - already called in initial drain
const resumeResult = await drainStream(
resumeStream,
buffers,