From 82c4794616ab7b9e54e580492e7411e0e12e9253 Mon Sep 17 00:00:00 2001
From: jnjpng
Date: Fri, 6 Mar 2026 14:36:44 -0800
Subject: [PATCH] feat(message): add OpenAI websocket responses mode header
 (#1297)

Co-authored-by: Letta Code
---
 src/agent/message.ts | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/src/agent/message.ts b/src/agent/message.ts
index 7f20a06..803e50d 100644
--- a/src/agent/message.ts
+++ b/src/agent/message.ts
@@ -96,7 +96,11 @@ export async function sendMessageStream(
   opts: SendMessageStreamOptions = { streamTokens: true, background: true },
   // Disable SDK retries by default - state management happens outside the stream,
   // so retries would violate idempotency and create race conditions
-  requestOptions: { maxRetries?: number; signal?: AbortSignal } = {
+  requestOptions: {
+    maxRetries?: number;
+    signal?: AbortSignal;
+    headers?: Record<string, string>;
+  } = {
     maxRetries: 0,
   },
 ): Promise> {
@@ -123,10 +127,21 @@
     );
   }
 
+  const extraHeaders: Record<string, string> = {};
+  if (process.env.LETTA_RESPONSES_WS === "1") {
+    extraHeaders["X-Experimental-OpenAI-Responses-Websocket"] = "true";
+  }
+
   const stream = await client.conversations.messages.create(
     resolvedConversationId,
     requestBody,
-    requestOptions,
+    {
+      ...requestOptions,
+      headers: {
+        ...((requestOptions.headers as Record<string, string>) ?? {}),
+        ...extraHeaders,
+      },
+    },
   );
 
   if (requestStartTime !== undefined) {