fix: add conversation_id support to LettaAgentV3 constructor (#9156)

**Error:**
```
TypeError: LettaAgentV2.__init__() got an unexpected keyword argument 'conversation_id'
```

**Trace:** https://letta.grafana.net/goto/afbk4da3fuxhcf?orgId=stacks-1189126

**Problem:**
The `POST /v1/conversations/{conversation_id}/compact` endpoint was failing
because `LettaAgentV3` inherits from `LettaAgentV2` without overriding
`__init__`, so passing `conversation_id` to the constructor failed.

**Fix:**
1. Add `__init__` to `LettaAgentV3` that accepts optional `conversation_id`
2. Remove redundant `conversation_id` param from `_checkpoint_messages` -
   use `self.conversation_id` consistently instead
3. Clean up internal callers that were passing `conversation_id=self.conversation_id`

Backward compatible - existing code creating `LettaAgentV3(agent_state, actor)`
still works since `conversation_id` defaults to `None`.

👾 Generated with [Letta Code](https://letta.com)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
cthomas
2026-01-28 11:34:16 -08:00
committed by Caren Thomas
parent bb2145c24c
commit 372c8dcc85

View File

@@ -41,6 +41,7 @@ from letta.schemas.step import StepProgression
from letta.schemas.step_metrics import StepMetrics
from letta.schemas.tool_execution_result import ToolExecutionResult
from letta.schemas.usage import LettaUsageStatistics
from letta.schemas.user import User
from letta.server.rest_api.utils import (
create_approval_request_message_from_llm_response,
create_letta_messages_from_llm_response,
@@ -72,6 +73,16 @@ class LettaAgentV3(LettaAgentV2):
* Support Gemini / OpenAI client
"""
def __init__(
    self,
    agent_state: AgentState,
    actor: User,
    conversation_id: str | None = None,
):
    """Initialize the v3 agent, optionally scoped to a conversation.

    Args:
        agent_state: The agent state passed through to the parent constructor.
        actor: The ``User`` performing operations, passed through to the parent.
        conversation_id: Optional conversation scope stored on the instance;
            it is later stamped onto persisted messages via
            ``self.conversation_id``. Defaults to ``None`` so existing callers
            of ``LettaAgentV3(agent_state, actor)`` keep working unchanged.
    """
    super().__init__(agent_state, actor)
    # Set conversation_id after parent init (which calls _initialize_state)
    self.conversation_id = conversation_id
def _initialize_state(self):
super()._initialize_state()
self._require_tool_call = False
@@ -485,9 +496,7 @@ class LettaAgentV3(LettaAgentV2):
context_window=self.agent_state.llm_config.context_window,
)
async def _checkpoint_messages(
self, run_id: str, step_id: str, new_messages: list[Message], in_context_messages: list[Message], conversation_id: str | None = None
):
async def _checkpoint_messages(self, run_id: str, step_id: str, new_messages: list[Message], in_context_messages: list[Message]):
"""
Checkpoint the current message state - run this only when the current messages are 'safe' - meaning the step has completed successfully.
@@ -506,7 +515,7 @@ class LettaAgentV3(LettaAgentV2):
for message in new_messages:
message.step_id = step_id
message.run_id = run_id
message.conversation_id = conversation_id
message.conversation_id = self.conversation_id
# persist the new message objects - ONLY place where messages are persisted
persisted_messages = await self.message_manager.create_many_messages_async(
@@ -808,7 +817,6 @@ class LettaAgentV3(LettaAgentV2):
step_id=step_id,
new_messages=[summary_message],
in_context_messages=messages,
conversation_id=self.conversation_id,
)
else:
@@ -878,7 +886,6 @@ class LettaAgentV3(LettaAgentV2):
step_id=step_id,
new_messages=input_messages_to_persist + new_messages,
in_context_messages=messages, # update the in-context messages
conversation_id=self.conversation_id,
)
# yield back generated messages
@@ -928,7 +935,6 @@ class LettaAgentV3(LettaAgentV2):
step_id=step_id,
new_messages=[summary_message],
in_context_messages=messages,
conversation_id=self.conversation_id,
)
except Exception as e: