Files
letta-server/letta/server/rest_api/routers/openai/assistants/schemas.py
Sarah Wooders ece8dab05d feat: various fixes (#2320)
Co-authored-by: Shubham Naik <shub@memgpt.ai>
Co-authored-by: Matt Zhou <mattzh1314@gmail.com>
Co-authored-by: Shubham Naik <shubham.naik10@gmail.com>
Co-authored-by: Caren Thomas <caren@letta.com>
Co-authored-by: cpacker <packercharles@gmail.com>
2024-12-31 10:53:33 +04:00

116 lines
5.5 KiB
Python

from typing import List, Optional
from pydantic import BaseModel, Field
from letta.schemas.openai.openai import MessageRoleType, OpenAIMessage, OpenAIThread, ToolCall, ToolCallOutput
class CreateAssistantRequest(BaseModel):
    """Request body for creating an assistant (OpenAI Assistants API shape, plus Letta extras)."""

    model: str = Field(..., description="The model to use for the assistant.")
    name: str = Field(..., description="The name of the assistant.")
    # Optional fields are annotated Optional[...] so the None default matches the
    # declared type (the originals declared e.g. `str = Field(None, ...)`).
    description: Optional[str] = Field(None, description="The description of the assistant.")
    instructions: str = Field(..., description="The instructions for the assistant.")
    tools: Optional[List[str]] = Field(None, description="The tools used by the assistant.")
    file_ids: Optional[List[str]] = Field(None, description="List of file IDs associated with the assistant.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the assistant.")

    # letta-only (not openai)
    embedding_model: Optional[str] = Field(None, description="The embedding model used by the assistant.")

    ## TODO: remove
    # user_id: str = Field(..., description="The unique identifier of the user.")
class CreateThreadRequest(BaseModel):
    """Request body for creating a thread."""

    messages: Optional[List[str]] = Field(None, description="List of message IDs associated with the thread.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the thread.")

    # letta-only
    assistant_name: Optional[str] = Field(None, description="The name of the assistant (i.e. Letta preset)")
class ModifyThreadRequest(BaseModel):
    """Request body for updating a thread's metadata."""

    # Annotated Optional[dict] so the None default matches the declared type
    # (the original declared `dict = Field(None, ...)`).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the thread.")
class ModifyMessageRequest(BaseModel):
    """Request body for updating a message's metadata."""

    # Annotated Optional[dict] so the None default matches the declared type
    # (the original declared `dict = Field(None, ...)`).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the message.")
class ModifyRunRequest(BaseModel):
    """Request body for updating a run's metadata."""

    # Annotated Optional[dict] so the None default matches the declared type
    # (the original declared `dict = Field(None, ...)`).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
class CreateMessageRequest(BaseModel):
    """Request body for adding a message to a thread."""

    # NOTE(review): role is a plain str here, while UserMessageRequest below uses
    # MessageRoleType — presumably kept loose for OpenAI compatibility; confirm.
    role: str = Field(..., description="Role of the message sender (either 'user' or 'system')")
    content: str = Field(..., description="The message content to be processed by the agent.")
    file_ids: Optional[List[str]] = Field(None, description="List of file IDs associated with the message.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the message.")
class UserMessageRequest(BaseModel):
    """Request body for sending a user message to an agent."""

    user_id: str = Field(..., description="The unique identifier of the user.")
    agent_id: str = Field(..., description="The unique identifier of the agent.")
    message: str = Field(..., description="The message content to be processed by the agent.")
    # Streaming is opt-in; defaults to a single buffered response.
    stream: bool = Field(default=False, description="Flag to determine if the response should be streamed. Set to True for streaming.")
    role: MessageRoleType = Field(default=MessageRoleType.user, description="Role of the message sender (either 'user' or 'system')")
class UserMessageResponse(BaseModel):
    """Response payload carrying the agent's reply messages."""

    messages: List[dict] = Field(..., description="List of messages generated by the agent in response to the received message.")
class GetAgentMessagesRequest(BaseModel):
    """Request body for paging through an agent's message history."""

    user_id: str = Field(..., description="The unique identifier of the user.")
    agent_id: str = Field(..., description="The unique identifier of the agent.")
    # Pagination is offset-based, newest-first.
    start: int = Field(..., description="Message index to start on (reverse chronological).")
    count: int = Field(..., description="How many messages to retrieve.")
class ListMessagesResponse(BaseModel):
    """Response payload listing messages in OpenAI message format."""

    messages: List[OpenAIMessage] = Field(..., description="List of message objects.")
class CreateAssistantFileRequest(BaseModel):
    """Request body for attaching an existing file to an assistant."""

    file_id: str = Field(..., description="The unique identifier of the file.")
class CreateRunRequest(BaseModel):
    """Request body for starting a run on an existing thread."""

    assistant_id: str = Field(..., description="The unique identifier of the assistant.")
    model: Optional[str] = Field(None, description="The model used by the run.")
    instructions: str = Field(..., description="The instructions for the run.")
    additional_instructions: Optional[str] = Field(None, description="Additional instructions for the run.")
    # When provided, these take precedence over the assistant's configured tools.
    tools: Optional[List[ToolCall]] = Field(None, description="The tools used by the run (overrides assistant).")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
class CreateThreadRunRequest(BaseModel):
    """Request body for creating a thread and running it in one call."""

    assistant_id: str = Field(..., description="The unique identifier of the assistant.")
    thread: OpenAIThread = Field(..., description="The thread to run.")
    # NOTE(review): model/instructions are required here but optional in
    # CreateRunRequest above — confirm whether that asymmetry is intended.
    model: str = Field(..., description="The model used by the run.")
    instructions: str = Field(..., description="The instructions for the run.")
    # When provided, these take precedence over the assistant's configured tools.
    tools: Optional[List[ToolCall]] = Field(None, description="The tools used by the run (overrides assistant).")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
class DeleteAssistantResponse(BaseModel):
    """Response confirming deletion of an assistant (OpenAI deletion-status shape)."""

    # NOTE(review): descriptions say "agent" — presumably because Letta backs
    # assistants with agents; confirm the wording is intentional.
    id: str = Field(..., description="The unique identifier of the agent.")
    object: str = "assistant.deleted"  # fixed discriminator per OpenAI convention
    deleted: bool = Field(..., description="Whether the agent was deleted.")
class DeleteAssistantFileResponse(BaseModel):
    """Response confirming detachment/deletion of an assistant file (OpenAI deletion-status shape)."""

    id: str = Field(..., description="The unique identifier of the file.")
    object: str = "assistant.file.deleted"  # fixed discriminator per OpenAI convention
    deleted: bool = Field(..., description="Whether the file was deleted.")
class DeleteThreadResponse(BaseModel):
    """Response confirming deletion of a thread (OpenAI deletion-status shape)."""

    # Descriptions corrected from copy-pasted "agent" wording: this response
    # describes a thread, as the `object` discriminator shows.
    id: str = Field(..., description="The unique identifier of the thread.")
    object: str = "thread.deleted"  # fixed discriminator per OpenAI convention
    deleted: bool = Field(..., description="Whether the thread was deleted.")
class SubmitToolOutputsToRunRequest(BaseModel):
    """Request body for submitting tool-call outputs back to a run."""

    # NOTE(review): OpenAI's API names this field `tool_outputs` (singular
    # "tool"); renaming would break existing callers, so it is left as-is.
    tools_outputs: List[ToolCallOutput] = Field(..., description="The tool outputs to submit.")