run black, add isort config to pyproject.toml

Caren Thomas
2024-12-26 19:43:11 -08:00
parent ea0cca7bad
commit fd8961c39e
146 changed files with 476 additions and 655 deletions
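
The pyproject.toml hunk itself is not reproduced among the diffs below, so the exact values are an assumption. A minimal sketch of the kind of configuration this commit describes, inferred from the collapsed imports below fitting within 140 columns and from isort being aligned with black:

[tool.black]
# assumed: 140 matches the longest single-line imports in the diffs below
line-length = 140

[tool.isort]
# the "black" profile makes isort's wrapping and trailing-comma style compatible with black
profile = "black"
line_length = 140

With settings like these, running `isort .` followed by `black .` merges parenthesized import blocks that fit on one 140-column line, which accounts for most of the deletions in this diff.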

View File

@@ -5,13 +5,9 @@ from typing import List, Optional, Union
 from letta.llm_api.helpers import make_post_request
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_request import ChatCompletionRequest, Tool
+from letta.schemas.openai.chat_completion_response import ChatCompletionResponse, Choice, FunctionCall
 from letta.schemas.openai.chat_completion_response import (
-    ChatCompletionResponse,
-    Choice,
-    FunctionCall,
-)
-from letta.schemas.openai.chat_completion_response import (
     Message as ChoiceMessage,  # NOTE: avoid conflict with our own Letta Message datatype
 )
 from letta.schemas.openai.chat_completion_response import ToolCall, UsageStatistics
 from letta.utils import get_utc_time, smart_urljoin

View File

@@ -1,6 +1,7 @@
 from collections import defaultdict
 
 import requests
+
 from letta.llm_api.helpers import make_post_request
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse

View File

@@ -3,16 +3,13 @@ import uuid
 from typing import List, Optional, Union
+
 import requests
 
 from letta.local_llm.utils import count_tokens
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_request import ChatCompletionRequest, Tool
+from letta.schemas.openai.chat_completion_response import ChatCompletionResponse, Choice, FunctionCall
 from letta.schemas.openai.chat_completion_response import (
-    ChatCompletionResponse,
-    Choice,
-    FunctionCall,
-)
-from letta.schemas.openai.chat_completion_response import (
     Message as ChoiceMessage,  # NOTE: avoid conflict with our own Letta Message datatype
 )
 from letta.schemas.openai.chat_completion_response import ToolCall, UsageStatistics
 from letta.utils import get_tool_call_id, get_utc_time, json_dumps, smart_urljoin
@@ -275,10 +272,7 @@ def convert_tools_to_cohere_format(tools: List[Tool], inner_thoughts_in_kwargs:
     if inner_thoughts_in_kwargs:
         # NOTE: since Cohere doesn't allow "text" in the response when a tool call happens, if we want
         # a simultaneous CoT + tool call we need to put it inside a kwarg
-        from letta.local_llm.constants import (
-            INNER_THOUGHTS_KWARG,
-            INNER_THOUGHTS_KWARG_DESCRIPTION,
-        )
+        from letta.local_llm.constants import INNER_THOUGHTS_KWARG, INNER_THOUGHTS_KWARG_DESCRIPTION
 
         for cohere_tool in tools_dict_list:
             cohere_tool["parameter_definitions"][INNER_THOUGHTS_KWARG] = {

View File

@@ -2,19 +2,13 @@ import uuid
 from typing import List, Optional, Tuple
 
 import requests
 
 from letta.constants import NON_USER_MSG_PREFIX
 from letta.llm_api.helpers import make_post_request
 from letta.local_llm.json_parser import clean_json_string_extra_backslash
 from letta.local_llm.utils import count_tokens
 from letta.schemas.openai.chat_completion_request import Tool
-from letta.schemas.openai.chat_completion_response import (
-    ChatCompletionResponse,
-    Choice,
-    FunctionCall,
-    Message,
-    ToolCall,
-    UsageStatistics,
-)
+from letta.schemas.openai.chat_completion_response import ChatCompletionResponse, Choice, FunctionCall, Message, ToolCall, UsageStatistics
 from letta.utils import get_tool_call_id, get_utc_time, json_dumps
@@ -229,10 +223,7 @@ def convert_tools_to_google_ai_format(tools: List[Tool], inner_thoughts_in_kwarg
                 param_fields["type"] = param_fields["type"].upper()
         # Add inner thoughts
         if inner_thoughts_in_kwargs:
-            from letta.local_llm.constants import (
-                INNER_THOUGHTS_KWARG,
-                INNER_THOUGHTS_KWARG_DESCRIPTION,
-            )
+            from letta.local_llm.constants import INNER_THOUGHTS_KWARG, INNER_THOUGHTS_KWARG_DESCRIPTION
 
             func["parameters"]["properties"][INNER_THOUGHTS_KWARG] = {
                 "type": "STRING",

View File

@@ -5,6 +5,7 @@ from collections import OrderedDict
 from typing import Any, List, Union
 
 import requests
+
 from letta.constants import OPENAI_CONTEXT_WINDOW_ERROR_SUBSTRING
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse, Choice
 from letta.utils import json_dumps, printd

View File

@@ -3,42 +3,27 @@ import time
 from typing import List, Optional, Union
 
 import requests
 
 from letta.constants import CLI_WARNING_PREFIX
 from letta.errors import LettaConfigurationError, RateLimitExceededError
 from letta.llm_api.anthropic import anthropic_chat_completions_request
 from letta.llm_api.azure_openai import azure_openai_chat_completions_request
-from letta.llm_api.google_ai import (
-    convert_tools_to_google_ai_format,
-    google_ai_chat_completions_request,
-)
-from letta.llm_api.helpers import (
-    add_inner_thoughts_to_functions,
-    unpack_all_inner_thoughts_from_kwargs,
-)
+from letta.llm_api.google_ai import convert_tools_to_google_ai_format, google_ai_chat_completions_request
+from letta.llm_api.helpers import add_inner_thoughts_to_functions, unpack_all_inner_thoughts_from_kwargs
 from letta.llm_api.openai import (
     build_openai_chat_completions_request,
     openai_chat_completions_process_stream,
     openai_chat_completions_request,
 )
 from letta.local_llm.chat_completion_proxy import get_chat_completion
-from letta.local_llm.constants import (
-    INNER_THOUGHTS_KWARG,
-    INNER_THOUGHTS_KWARG_DESCRIPTION,
-)
+from letta.local_llm.constants import INNER_THOUGHTS_KWARG, INNER_THOUGHTS_KWARG_DESCRIPTION
 from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.message import Message
-from letta.schemas.openai.chat_completion_request import (
-    ChatCompletionRequest,
-    Tool,
-    cast_message_to_subtype,
-)
+from letta.schemas.openai.chat_completion_request import ChatCompletionRequest, Tool, cast_message_to_subtype
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
 from letta.settings import ModelSettings
-from letta.streaming_interface import (
-    AgentChunkStreamingInterface,
-    AgentRefreshStreamingInterface,
-)
+from letta.streaming_interface import AgentChunkStreamingInterface, AgentRefreshStreamingInterface
 
 LLM_API_PROVIDER_OPTIONS = ["openai", "azure", "anthropic", "google_ai", "cohere", "local", "groq"]

View File

@@ -1,4 +1,5 @@
 import requests
+
 from letta.utils import printd, smart_urljoin

View File

@@ -6,44 +6,29 @@ import httpx
 import requests
 from httpx_sse import connect_sse
 from httpx_sse._exceptions import SSEError
 
 from letta.constants import OPENAI_CONTEXT_WINDOW_ERROR_SUBSTRING
 from letta.errors import LLMError
-from letta.llm_api.helpers import (
-    add_inner_thoughts_to_functions,
-    convert_to_structured_output,
-    make_post_request,
-)
-from letta.local_llm.constants import (
-    INNER_THOUGHTS_KWARG,
-    INNER_THOUGHTS_KWARG_DESCRIPTION,
-)
+from letta.llm_api.helpers import add_inner_thoughts_to_functions, convert_to_structured_output, make_post_request
+from letta.local_llm.constants import INNER_THOUGHTS_KWARG, INNER_THOUGHTS_KWARG_DESCRIPTION
 from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.message import Message as _Message
 from letta.schemas.message import MessageRole as _MessageRole
 from letta.schemas.openai.chat_completion_request import ChatCompletionRequest
-from letta.schemas.openai.chat_completion_request import (
-    FunctionCall as ToolFunctionChoiceFunctionCall,
-)
-from letta.schemas.openai.chat_completion_request import (
-    Tool,
-    ToolFunctionChoice,
-    cast_message_to_subtype,
-)
+from letta.schemas.openai.chat_completion_request import FunctionCall as ToolFunctionChoiceFunctionCall
+from letta.schemas.openai.chat_completion_request import Tool, ToolFunctionChoice, cast_message_to_subtype
 from letta.schemas.openai.chat_completion_response import (
     ChatCompletionChunkResponse,
     ChatCompletionResponse,
     Choice,
     FunctionCall,
     Message,
     ToolCall,
     UsageStatistics,
 )
 from letta.schemas.openai.embedding_response import EmbeddingResponse
-from letta.streaming_interface import (
-    AgentChunkStreamingInterface,
-    AgentRefreshStreamingInterface,
-)
+from letta.streaming_interface import AgentChunkStreamingInterface, AgentRefreshStreamingInterface
 from letta.utils import get_tool_call_id, smart_urljoin
 
 OPENAI_SSE_DONE = "[DONE]"