diff --git a/examples/langchain_tool_usage.py b/examples/langchain_tool_usage.py
index cf55d120..3ce4eb39 100644
--- a/examples/langchain_tool_usage.py
+++ b/examples/langchain_tool_usage.py
@@ -73,7 +73,7 @@ def main():
     print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")
 
     # Send a message to the agent
-    send_message_response = client.user_message(agent_id=agent_state.id, message="How do you pronounce Albert Einstein's name?")
+    send_message_response = client.user_message(agent_id=agent_state.id, message="Tell me a fun fact about Albert Einstein!")
     for message in send_message_response.messages:
         response_json = json.dumps(message.model_dump(), indent=4)
         print(f"{response_json}\n")
diff --git a/letta/client/client.py b/letta/client/client.py
index a2b62fa7..413e6b64 100644
--- a/letta/client/client.py
+++ b/letta/client/client.py
@@ -2950,18 +2950,11 @@
             langchain_tool=langchain_tool,
             additional_imports_module_attr_map=additional_imports_module_attr_map,
         )
-        return self.server.tool_manager.create_or_update_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=self.user)
-
-    def load_crewai_tool(self, crewai_tool: "CrewAIBaseTool", additional_imports_module_attr_map: dict[str, str] = None) -> Tool:
-        tool_create = ToolCreate.from_crewai(
-            crewai_tool=crewai_tool,
-            additional_imports_module_attr_map=additional_imports_module_attr_map,
-        )
-        return self.server.tool_manager.create_or_update_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=self.user)
+        return self.server.tool_manager.create_or_update_langchain_tool(tool_create=tool_create, actor=self.user)
 
     def load_composio_tool(self, action: "ActionType") -> Tool:
         tool_create = ToolCreate.from_composio(action_name=action.name)
-        return self.server.tool_manager.create_or_update_composio_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=self.user)
+        return self.server.tool_manager.create_or_update_composio_tool(tool_create=tool_create, actor=self.user)
 
     def create_tool(
         self,
diff --git a/letta/functions/helpers.py b/letta/functions/helpers.py
index bd2643c4..8c232cd5 100644
--- a/letta/functions/helpers.py
+++ b/letta/functions/helpers.py
@@ -230,9 +230,7 @@ def generate_imported_tool_instantiation_call_str(obj: Any) -> Optional[str]:
 
 
 def is_base_model(obj: Any):
-    from langchain_core.pydantic_v1 import BaseModel as LangChainBaseModel
-
-    return isinstance(obj, BaseModel) or isinstance(obj, LangChainBaseModel)
+    return isinstance(obj, BaseModel)
 
 
 def generate_import_code(module_attr_map: Optional[dict]):
diff --git a/letta/orm/enums.py b/letta/orm/enums.py
index e87d28d2..d3aac7ab 100644
--- a/letta/orm/enums.py
+++ b/letta/orm/enums.py
@@ -7,6 +7,7 @@ class ToolType(str, Enum):
     LETTA_MEMORY_CORE = "letta_memory_core"
     LETTA_MULTI_AGENT_CORE = "letta_multi_agent_core"
     EXTERNAL_COMPOSIO = "external_composio"
+    EXTERNAL_LANGCHAIN = "external_langchain"
 
 
 class JobType(str, Enum):
diff --git a/letta/schemas/tool.py b/letta/schemas/tool.py
index ab4736e6..f17498c1 100644
--- a/letta/schemas/tool.py
+++ b/letta/schemas/tool.py
@@ -79,7 +79,7 @@ class Tool(BaseTool):
             self.json_schema = get_json_schema_from_module(module_name=LETTA_MULTI_AGENT_TOOL_MODULE_NAME, function_name=self.name)
         elif self.tool_type == ToolType.EXTERNAL_COMPOSIO:
             # If it is a composio tool, we generate both the source code and json schema on the fly here
-            # TODO: This is brittle, need to think long term about how to improve this
+            # TODO: Deriving the composio action name is brittle, need to think long term about how to improve this
             try:
                 composio_action = generate_composio_action_from_func_name(self.name)
                 tool_create = ToolCreate.from_composio(composio_action)
diff --git a/letta/server/rest_api/routers/v1/tools.py b/letta/server/rest_api/routers/v1/tools.py
index 65a403c8..1d607d84 100644
--- a/letta/server/rest_api/routers/v1/tools.py
+++ b/letta/server/rest_api/routers/v1/tools.py
@@ -231,7 +231,7 @@ def add_composio_tool(
 
     try:
         tool_create = ToolCreate.from_composio(action_name=composio_action_name)
-        return server.tool_manager.create_or_update_composio_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=actor)
+        return server.tool_manager.create_or_update_composio_tool(tool_create=tool_create, actor=actor)
     except EnumStringNotFound as e:
         raise HTTPException(
             status_code=400,  # Bad Request
diff --git a/letta/services/tool_manager.py b/letta/services/tool_manager.py
index 0aa08206..27188f93 100644
--- a/letta/services/tool_manager.py
+++ b/letta/services/tool_manager.py
@@ -11,7 +11,7 @@ from letta.orm.enums import ToolType
 from letta.orm.errors import NoResultFound
 from letta.orm.tool import Tool as ToolModel
 from letta.schemas.tool import Tool as PydanticTool
-from letta.schemas.tool import ToolUpdate
+from letta.schemas.tool import ToolCreate, ToolUpdate
 from letta.schemas.user import User as PydanticUser
 from letta.utils import enforce_types, printd
 
@@ -57,9 +57,12 @@ class ToolManager:
         return tool
 
     @enforce_types
-    def create_or_update_composio_tool(self, pydantic_tool: PydanticTool, actor: PydanticUser) -> PydanticTool:
-        pydantic_tool.tool_type = ToolType.EXTERNAL_COMPOSIO
-        return self.create_or_update_tool(pydantic_tool, actor)
+    def create_or_update_composio_tool(self, tool_create: ToolCreate, actor: PydanticUser) -> PydanticTool:
+        return self.create_or_update_tool(PydanticTool(tool_type=ToolType.EXTERNAL_COMPOSIO, **tool_create.model_dump()), actor)
+
+    @enforce_types
+    def create_or_update_langchain_tool(self, tool_create: ToolCreate, actor: PydanticUser) -> PydanticTool:
+        return self.create_or_update_tool(PydanticTool(tool_type=ToolType.EXTERNAL_LANGCHAIN, **tool_create.model_dump()), actor)
 
     @enforce_types
     def create_tool(self, pydantic_tool: PydanticTool, actor: PydanticUser) -> PydanticTool:
diff --git a/tests/integration_test_tool_execution_sandbox.py b/tests/integration_test_tool_execution_sandbox.py
index 8418ac47..ea3e6473 100644
--- a/tests/integration_test_tool_execution_sandbox.py
+++ b/tests/integration_test_tool_execution_sandbox.py
@@ -190,7 +190,7 @@ def list_tool(test_user):
 def composio_github_star_tool(test_user):
     tool_manager = ToolManager()
     tool_create = ToolCreate.from_composio(action_name="GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER")
-    tool = tool_manager.create_or_update_composio_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=test_user)
+    tool = tool_manager.create_or_update_composio_tool(tool_create=tool_create, actor=test_user)
     yield tool
 
 
@@ -198,7 +198,7 @@ def composio_github_star_tool(test_user):
 def composio_gmail_get_profile_tool(test_user):
     tool_manager = ToolManager()
     tool_create = ToolCreate.from_composio(action_name="GMAIL_GET_PROFILE")
-    tool = tool_manager.create_or_update_composio_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=test_user)
+    tool = tool_manager.create_or_update_composio_tool(tool_create=tool_create, actor=test_user)
     yield tool
 
 
diff --git a/tests/test_managers.py b/tests/test_managers.py
index 0b6d629b..a4d8adce 100644
--- a/tests/test_managers.py
+++ b/tests/test_managers.py
@@ -198,7 +198,7 @@ def print_tool(server: SyncServer, default_user, default_organization):
 @pytest.fixture
 def composio_github_star_tool(server, default_user):
     tool_create = ToolCreate.from_composio(action_name="GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER")
-    tool = server.tool_manager.create_or_update_composio_tool(pydantic_tool=PydanticTool(**tool_create.model_dump()), actor=default_user)
+    tool = server.tool_manager.create_or_update_composio_tool(tool_create=tool_create, actor=default_user)
     yield tool
 
 
diff --git a/tests/test_tool_schema_parsing.py b/tests/test_tool_schema_parsing.py
index 627302ed..4a3fb3cc 100644
--- a/tests/test_tool_schema_parsing.py
+++ b/tests/test_tool_schema_parsing.py
@@ -205,3 +205,31 @@ def test_composio_tool_schema_generation(openai_model: str, structured_output: b
     except:
         print(f"Failed to call OpenAI using schema {schema} generated from {action_name}\n\n")
         raise
+
+
+@pytest.mark.parametrize("openai_model", ["gpt-4o-mini"])
+@pytest.mark.parametrize("structured_output", [True])
+def test_langchain_tool_schema_generation(openai_model: str, structured_output: bool):
+    """Test that we can generate the schemas for some Langchain tools."""
+    from langchain_community.tools import WikipediaQueryRun
+    from langchain_community.utilities import WikipediaAPIWrapper
+
+    api_wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=500)
+    langchain_tool = WikipediaQueryRun(api_wrapper=api_wrapper)
+
+    tool_create = ToolCreate.from_langchain(
+        langchain_tool=langchain_tool,
+        additional_imports_module_attr_map={"langchain_community.utilities": "WikipediaAPIWrapper"},
+    )
+
+    assert tool_create.json_schema
+    schema = tool_create.json_schema
+    print(f"The schema for {langchain_tool.name}: {json.dumps(schema, indent=4)}\n\n")
+
+    try:
+        _openai_payload(openai_model, schema, structured_output)
+        print(f"Successfully called OpenAI using schema {schema} generated from {langchain_tool.name}\n\n")
+    except:
+        print(f"Failed to call OpenAI using schema {schema} generated from {langchain_tool.name}\n\n")
+
+        raise