feat: npm requirements for tools

This commit is contained in:
Andy Li
2025-08-04 14:15:10 -07:00
committed by GitHub
parent 2927295ee7
commit 64d91d6c8a
7 changed files with 59 additions and 94 deletions

View File

@@ -0,0 +1,31 @@
"""npm requirements in tools
Revision ID: d007f4ca66bf
Revises: 74e860718e0d
Create Date: 2025-08-04 13:40:32.707036
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "d007f4ca66bf"
down_revision: Union[str, None] = "74e860718e0d"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Apply this revision: add an ``npm_requirements`` JSON column to ``tools``.

    The column is nullable so existing rows require no backfill.
    """
    npm_column = sa.Column("npm_requirements", sa.JSON(), nullable=True)
    op.add_column("tools", npm_column)
def downgrade() -> None:
    """Revert this revision: drop the ``npm_requirements`` column from ``tools``."""
    table_name, column_name = "tools", "npm_requirements"
    op.drop_column(table_name, column_name)

View File

@@ -43,11 +43,12 @@ class Tool(SqlalchemyBase, OrganizationMixin):
tags: Mapped[List] = mapped_column(JSON, doc="Metadata tags used to filter tools.")
source_type: Mapped[ToolSourceType] = mapped_column(String, doc="The type of the source code.", default=ToolSourceType.json)
source_code: Mapped[Optional[str]] = mapped_column(String, doc="The source code of the function.")
json_schema: Mapped[Optional[dict]] = mapped_column(JSON, default=lambda: {}, doc="The OAI compatable JSON schema of the function.")
json_schema: Mapped[Optional[dict]] = mapped_column(JSON, default=lambda: {}, doc="The OAI compatible JSON schema of the function.")
args_json_schema: Mapped[Optional[dict]] = mapped_column(JSON, default=lambda: {}, doc="The JSON schema of the function arguments.")
pip_requirements: Mapped[Optional[List]] = mapped_column(
JSON, nullable=True, doc="Optional list of pip packages required by this tool."
)
npm_requirements: Mapped[list | None] = mapped_column(JSON, doc="Optional list of npm packages required by this tool.")
metadata_: Mapped[Optional[dict]] = mapped_column(JSON, default=lambda: {}, doc="A dictionary of additional metadata for the tool.")
# relationships
organization: Mapped["Organization"] = relationship("Organization", back_populates="tools", lazy="selectin")

View File

@@ -132,7 +132,8 @@ class ToolSourceType(str, Enum):
"""Defines what a tool was derived from"""
python = "python"
json = "json"
typescript = "typescript"
json = "json" # TODO (cliandy): is this still valid?
class ActorType(str, Enum):

View File

@@ -0,0 +1,12 @@
from pydantic import BaseModel, Field
class NpmRequirement(BaseModel):
    """A single npm package dependency declared by a tool."""

    # Package name is required and must be non-empty.
    name: str = Field(..., min_length=1, description="Name of the npm package.")
    # Version is optional; when absent, the latest version is implied.
    version: str | None = Field(None, description="Optional version of the package, following semantic versioning.")

    def __str__(self) -> str:
        """Return a npm-installable string format."""
        # Version is wrapped in double quotes — presumably so semver range
        # characters (e.g. ^, ~) survive shell expansion; TODO confirm intent.
        return f'{self.name}@"{self.version}"' if self.version else self.name

View File

@@ -81,7 +81,9 @@ class E2BSandboxConfig(BaseModel):
class ModalSandboxConfig(BaseModel):
timeout: int = Field(5 * 60, description="Time limit for the sandbox (in seconds).")
pip_requirements: Optional[List[str]] = Field(None, description="A list of pip packages to install in the Modal sandbox")
pip_requirements: list[str] | None = Field(None, description="A list of pip packages to install in the Modal sandbox")
npm_requirements: list[str] | None = Field(None, description="A list of npm packages to install in the Modal sandbox")
language: Literal["python", "typescript"] = "python"
@property
def type(self) -> "SandboxType":

View File

@@ -24,6 +24,7 @@ from letta.functions.schema_generator import (
from letta.log import get_logger
from letta.schemas.enums import ToolType
from letta.schemas.letta_base import LettaBase
from letta.schemas.npm_requirement import NpmRequirement
from letta.schemas.pip_requirement import PipRequirement
logger = get_logger(__name__)
@@ -60,7 +61,8 @@ class Tool(BaseTool):
# tool configuration
return_char_limit: int = Field(FUNCTION_RETURN_CHAR_LIMIT, description="The maximum number of characters in the response.")
pip_requirements: Optional[List[PipRequirement]] = Field(None, description="Optional list of pip packages required by this tool.")
pip_requirements: list[PipRequirement] | None = Field(None, description="Optional list of pip packages required by this tool.")
npm_requirements: list[NpmRequirement] | None = Field(None, description="Optional list of npm packages required by this tool.")
# metadata fields
created_by_id: Optional[str] = Field(None, description="The id of the user that made this Tool.")
@@ -144,7 +146,8 @@ class ToolCreate(LettaBase):
)
args_json_schema: Optional[Dict] = Field(None, description="The args JSON schema of the function.")
return_char_limit: int = Field(FUNCTION_RETURN_CHAR_LIMIT, description="The maximum number of characters in the response.")
pip_requirements: Optional[List[PipRequirement]] = Field(None, description="Optional list of pip packages required by this tool.")
pip_requirements: list[PipRequirement] | None = Field(None, description="Optional list of pip packages required by this tool.")
npm_requirements: list[NpmRequirement] | None = Field(None, description="Optional list of npm packages required by this tool.")
@classmethod
def from_mcp(cls, mcp_server_name: str, mcp_tool: MCPTool) -> "ToolCreate":
@@ -206,39 +209,6 @@ class ToolCreate(LettaBase):
json_schema=json_schema,
)
@classmethod
def from_langchain(
    cls,
    langchain_tool: "LangChainBaseTool",
    additional_imports_module_attr_map: dict[str, str] | None = None,
) -> "ToolCreate":
    """
    Class method to create an instance of Tool from a Langchain tool (must be from langchain_community.tools).

    Args:
        langchain_tool (LangChainBaseTool): An instance of a LangChain BaseTool (BaseTool from LangChain)
        additional_imports_module_attr_map (dict[str, str]): A mapping of module names to attribute name. This is used internally to import all the required classes for the langchain tool. For example, you would pass in `{"langchain_community.utilities": "WikipediaAPIWrapper"}` for `from langchain_community.tools import WikipediaQueryRun`. NOTE: You do NOT need to specify the tool import here, that is done automatically for you.

    Returns:
        Tool: A Letta Tool initialized with attributes derived from the provided LangChain BaseTool object.
    """
    # Imported lazily so langchain remains an optional dependency of this module.
    from letta.functions.helpers import generate_langchain_tool_wrapper

    description = langchain_tool.description
    source_type = "python"
    tags = ["langchain"]
    # NOTE: langchain tools may come from different packages
    # The wrapper embeds the tool (and any extra imports) into a standalone source string.
    wrapper_func_name, wrapper_function_str = generate_langchain_tool_wrapper(langchain_tool, additional_imports_module_attr_map)
    # Schema is generated from the tool's pydantic args model, named after the wrapper function.
    json_schema = generate_schema_from_args_schema_v2(langchain_tool.args_schema, name=wrapper_func_name, description=description)
    return cls(
        description=description,
        source_type=source_type,
        tags=tags,
        source_code=wrapper_function_str,
        json_schema=json_schema,
    )
class ToolUpdate(LettaBase):
description: Optional[str] = Field(None, description="The description of the tool.")
@@ -250,7 +220,8 @@ class ToolUpdate(LettaBase):
)
args_json_schema: Optional[Dict] = Field(None, description="The args JSON schema of the function.")
return_char_limit: Optional[int] = Field(None, description="The maximum number of characters in the response.")
pip_requirements: Optional[List[PipRequirement]] = Field(None, description="Optional list of pip packages required by this tool.")
pip_requirements: list[PipRequirement] | None = Field(None, description="Optional list of pip packages required by this tool.")
npm_requirements: list[NpmRequirement] | None = Field(None, description="Optional list of npm packages required by this tool.")
class Config:
extra = "ignore" # Allows extra fields without validation errors
@@ -267,4 +238,5 @@ class ToolRunFromSource(LettaBase):
json_schema: Optional[Dict] = Field(
None, description="The JSON schema of the function (auto-generated from source_code if not provided)"
)
pip_requirements: Optional[List[PipRequirement]] = Field(None, description="Optional list of pip packages required by this tool.")
pip_requirements: list[PipRequirement] | None = Field(None, description="Optional list of pip packages required by this tool.")
npm_requirements: list[NpmRequirement] | None = Field(None, description="Optional list of npm packages required by this tool.")

View File

@@ -313,60 +313,6 @@ def test_composio_tool_schema_generation(openai_model: str, structured_output: b
print(f"Total execution time: {end_time - start_time:.2f} seconds")
@pytest.mark.parametrize("openai_model", ["gpt-4o-mini"])
@pytest.mark.parametrize("structured_output", [True])
def test_langchain_tool_schema_generation(openai_model: str, structured_output: bool):
    """Test that we can generate the schemas for some Langchain tools.

    Builds a ToolCreate from a LangChain WikipediaQueryRun tool, then sends the
    generated JSON schema to the OpenAI chat-completions API to confirm the
    schema is accepted as a valid tool definition. Requires OPENAI_API_KEY.
    """
    from langchain_community.tools import WikipediaQueryRun
    from langchain_community.utilities import WikipediaAPIWrapper

    api_wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=500)
    langchain_tool = WikipediaQueryRun(api_wrapper=api_wrapper)

    tool_create = ToolCreate.from_langchain(
        langchain_tool=langchain_tool,
        additional_imports_module_attr_map={"langchain_community.utilities": "WikipediaAPIWrapper"},
    )
    assert tool_create.json_schema
    schema = tool_create.json_schema
    print(f"The schema for {langchain_tool.name}: {json.dumps(schema, indent=4)}\n\n")

    try:
        tool_schema = convert_to_structured_output(schema) if structured_output else schema

        api_key = os.getenv("OPENAI_API_KEY")
        assert api_key is not None, "OPENAI_API_KEY must be set"

        # Fixed prompt typo: "You job is" -> "Your job is".
        system_prompt = "Your job is to test the tool that you've been provided. Don't ask for any clarification on the args, just come up with some dummy data and try executing the tool."
        url = "https://api.openai.com/v1/chat/completions"
        headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}
        data = {
            "model": openai_model,
            "messages": [
                {"role": "system", "content": system_prompt},
            ],
            "tools": [
                {
                    "type": "function",
                    "function": tool_schema,
                }
            ],
            "tool_choice": "auto",
            # Serial calls keep the request deterministic for this smoke test.
            "parallel_tool_calls": False,
        }
        make_post_request(url, headers, data)
        print(f"Successfully called OpenAI using schema generated from {langchain_tool.name}\n\n")
    except Exception:
        # Log which tool's schema failed, then re-raise so pytest reports the failure.
        print(f"Failed to call OpenAI using schema generated from {langchain_tool.name}\n\n")
        raise
# Helper function for pydantic args schema test
def _run_pydantic_args_test(filename, openai_model, structured_output):
"""Run a single pydantic args schema test case"""