feat: Add endpoint to list files attached to an agent (#4082)
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
@@ -108,3 +108,26 @@ class FileAgent(FileAgentBase):
|
||||
default_factory=datetime.utcnow,
|
||||
description="Row last-update timestamp (UTC).",
|
||||
)
|
||||
|
||||
|
||||
class AgentFileAttachment(LettaBase):
    """Response model for agent file attachments showing file status in agent context"""

    # Identifier of the file<->agent relationship row (not the file itself);
    # also used as the pagination cursor by PaginatedAgentFiles.next_cursor.
    id: str = Field(..., description="Unique identifier of the file-agent relationship")
    file_id: str = Field(..., description="Unique identifier of the file")
    file_name: str = Field(..., description="Name of the file")
    # "folder" is the API-facing name for what the backend calls a "source".
    folder_id: str = Field(..., description="Unique identifier of the folder/source")
    folder_name: str = Field(..., description="Name of the folder/source")
    is_open: bool = Field(..., description="Whether the file is currently open in the agent's context")
    last_accessed_at: Optional[datetime] = Field(None, description="Timestamp of last access by the agent")
    # Only populated when the file is open; None for closed files.
    visible_content: Optional[str] = Field(None, description="Portion of the file visible to the agent if open")
    start_line: Optional[int] = Field(None, description="Starting line number if file was opened with line range")
    end_line: Optional[int] = Field(None, description="Ending line number if file was opened with line range")
|
||||
|
||||
|
||||
class PaginatedAgentFiles(LettaBase):
    """Paginated response for agent files"""

    files: List[AgentFileAttachment] = Field(..., description="List of file attachments for the agent")
    # Pass this value back as the `cursor` query parameter to fetch the next page.
    next_cursor: Optional[str] = Field(None, description="Cursor for fetching the next page (file-agent relationship ID)")
    # NOTE: next_cursor may be non-None even when has_more is False; has_more is
    # the authoritative signal for whether another page exists.
    has_more: bool = Field(..., description="Whether more results exist after this page")
|
||||
|
||||
@@ -26,6 +26,7 @@ from letta.schemas.agent import AgentState, AgentType, CreateAgent, UpdateAgent
|
||||
from letta.schemas.agent_file import AgentFileSchema
|
||||
from letta.schemas.block import Block, BlockUpdate
|
||||
from letta.schemas.enums import JobType
|
||||
from letta.schemas.file import AgentFileAttachment, PaginatedAgentFiles
|
||||
from letta.schemas.group import Group
|
||||
from letta.schemas.job import JobStatus, JobUpdate, LettaRequestConfig
|
||||
from letta.schemas.letta_message import LettaMessageUnion, LettaMessageUpdateUnion, MessageType
|
||||
@@ -728,6 +729,49 @@ async def list_agent_folders(
|
||||
return await server.agent_manager.list_attached_sources_async(agent_id=agent_id, actor=actor)
|
||||
|
||||
|
||||
@router.get("/{agent_id}/files", response_model=PaginatedAgentFiles, operation_id="list_agent_files")
async def list_agent_files(
    agent_id: str,
    cursor: Optional[str] = Query(None, description="Pagination cursor from previous response"),
    limit: int = Query(20, ge=1, le=100, description="Number of items to return (1-100)"),
    is_open: Optional[bool] = Query(None, description="Filter by open status (true for open files, false for closed files)"),
    server: "SyncServer" = Depends(get_letta_server),
    actor_id: str | None = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
):
    """
    Get the files attached to an agent with their open/closed status (paginated).

    Returns a PaginatedAgentFiles payload; pass `next_cursor` back as `cursor`
    to fetch the following page, and use `has_more` to detect the last page.
    """
    actor = await server.user_manager.get_actor_or_default_async(actor_id=actor_id)

    # get paginated file-agent relationships for this agent
    file_agents, next_cursor, has_more = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=agent_id, actor=actor, cursor=cursor, limit=limit, is_open=is_open
    )

    # enrich with folder/source metadata; many files on one page typically share a
    # source, so cache lookups per source_id to avoid redundant round-trips (N+1)
    source_cache: dict = {}
    enriched_files = []
    for fa in file_agents:
        if fa.source_id not in source_cache:
            source_cache[fa.source_id] = await server.source_manager.get_source_by_id(source_id=fa.source_id, actor=actor)
        source = source_cache[fa.source_id]

        # build response object
        enriched_files.append(
            AgentFileAttachment(
                id=fa.id,
                file_id=fa.file_id,
                file_name=fa.file_name,
                folder_id=fa.source_id,
                folder_name=source.name if source else "Unknown",
                is_open=fa.is_open,
                last_accessed_at=fa.last_accessed_at,
                visible_content=fa.visible_content,
                start_line=fa.start_line,
                end_line=fa.end_line,
            )
        )

    return PaginatedAgentFiles(files=enriched_files, next_cursor=next_cursor, has_more=has_more)
|
||||
|
||||
|
||||
# TODO: remove? can also get with agent blocks
|
||||
@router.get("/{agent_id}/core-memory", response_model=Memory, operation_id="retrieve_agent_memory")
|
||||
async def retrieve_agent_memory(
|
||||
|
||||
@@ -267,6 +267,7 @@ async def upload_file_to_folder(
|
||||
original_filename=original_filename, source_id=folder_id, actor=actor
|
||||
)
|
||||
|
||||
unique_filename = None
|
||||
if existing_file:
|
||||
# Duplicate found, handle based on strategy
|
||||
if duplicate_handling == DuplicateFileHandling.ERROR:
|
||||
|
||||
@@ -293,6 +293,66 @@ class FileAgentManager:
|
||||
else:
|
||||
return [r.to_pydantic() for r in rows]
|
||||
|
||||
@enforce_types
|
||||
@trace_method
|
||||
async def list_files_for_agent_paginated(
|
||||
self,
|
||||
agent_id: str,
|
||||
actor: PydanticUser,
|
||||
cursor: Optional[str] = None,
|
||||
limit: int = 20,
|
||||
is_open: Optional[bool] = None,
|
||||
) -> tuple[List[PydanticFileAgent], Optional[str], bool]:
|
||||
"""
|
||||
Return paginated file associations for an agent.
|
||||
|
||||
Args:
|
||||
agent_id: The agent ID to get files for
|
||||
actor: User performing the action
|
||||
cursor: Pagination cursor (file-agent ID to start after)
|
||||
limit: Maximum number of results to return
|
||||
is_open: Optional filter for open/closed status (None = all, True = open only, False = closed only)
|
||||
|
||||
Returns:
|
||||
Tuple of (file_agents, next_cursor, has_more)
|
||||
"""
|
||||
async with db_registry.async_session() as session:
|
||||
conditions = [
|
||||
FileAgentModel.agent_id == agent_id,
|
||||
FileAgentModel.organization_id == actor.organization_id,
|
||||
FileAgentModel.is_deleted == False,
|
||||
]
|
||||
|
||||
# apply is_open filter if specified
|
||||
if is_open is not None:
|
||||
conditions.append(FileAgentModel.is_open == is_open)
|
||||
|
||||
# apply cursor if provided (get records after this ID)
|
||||
if cursor:
|
||||
conditions.append(FileAgentModel.id > cursor)
|
||||
|
||||
query = select(FileAgentModel).where(and_(*conditions))
|
||||
|
||||
# order by ID for stable pagination
|
||||
query = query.order_by(FileAgentModel.id)
|
||||
|
||||
# fetch limit + 1 to check if there are more results
|
||||
query = query.limit(limit + 1)
|
||||
|
||||
result = await session.execute(query)
|
||||
rows = result.scalars().all()
|
||||
|
||||
# check if we got more records than requested (meaning there are more pages)
|
||||
has_more = len(rows) > limit
|
||||
if has_more:
|
||||
# trim back to the requested limit
|
||||
rows = rows[:limit]
|
||||
|
||||
# get cursor for next page (ID of last item in current page)
|
||||
next_cursor = rows[-1].id if rows else None
|
||||
|
||||
return [r.to_pydantic() for r in rows], next_cursor, has_more
|
||||
|
||||
@enforce_types
|
||||
@trace_method
|
||||
async def list_agents_for_file(
|
||||
|
||||
@@ -9644,6 +9644,200 @@ async def test_list_files_and_agents(
|
||||
assert file_blocks[0].label == default_file.file_name
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_files_for_agent_paginated_basic(
    server,
    default_user,
    sarah_agent,
    default_source,
):
    """Test basic pagination functionality."""
    # attach 5 files to sarah so a limit of 3 forces two pages
    for idx in range(5):
        meta = PydanticFileMetadata(
            file_name=f"paginated_file_{idx}.txt",
            source_id=default_source.id,
            organization_id=default_user.organization_id,
        )
        created = await server.file_manager.create_file(meta, actor=default_user)
        await server.file_agent_manager.attach_file(
            agent_id=sarah_agent.id,
            file_id=created.id,
            file_name=created.file_name,
            source_id=created.source_id,
            actor=default_user,
            max_files_open=sarah_agent.max_files_open,
        )

    # first page: 3 of 5, with more remaining
    first_page, first_cursor, more_after_first = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        limit=3,
    )
    assert len(first_page) == 3
    assert more_after_first is True
    assert first_cursor is not None

    # second page resumes from the cursor and drains the rest
    second_page, second_cursor, more_after_second = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        cursor=first_cursor,
        limit=3,
    )
    assert len(second_page) == 2  # only 2 files left (5 total - 3 already fetched)
    assert more_after_second is False
    assert second_cursor is not None

    # the two pages must not share any file-agent rows
    first_ids = {fa.id for fa in first_page}
    second_ids = {fa.id for fa in second_page}
    assert first_ids.isdisjoint(second_ids)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_files_for_agent_paginated_filter_open(
    server,
    default_user,
    sarah_agent,
    default_source,
):
    """Test pagination with is_open=True filter."""
    # attach 5 files: indices 0-2 open, 3-4 closed
    for idx in range(5):
        meta = PydanticFileMetadata(
            file_name=f"filter_file_{idx}.txt",
            source_id=default_source.id,
            organization_id=default_user.organization_id,
        )
        created = await server.file_manager.create_file(meta, actor=default_user)
        await server.file_agent_manager.attach_file(
            agent_id=sarah_agent.id,
            file_id=created.id,
            file_name=created.file_name,
            source_id=created.source_id,
            actor=default_user,
            is_open=idx < 3,  # first 3 are open
            max_files_open=sarah_agent.max_files_open,
        )

    # requesting only open files should return exactly the 3 open ones
    open_files, _cursor, more = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        is_open=True,
        limit=10,
    )
    assert len(open_files) == 3
    assert more is False
    assert all(fa.is_open for fa in open_files)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_files_for_agent_paginated_filter_closed(
    server,
    default_user,
    sarah_agent,
    default_source,
):
    """Test pagination with is_open=False filter."""
    # attach 6 files: indices 0-1 open, 2-5 closed
    for idx in range(6):
        meta = PydanticFileMetadata(
            file_name=f"closed_file_{idx}.txt",
            source_id=default_source.id,
            organization_id=default_user.organization_id,
        )
        created = await server.file_manager.create_file(meta, actor=default_user)
        await server.file_agent_manager.attach_file(
            agent_id=sarah_agent.id,
            file_id=created.id,
            file_name=created.file_name,
            source_id=created.source_id,
            actor=default_user,
            is_open=idx < 2,  # first 2 are open, rest are closed
            max_files_open=sarah_agent.max_files_open,
        )

    # first page of closed files (2 of 4), more expected
    first_page, first_cursor, more_after_first = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        is_open=False,
        limit=2,
    )
    assert len(first_page) == 2
    assert more_after_first is True
    assert all(not fa.is_open for fa in first_page)

    # second page drains the remaining closed files
    second_page, _second_cursor, more_after_second = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        is_open=False,
        cursor=first_cursor,
        limit=3,
    )
    assert len(second_page) == 2  # only 2 closed files left
    assert more_after_second is False
    assert all(not fa.is_open for fa in second_page)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_files_for_agent_paginated_empty(
    server,
    default_user,
    charles_agent,
):
    """Test pagination with agent that has no files."""
    # charles_agent has no files attached in this test
    files, cursor, more = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=charles_agent.id,
        actor=default_user,
        limit=10,
    )
    # empty page: no rows, no cursor, nothing further
    assert len(files) == 0
    assert cursor is None
    assert more is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_list_files_for_agent_paginated_large_limit(
    server,
    default_user,
    sarah_agent,
    default_source,
):
    """Test that large limit returns all files without pagination."""
    # attach 3 files, far fewer than the requested limit
    for idx in range(3):
        meta = PydanticFileMetadata(
            file_name=f"all_files_{idx}.txt",
            source_id=default_source.id,
            organization_id=default_user.organization_id,
        )
        created = await server.file_manager.create_file(meta, actor=default_user)
        await server.file_agent_manager.attach_file(
            agent_id=sarah_agent.id,
            file_id=created.id,
            file_name=created.file_name,
            source_id=created.source_id,
            actor=default_user,
            max_files_open=sarah_agent.max_files_open,
        )

    # a single oversized page should contain everything
    all_files, cursor, more = await server.file_agent_manager.list_files_for_agent_paginated(
        agent_id=sarah_agent.id,
        actor=default_user,
        limit=100,
    )
    assert len(all_files) == 3
    assert more is False
    assert cursor is not None  # cursor is still set to last item
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_detach_file(server, file_attachment, default_user):
|
||||
await server.file_agent_manager.detach_file(
|
||||
|
||||
Reference in New Issue
Block a user