Files
letta-server/memgpt/server/ws_api/interface.py
Robin Goetz a68e2c838d feat: adding first poc of web UI (#625)
* updated local APIs to return usage info (#585)

* updated APIs to return usage info

* tested all endpoints

* added autogen as an extra (#616)

* added autogen as an extra

* updated docs

Co-authored-by: hemanthsavasere <hemanth.savasere@gmail.com>

* Update LICENSE

* Add safeguard on tokens returned by functions (#576)

* swapping out hardcoded str for prefix (forgot to include in #569)

* add extra failout when the summarizer tries to run on a single message

* added function response validation code, currently will truncate responses based on character count

* added return type hints (functions/tools should either return strings or None)

* discuss function output length in custom function section

* made the truncation more informative

* patch bug where None.copy() throws runtime error (#617)

* allow passing custom host to uvicorn (#618)

* feat: initial poc for socket server

* feat: initial poc for frontend based on react

Set up an nx workspace which makes it easy to manage dependencies and added shadcn components
that allow us to build good-looking ui in a fairly simple way.
UI is a very simple and basic chat that starts with a message of the user and then simply displays the
answer string that is sent back from the fastapi ws endpoint

* feat: map arguments to json and return new messages

Except for the previous user message we return all newly generated messages and let the frontend figure out how to display them.

* feat: display messages based on role and show inner thoughts and connection status

* chore: build newest frontend

* feat(frontend): show loader while waiting for first message and disable send button until connection is open

* feat: make agent send the first message and loop similar to CLI

currently the CLI loops until the correct function call sends a message to the user. this is an initial try to achieve a similar behavior in the socket server

* chore: build new version of frontend

* fix: rename lib directory so it is not excluded as part of python gitignore

* chore: rebuild frontend app

* fix: save agent at end of each response to allow the conversation to carry on over multiple sessions

* feat: restructure server to support multiple endpoints and add agents and sources endpoint

* feat: setup frontend routing and settings page

* chore: build frontend

* feat: another iteration of web interface

changes include: websocket for chat. switching between different agents. introduction of zustand state management

* feat: adjust frontend to work with memgpt rest-api

* feat: adjust existing rest_api to serve and interact with frontend

* feat: build latest frontend

* chore: build latest frontend

* fix: cleanup workspace

---------

Co-authored-by: Charles Packer <packercharles@gmail.com>
Co-authored-by: hemanthsavasere <hemanth.savasere@gmail.com>
2024-01-11 14:47:51 +01:00

112 lines
4.1 KiB
Python

import asyncio
import threading
from memgpt.interface import AgentInterface
import memgpt.server.ws_api.protocol as protocol
class BaseWebSocketInterface(AgentInterface):
    """Interface for interacting with a MemGPT agent over a WebSocket.

    Tracks the set of connected client websockets so that subclasses can
    broadcast agent events (monologue, assistant messages, function calls)
    to every connected client.
    """

    def __init__(self):
        # Connected websocket clients; each is expected to expose send_text().
        self.clients = set()

    def register_client(self, websocket):
        """Register a new client connection."""
        self.clients.add(websocket)

    def unregister_client(self, websocket):
        """Unregister a client connection.

        Uses set.discard() rather than set.remove() so that unregistering a
        websocket that was never registered (or was already removed, e.g. on a
        duplicate disconnect event) is a harmless no-op instead of a KeyError.
        """
        self.clients.discard(websocket)

    def step_yield(self):
        # No-op: this interface pushes messages to clients as they happen,
        # so there is nothing to flush at the end of an agent step.
        pass
class AsyncWebSocketInterface(BaseWebSocketInterface):
    """WebSocket calls are async.

    Each agent event is protocol-encoded and broadcast concurrently to all
    registered clients via a shared private helper (mirroring the
    `_send_to_all_clients` helper on the synchronous interface).
    """

    async def _broadcast(self, payload):
        """Send an already protocol-encoded payload to every connected client.

        No-op when no clients are connected; otherwise the sends run
        concurrently via asyncio.gather.
        """
        if self.clients:
            await asyncio.gather(*(client.send_text(payload) for client in self.clients))

    async def user_message(self, msg):
        """Handle reception of a user message"""
        # Logic to process the user message and possibly trigger agent's response
        pass

    async def internal_monologue(self, msg):
        """Handle the agent's internal monologue"""
        print(msg)
        # Send the internal monologue to all clients
        await self._broadcast(protocol.server_agent_internal_monologue(msg))

    async def assistant_message(self, msg):
        """Handle the agent sending a message"""
        print(msg)
        # Send the assistant's message to all clients
        await self._broadcast(protocol.server_agent_assistant_message(msg))

    async def function_message(self, msg):
        """Handle the agent calling a function"""
        print(msg)
        # Send the function call message to all clients
        await self._broadcast(protocol.server_agent_function_message(msg))
class SyncWebSocketInterface(BaseWebSocketInterface):
    """Synchronous facade over asynchronous websocket sends.

    Owns a private asyncio event loop running on a background daemon thread;
    the synchronous agent callbacks schedule broadcast coroutines onto that
    loop, so agent code never needs to await anything itself.
    """

    def __init__(self):
        super().__init__()  # initializes self.clients
        # Dedicated event loop, driven by the background thread below, that
        # performs every websocket send. (The redundant re-initialization of
        # self.clients that used to live here was removed — super().__init__()
        # already creates it.)
        self.loop = asyncio.new_event_loop()
        self.thread = threading.Thread(target=self._run_event_loop, daemon=True)
        self.thread.start()

    def _run_event_loop(self):
        """Run the dedicated event loop and handle its closure."""
        asyncio.set_event_loop(self.loop)
        try:
            self.loop.run_forever()
        finally:
            # After stop() has been signalled, finish pending async generators
            # before closing the loop for good.
            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
            self.loop.close()

    def _run_async(self, coroutine):
        """Schedule a coroutine on the dedicated event loop (fire-and-forget).

        NOTE(review): the Future returned by run_coroutine_threadsafe is
        discarded, so exceptions raised inside the coroutine are silently
        dropped — confirm this best-effort behavior is intended.
        """
        if not self.loop.is_closed():
            asyncio.run_coroutine_threadsafe(coroutine, self.loop)

    async def _send_to_all_clients(self, clients, msg):
        """Asynchronously sends a message to all clients."""
        if clients:
            await asyncio.gather(*(client.send_text(msg) for client in clients))

    def user_message(self, msg):
        """Handle reception of a user message"""
        # Logic to process the user message and possibly trigger agent's response
        pass

    def internal_monologue(self, msg):
        """Handle the agent's internal monologue"""
        print(msg)
        if self.clients:
            self._run_async(self._send_to_all_clients(self.clients, protocol.server_agent_internal_monologue(msg)))

    def assistant_message(self, msg):
        """Handle the agent sending a message"""
        print(msg)
        if self.clients:
            self._run_async(self._send_to_all_clients(self.clients, protocol.server_agent_assistant_message(msg)))

    def function_message(self, msg):
        """Handle the agent calling a function"""
        print(msg)
        if self.clients:
            self._run_async(self._send_to_all_clients(self.clients, protocol.server_agent_function_message(msg)))

    def close(self):
        """Shut down the WebSocket interface and its event loop."""
        # stop() must be invoked from the loop's own thread, hence
        # call_soon_threadsafe rather than a direct call.
        self.loop.call_soon_threadsafe(self.loop.stop)
        self.thread.join()  # Wait for the loop thread to run cleanup and exit