Files
letta-server/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py
Shubham Naik 5a743d1dc4 Add 'apps/core/' from commit 'ea2a7395f4023f5b9fab03e6273db3b64a1181d5'
git-subtree-dir: apps/core
git-subtree-mainline: a8963e11e7a5a0059acbc849ce768e1eee80df61
git-subtree-split: ea2a7395f4023f5b9fab03e6273db3b64a1181d5
2024-12-22 20:31:22 -08:00

12 lines
408 B
Python

from abc import ABC, abstractmethod
class LLMChatCompletionWrapper(ABC):
    """Interface for adapting a local LLM to the ChatCompletion protocol.

    Implementations translate a ChatCompletion-style request (messages plus
    function specs) into a single prompt string for the model, and parse the
    model's raw text output back into a ChatCompletion-style response.
    """

    @abstractmethod
    def chat_completion_to_prompt(self, messages, functions, function_documentation=None):
        """Render the request (messages + functions) into one prompt string.

        `function_documentation`, when given, supplies pre-rendered docs for
        the available functions instead of deriving them from `functions`.
        """

    @abstractmethod
    def output_to_chat_completion_response(self, raw_llm_output):
        """Parse the raw model output string into a ChatCompletion response."""