feat(JSON Response): Enable JSON Response format for all OpenAI Calls… (#1401)

Signed-off-by: Lenaxia <github@47north.lat>
Co-authored-by: Lenaxia <github@47north.lat>
This commit is contained in:
lenaxia
2024-05-22 19:05:34 -07:00
committed by GitHub
parent fc6c8423cd
commit f370670db1
2 changed files with 7 additions and 2 deletions

View File

@@ -159,6 +159,7 @@ def create(
printd(f"Using model {llm_config.model_endpoint_type}, endpoint: {llm_config.model_endpoint}")
# TODO eventually refactor so that credentials are passed through
credentials = MemGPTCredentials.load()
if function_call and not functions:
@@ -178,6 +179,7 @@ def create(
tools=[{"type": "function", "function": f} for f in functions] if functions else None,
tool_choice=function_call,
user=str(user_id),
response_format={"type": "json_object"},
)
else:
data = ChatCompletionRequest(
@@ -186,6 +188,7 @@ def create(
functions=functions,
function_call=function_call,
user=str(user_id),
response_format={"type": "json_object"},
)
if stream:

View File

@@ -36,9 +36,11 @@ from memgpt.constants import (
from memgpt.models.chat_completion_response import ChatCompletionResponse
from memgpt.openai_backcompat.openai_object import OpenAIObject
# TODO: what is this?
# Module-wide debug flag: True only when the LOG_LEVEL environment
# variable is set to exactly "DEBUG"; defaults to False otherwise
# (including when LOG_LEVEL is unset or holds any other value).
DEBUG = os.environ.get("LOG_LEVEL") == "DEBUG"
ADJECTIVE_BANK = [
"beautiful",