allow passing skip_verify to autogen constructors (#581)

* allow passing skip_verify to autogen constructors

* added flag to examples with a NOTE, also added to docs
This commit is contained in:
Charles Packer
2023-12-05 13:28:33 -08:00
committed by GitHub
parent 2fd5a14d4a
commit 1d915cce86
5 changed files with 11 additions and 1 deletion

View File

@@ -23,7 +23,7 @@ config_list_memgpt = [
{
"model": "gpt-4",
"context_window": 8192,
"preset": "memgpt_chat",
"preset": "memgpt_chat", # NOTE: you can change the preset here
# OpenAI specific
"model_endpoint_type": "openai",
"openai_key": YOUR_OPENAI_KEY,
@@ -45,6 +45,7 @@ memgpt_autogen_agent = create_memgpt_autogen_agent_from_config(
system_message=f"Your desired MemGPT persona",
interface_kwargs=interface_kwargs,
default_auto_reply="...",
skip_verify=False, # NOTE: you should set this to True if you expect your MemGPT AutoGen agent to call a function other than send_message on the first turn
)
```

View File

@@ -169,6 +169,7 @@ else:
human_input_mode="TERMINATE",
interface_kwargs=interface_kwargs,
default_auto_reply="...", # Set a default auto-reply message here (non-empty auto-reply is required for LM Studio)
skip_verify=False, # NOTE: you should set this to True if you expect your MemGPT AutoGen agent to call a function other than send_message on the first turn
)
# Begin the group chat with a message from the user

View File

@@ -151,6 +151,7 @@ memgpt_agent = create_memgpt_autogen_agent_from_config(
system_message=f"You are an AI research assistant.\n" f"You are participating in a group chat with a user ({user_proxy.name}).",
interface_kwargs=interface_kwargs,
default_auto_reply="...", # Set a default auto-reply message here (non-empty auto-reply is required for LM Studio)
skip_verify=False, # NOTE: you should set this to True if you expect your MemGPT AutoGen agent to call a function other than send_message on the first turn
)
# NOTE: you need to follow steps to load document first: see https://memgpt.readthedocs.io/en/latest/autogen/#loading-documents
memgpt_agent.load_and_attach("memgpt_research_paper", "directory")

View File

@@ -173,6 +173,7 @@ else:
f"and a product manager ({pm.name}).",
interface_kwargs=interface_kwargs,
default_auto_reply="...", # Set a default auto-reply message here (non-empty auto-reply is required for LM Studio)
skip_verify=False, # NOTE: you should set this to True if you expect your MemGPT AutoGen agent to call a function other than send_message on the first turn
)
# Initialize the group chat between the user and two LLM agents (PM and coder)

View File

@@ -28,6 +28,7 @@ def create_memgpt_autogen_agent_from_config(
nonmemgpt_llm_config: Optional[Union[Dict, bool]] = None,
default_auto_reply: Optional[Union[str, Dict, None]] = "",
interface_kwargs: Dict = None,
skip_verify: bool = False,
):
"""Same function signature as used in base AutoGen, but creates a MemGPT agent
@@ -87,6 +88,7 @@ def create_memgpt_autogen_agent_from_config(
default_auto_reply=default_auto_reply,
is_termination_msg=is_termination_msg,
interface_kwargs=interface_kwargs,
skip_verify=skip_verify,
)
if human_input_mode != "ALWAYS":
@@ -95,6 +97,7 @@ def create_memgpt_autogen_agent_from_config(
default_auto_reply=default_auto_reply,
is_termination_msg=is_termination_msg,
interface_kwargs=interface_kwargs,
skip_verify=skip_verify,
)
if default_auto_reply != "":
coop_agent2 = UserProxyAgent(
@@ -108,6 +111,7 @@ def create_memgpt_autogen_agent_from_config(
default_auto_reply=default_auto_reply,
is_termination_msg=is_termination_msg,
interface_kwargs=interface_kwargs,
skip_verify=skip_verify,
)
groupchat = GroupChat(
@@ -126,6 +130,7 @@ def create_memgpt_autogen_agent_from_config(
def create_autogen_memgpt_agent(
agent_config,
# interface and persistence manager
skip_verify=False,
interface=None,
interface_kwargs={},
persistence_manager=None,
@@ -172,6 +177,7 @@ def create_autogen_memgpt_agent(
agent=memgpt_agent,
default_auto_reply=default_auto_reply,
is_termination_msg=is_termination_msg,
skip_verify=skip_verify,
)
return autogen_memgpt_agent