diff --git a/memgpt/cli/cli_config.py b/memgpt/cli/cli_config.py index 37eb0f92..eb9cf854 100644 --- a/memgpt/cli/cli_config.py +++ b/memgpt/cli/cli_config.py @@ -81,6 +81,8 @@ def configure_llm_endpoint(config: MemGPTConfig): if model_endpoint_type in DEFAULT_ENDPOINTS: default_model_endpoint = DEFAULT_ENDPOINTS[model_endpoint_type] model_endpoint = questionary.text("Enter default endpoint:", default=default_model_endpoint).ask() + elif config.model_endpoint: + model_endpoint = questionary.text("Enter default endpoint:", default=config.model_endpoint).ask() else: # default_model_endpoint = None model_endpoint = None @@ -173,12 +175,10 @@ def configure_embedding_endpoint(config: MemGPTConfig): # configure embedding endpoint default_embedding_endpoint_type = config.embedding_endpoint_type - if config.embedding_endpoint_type is not None and config.embedding_endpoint_type not in ["openai", "azure"]: # local model - default_embedding_endpoint_type = "local" - embedding_endpoint_type, embedding_endpoint, embedding_dim = None, None, None + embedding_endpoint_type, embedding_endpoint, embedding_dim, embedding_model = None, None, None, None embedding_provider = questionary.select( - "Select embedding provider:", choices=["openai", "azure", "local"], default=default_embedding_endpoint_type + "Select embedding provider:", choices=["openai", "azure", "hugging-face", "local"], default=default_embedding_endpoint_type ).ask() if embedding_provider == "openai": embedding_endpoint_type = "openai" @@ -188,11 +188,38 @@ def configure_embedding_endpoint(config: MemGPTConfig): embedding_endpoint_type = "azure" _, _, _, _, embedding_endpoint = get_azure_credentials() embedding_dim = 1536 + elif embedding_provider == "hugging-face": + # configure hugging face embedding endpoint (https://github.com/huggingface/text-embeddings-inference) + # supports custom model/endpoints + embedding_endpoint_type = "hugging-face" + embedding_endpoint = None + + # get endpoint + embedding_endpoint = 
questionary.text("Enter default endpoint:").ask() + if "http://" not in embedding_endpoint and "https://" not in embedding_endpoint: + typer.secho(f"Endpoint must be a valid address", fg=typer.colors.YELLOW) + embedding_endpoint = None + + # get model type + default_embedding_model = config.embedding_model if config.embedding_model else "BAAI/bge-large-en-v1.5" + embedding_model = questionary.text( + "Enter HuggingFace model tag (e.g. BAAI/bge-large-en-v1.5):", + default=default_embedding_model, + ).ask() + + # get model dimensions + default_embedding_dim = config.embedding_dim if config.embedding_dim else "1024" + embedding_dim = questionary.text("Enter embedding model dimensions (e.g. 1024):", default=str(default_embedding_dim)).ask() + try: + embedding_dim = int(embedding_dim) + except Exception as e: + raise ValueError(f"Failed to cast {embedding_dim} to integer.") else: # local models embedding_endpoint_type = "local" embedding_endpoint = None embedding_dim = 384 - return embedding_endpoint_type, embedding_endpoint, embedding_dim + + return embedding_endpoint_type, embedding_endpoint, embedding_dim, embedding_model def configure_cli(config: MemGPTConfig): @@ -253,7 +280,7 @@ def configure(): config = MemGPTConfig.load() model_endpoint_type, model_endpoint = configure_llm_endpoint(config) model, model_wrapper, context_window = configure_model(config, model_endpoint_type) - embedding_endpoint_type, embedding_endpoint, embedding_dim = configure_embedding_endpoint(config) + embedding_endpoint_type, embedding_endpoint, embedding_dim, embedding_model = configure_embedding_endpoint(config) default_preset, default_persona, default_human, default_agent = configure_cli(config) archival_storage_type, archival_storage_uri = configure_archival_storage(config) @@ -286,6 +313,7 @@ def configure(): embedding_endpoint_type=embedding_endpoint_type, embedding_endpoint=embedding_endpoint, embedding_dim=embedding_dim, + embedding_model=embedding_model, # cli configs
preset=default_preset, persona=default_persona, diff --git a/memgpt/config.py b/memgpt/config.py index 34073da8..3326e040 100644 --- a/memgpt/config.py +++ b/memgpt/config.py @@ -92,6 +92,7 @@ class MemGPTConfig: # embedding parameters embedding_endpoint_type: str = "openai" # openai, azure, local embedding_endpoint: str = None + embedding_model: str = None embedding_dim: int = 1536 embedding_chunk_size: int = 300 # number of tokens @@ -153,6 +154,7 @@ class MemGPTConfig: "azure_deployment": get_field(config, "azure", "deployment"), "azure_embedding_deployment": get_field(config, "azure", "embedding_deployment"), "embedding_endpoint": get_field(config, "embedding", "embedding_endpoint"), + "embedding_model": get_field(config, "embedding", "embedding_model"), "embedding_endpoint_type": get_field(config, "embedding", "embedding_endpoint_type"), "embedding_dim": get_field(config, "embedding", "embedding_dim"), "embedding_chunk_size": get_field(config, "embedding", "chunk_size"), @@ -203,6 +205,7 @@ class MemGPTConfig: # embeddings set_field(config, "embedding", "embedding_endpoint_type", self.embedding_endpoint_type) set_field(config, "embedding", "embedding_endpoint", self.embedding_endpoint) + set_field(config, "embedding", "embedding_model", self.embedding_model) set_field(config, "embedding", "embedding_dim", str(self.embedding_dim)) set_field(config, "embedding", "embedding_chunk_size", str(self.embedding_chunk_size)) @@ -265,6 +268,7 @@ class AgentConfig: # embedding info embedding_endpoint_type=None, embedding_endpoint=None, + embedding_model=None, embedding_dim=None, embedding_chunk_size=None, # other @@ -292,6 +296,7 @@ class AgentConfig: self.model_wrapper = config.model_wrapper if model_wrapper is None else model_wrapper self.embedding_endpoint_type = config.embedding_endpoint_type if embedding_endpoint_type is None else embedding_endpoint_type self.embedding_endpoint = config.embedding_endpoint if embedding_endpoint is None else embedding_endpoint + 
self.embedding_model = config.embedding_model if embedding_model is None else embedding_model self.embedding_dim = config.embedding_dim if embedding_dim is None else embedding_dim self.embedding_chunk_size = config.embedding_chunk_size if embedding_chunk_size is None else embedding_chunk_size diff --git a/memgpt/embeddings.py b/memgpt/embeddings.py index d2c2e476..1445e3db 100644 --- a/memgpt/embeddings.py +++ b/memgpt/embeddings.py @@ -1,6 +1,7 @@ import typer import os from llama_index.embeddings import OpenAIEmbedding +from llama_index.embeddings import TextEmbeddingsInference def embedding_model(): @@ -24,8 +25,15 @@ def embedding_model(): api_type="azure", api_version=config.azure_version, ) + elif endpoint == "hugging-face": + embed_model = TextEmbeddingsInference( + base_url=config.embedding_endpoint, + model_name=config.embedding_model, + timeout=60, # timeout in seconds + ) + return embed_model else: - # default to hugging face model + # default to hugging face model running local from llama_index.embeddings import HuggingFaceEmbedding os.environ["TOKENIZERS_PARALLELISM"] = "False" diff --git a/memgpt/local_llm/constants.py b/memgpt/local_llm/constants.py index f310abd8..bcdf830e 100644 --- a/memgpt/local_llm/constants.py +++ b/memgpt/local_llm/constants.py @@ -7,6 +7,7 @@ DEFAULT_ENDPOINTS = { "ollama": "http://localhost:11434", "webui-legacy": "http://localhost:5000", "webui": "http://localhost:5000", + "vllm": "http://localhost:8000", } DEFAULT_OLLAMA_MODEL = "dolphin2.2-mistral:7b-q6_K" diff --git a/memgpt/memory.py b/memgpt/memory.py index d9acb26c..188e1eb1 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -708,8 +708,8 @@ class EmbeddingArchivalMemory(ArchivalMemory): query_vec = self.embed_model.get_text_embedding(query_string) self.cache[query_string] = self.storage.query(query_string, query_vec, top_k=self.top_k) - start = start if start else 0 - count = count if count else self.top_k + start = int(start if start else 0) + count = int(count 
if count else self.top_k) end = min(count + start, len(self.cache[query_string])) results = self.cache[query_string][start:end] diff --git a/poetry.lock b/poetry.lock index 9e2e3997..6a15aaee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -254,7 +254,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "cachetools" version = "5.3.2" description = "Extensible memoizing collections and decorators" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, @@ -497,7 +497,7 @@ vision = ["Pillow (>=6.2.1)"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -optional = false +optional = true python-versions = ">=3.5" files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, @@ -535,7 +535,7 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" -optional = false +optional = true python-versions = "*" files = [ {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, @@ -824,6 +824,62 @@ files = [ docs = ["Sphinx"] test = ["objgraph", "psutil"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.25.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "huggingface-hub" version = "0.17.3" @@ -950,7 +1006,7 @@ files = [ name = "lancedb" version = "0.3.3" description = "lancedb" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "lancedb-0.3.3-py3-none-any.whl", hash = "sha256:67ccea22a6cb39c688041f7469be778a2e64b141db80866f6f0dec25a3122aff"}, @@ -2009,7 +2065,7 @@ files = [ name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false +optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -2020,7 +2076,7 @@ files = [ name = "pyarrow" version = "14.0.1" 
description = "Python library for Apache Arrow" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, @@ -2219,7 +2275,7 @@ plugins = ["importlib-metadata"] name = "pylance" version = "0.8.10" description = "python wrapper for Lance columnar format" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "pylance-0.8.10-cp38-abi3-macosx_10_15_x86_64.whl", hash = "sha256:aecf053e12f13a1810a70c786c1e73bcf3ffe7287c0bfe2cc5df77a91f0a084c"}, @@ -2246,31 +2302,26 @@ python-versions = ">=3.8" files = [ {file = "PyMuPDF-1.23.6-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:c4eb71b88a22c1008f764b3121b36a9d25340f9920b870508356050a365d9ca1"}, {file = "PyMuPDF-1.23.6-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:3ce2d3678dbf822cff213b1902f2e59756313e543efd516a2b4f15bb0353bd6c"}, - {file = "PyMuPDF-1.23.6-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:2e27857a15c8a810d0b66455b8c8a79013640b6267a9b4ea808a5fe1f47711f2"}, {file = "PyMuPDF-1.23.6-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:5cd05700c8f18c9dafef63ac2ed3b1099ca06017ca0c32deea13093cea1b8671"}, {file = "PyMuPDF-1.23.6-cp310-none-win32.whl", hash = "sha256:951d280c1daafac2fd6a664b031f7f98b27eb2def55d39c92a19087bd8041c5d"}, {file = "PyMuPDF-1.23.6-cp310-none-win_amd64.whl", hash = "sha256:19d1711d5908c4527ad2deef5af2d066649f3f9a12950faf30be5f7251d18abc"}, {file = "PyMuPDF-1.23.6-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:3f0f9b76bc4f039e7587003cbd40684d93a98441549dd033cab38ca07d61988d"}, {file = "PyMuPDF-1.23.6-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:e047571d799b30459ad7ee0bc6e68900a7f6b928876f956c976f279808814e72"}, - {file = "PyMuPDF-1.23.6-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:1cbcf05c06f314fdf3042ceee674e9a0ac7fae598347d5442e2138c6046d4e82"}, {file = 
"PyMuPDF-1.23.6-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:e33f8ec5ba7265fe78b30332840b8f454184addfa79f9c27f160f19789aa5ffd"}, {file = "PyMuPDF-1.23.6-cp311-none-win32.whl", hash = "sha256:2c141f33e2733e48de8524dfd2de56d889feef0c7773b20a8cd216c03ab24793"}, {file = "PyMuPDF-1.23.6-cp311-none-win_amd64.whl", hash = "sha256:8fd9c4ee1dd4744a515b9190d8ba9133348b0d94c362293ed77726aa1c13b0a6"}, {file = "PyMuPDF-1.23.6-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:4d06751d5cd213e96f84f2faaa71a51cf4d641851e07579247ca1190121f173b"}, {file = "PyMuPDF-1.23.6-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:526b26a5207e923aab65877ad305644402851823a352cb92d362053426899354"}, - {file = "PyMuPDF-1.23.6-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:0f852d125defc26716878b1796f4d68870e9065041d00cf46bde317fd8d30e68"}, {file = "PyMuPDF-1.23.6-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:5bdf7020b90987412381acc42427dd1b7a03d771ee9ec273de003e570164ec1a"}, {file = "PyMuPDF-1.23.6-cp312-none-win32.whl", hash = "sha256:e2d64799c6d9a3735be9e162a5d11061c0b7fbcb1e5fc7446e0993d0f815a93a"}, {file = "PyMuPDF-1.23.6-cp312-none-win_amd64.whl", hash = "sha256:c8ea81964c1433ea163ad4b53c56053a87a9ef6e1bd7a879d4d368a3988b60d1"}, {file = "PyMuPDF-1.23.6-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:761501a4965264e81acdd8f2224f993020bf24474e9b34fcdb5805a6826eda1c"}, {file = "PyMuPDF-1.23.6-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:fd8388e82b6045807d19addf310d8119d32908e89f76cc8bbf8cf1ec36fce947"}, - {file = "PyMuPDF-1.23.6-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:4ac9673a6d6ee7e80cb242dacb43f9ca097b502d9c5e44687dbdffc2bce7961a"}, {file = "PyMuPDF-1.23.6-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:6e319c1f49476e07b9a12017c2d031687617713f8a46b7adcec03c636ed04607"}, {file = "PyMuPDF-1.23.6-cp38-none-win32.whl", hash = "sha256:1103eea4ab727e32b9cb93347b35f71562033018c333a7f3a17d115e980fea4a"}, {file = "PyMuPDF-1.23.6-cp38-none-win_amd64.whl", 
hash = "sha256:991a37e1cba43775ce094da87cf0bf72172a5532a09644003276bc8bfdfe9f1a"}, {file = "PyMuPDF-1.23.6-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:57725e15872f7ab67a9fb3e06e5384d1047b2121e85755c93a6d4266d3ca8983"}, {file = "PyMuPDF-1.23.6-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:224c341fe254adda97c8f06a4c5838cdbcf609fa89e70b1fb179752533378f2f"}, - {file = "PyMuPDF-1.23.6-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:271bdf6059bb8347f9c9c6b721329bd353a933681b1fc62f43241b410e7ab7ae"}, {file = "PyMuPDF-1.23.6-cp39-none-manylinux2014_x86_64.whl", hash = "sha256:57e22bea69690450197b34dcde16bd9fe0265ac4425b4033535ccc5c044246fb"}, {file = "PyMuPDF-1.23.6-cp39-none-win32.whl", hash = "sha256:2885a26220a32fb45ea443443b72194bb7107d6862d8d546b59e4ad0c8a1f2c9"}, {file = "PyMuPDF-1.23.6-cp39-none-win_amd64.whl", hash = "sha256:361cab1be45481bd3dc4e00ec82628ebc189b4f4b6fd9bd78a00cfeed54e0034"}, @@ -2289,7 +2340,6 @@ python-versions = ">=3.8" files = [ {file = "PyMuPDFb-1.23.6-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:e5af77580aad3d1103aeec57009d156bfca429cecda14a17c573fcbe97bafb30"}, {file = "PyMuPDFb-1.23.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9925816cbe3e05e920f9be925e5752c2eef42b793885b62075bb0f6a69178598"}, - {file = "PyMuPDFb-1.23.6-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:009e2cff166059e13bf71f93919e688f46b8fc11d122433574cfb0cc9134690e"}, {file = "PyMuPDFb-1.23.6-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7132b30e6ad6ff2013344e3a481b2287fe0be3710d80694807dd6e0d8635f085"}, {file = "PyMuPDFb-1.23.6-py3-none-win32.whl", hash = "sha256:9d24ddadc204e895bee5000ddc7507c801643548e59f5a56aad6d32981d17eeb"}, {file = "PyMuPDFb-1.23.6-py3-none-win_amd64.whl", hash = "sha256:7bef75988e6979b10ca804cf9487f817aae43b0fff1c6e315b3b9ee0cf1cc32f"}, @@ -2434,7 +2484,7 @@ prompt_toolkit = ">=2.0,<=3.0.36" name = "ratelimiter" version = "1.2.0.post0" description = "Simple python rate 
limiting object" -optional = false +optional = true python-versions = "*" files = [ {file = "ratelimiter-1.2.0.post0-py3-none-any.whl", hash = "sha256:a52be07bc0bb0b3674b4b304550f10c769bbb00fead3072e035904474259809f"}, @@ -2566,7 +2616,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "retry" version = "0.9.2" description = "Easy to use retry decorator." -optional = false +optional = true python-versions = "*" files = [ {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, @@ -2732,7 +2782,7 @@ asn1crypto = ">=1.5.1" name = "semver" version = "3.0.2" description = "Python helper for Semantic Versioning (https://semver.org)" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, @@ -3743,7 +3793,7 @@ multidict = ">=4.0" [extras] dev = ["black", "datasets", "pre-commit", "pytest"] -lancedb = [] +lancedb = ["lancedb"] legacy = ["faiss-cpu", "numpy"] local = ["huggingface-hub", "torch", "transformers"] postgres = ["pg8000", "pgvector", "psycopg", "psycopg-binary", "psycopg2-binary"] @@ -3751,4 +3801,4 @@ postgres = ["pg8000", "pgvector", "psycopg", "psycopg-binary", "psycopg2-binary" [metadata] lock-version = "2.0" python-versions = "<3.12,>=3.9" -content-hash = "130c4da6c4b59aeb80aecf9549f75bed28123c275e30f159232e491d726034d5" +content-hash = "2aa01145484f84550b1c5422321abeb7933469b9e37debb1d16b0824b79e7269" diff --git a/pyproject.toml b/pyproject.toml index cea7750c..89ce24e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ torch = {version = ">=2.0.0, !=2.0.1, !=2.1.0", optional = true} websockets = "^12.0" docstring-parser = "^0.15" lancedb = {version = "^0.3.3", optional = true} +httpx = "^0.25.2" [tool.poetry.extras] legacy = ["faiss-cpu", "numpy"] diff --git a/tests/utils.py b/tests/utils.py index 23fc969f..ba586f64 100644 --- 
a/tests/utils.py +++ b/tests/utils.py @@ -26,6 +26,7 @@ def configure_memgpt_localllm(): child.expect("Select embedding provider", timeout=TIMEOUT) child.send("\x1b[B") # Send the down arrow key child.send("\x1b[B") # Send the down arrow key + child.send("\x1b[B") # Send the down arrow key child.sendline() child.expect("Select default preset", timeout=TIMEOUT)