Remove groq old imports
This commit is contained in:
@@ -12,7 +12,6 @@ from letta.local_llm.grammars.gbnf_grammar_generator import (
     create_dynamic_model_from_function,
     generate_gbnf_grammar_and_documentation,
 )
-from letta.local_llm.groq.api import get_groq_completion
 from letta.local_llm.koboldcpp.api import get_koboldcpp_completion
 from letta.local_llm.llamacpp.api import get_llamacpp_completion
 from letta.local_llm.llm_chat_completion_wrappers import simple_summary_wrapper
@@ -170,8 +169,6 @@ def get_chat_completion(
         result, usage = get_ollama_completion(endpoint, auth_type, auth_key, model, prompt, context_window)
     elif endpoint_type == "vllm":
         result, usage = get_vllm_completion(endpoint, auth_type, auth_key, model, prompt, context_window, user)
-    elif endpoint_type == "groq":
-        result, usage = get_groq_completion(endpoint, auth_type, auth_key, model, prompt, context_window)
     else:
         raise LocalLLMError(
             f"Invalid endpoint type {endpoint_type}, please set variable depending on your backend (webui, lmstudio, llamacpp, koboldcpp)"
Reference in New Issue
Block a user