fix: require function declarations to be present for setting gemini tool config

Co-authored-by: Jin Peng <jinjpeng@Jins-MacBook-Pro.local>
This commit is contained in:
jnjpng
2025-08-19 12:56:31 -07:00
committed by GitHub
parent 9ec8473404
commit fb474c4ac9
2 changed files with 2 additions and 5 deletions

View File

@@ -239,7 +239,7 @@ class GoogleVertexClient(LLMClientBase):
request_data["config"]["response_mime_type"] = "application/json"
request_data["config"]["response_schema"] = self.get_function_call_response_schema(tools[0])
del request_data["config"]["tools"]
else:
elif tools:
tool_config = ToolConfig(
function_calling_config=FunctionCallingConfig(
# ANY mode forces the model to predict only function calls

View File

@@ -348,12 +348,9 @@ async def simple_summary(messages: List[Message], llm_config: LLMConfig, actor:
{"role": "system", "content": system_prompt},
{"role": "user", "content": summary_transcript},
]
print("messages going to summarizer:", input_messages)
input_messages_obj = [simple_message_wrapper(msg) for msg in input_messages]
print("messages going to summarizer (objs):", input_messages_obj)
request_data = llm_client.build_request_data(input_messages_obj, llm_config, tools=[])
print("request data:", request_data)
# NOTE: we should disable the inner_thoughts_in_kwargs here, because we don't use it
# I'm leaving it commented out for now for safety but is fine assuming the var here is a copy not a reference
# llm_config.put_inner_thoughts_in_kwargs = False