
Commit 0c257c0

Handle unset response_schema being passed to gemini models
1 parent c1912f8 commit 0c257c0

File tree: 2 files changed, +3 -2 lines


src/khoj/processor/conversation/google/gemini_chat.py (+2 -1)
@@ -143,7 +143,8 @@ def gemini_send_message_to_model(
     # This caused unwanted behavior and terminates response early for gemini 1.5 series. Monitor for flakiness with 2.0 series.
     if response_type == "json_object" and model in ["gemini-2.0-flash"]:
         model_kwargs["response_mime_type"] = "application/json"
-        model_kwargs["response_schema"] = response_schema
+        if response_schema:
+            model_kwargs["response_schema"] = response_schema
 
     # Get Response from Gemini
     return gemini_completion_with_backoff(
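
For context, this is roughly what the guard does to the kwargs that get forwarded downstream. The snippet below is a standalone sketch with hypothetical input values, not Khoj's actual call path:

# Hypothetical inputs to illustrate the guard; not Khoj's actual call path.
response_type = "json_object"
model = "gemini-2.0-flash"
response_schema = None  # caller did not supply a schema

model_kwargs = {}
if response_type == "json_object" and model in ["gemini-2.0-flash"]:
    model_kwargs["response_mime_type"] = "application/json"
    # Previously the schema was always copied over, so an explicit None landed
    # in model_kwargs. With the guard, the key is only set when a truthy
    # schema is supplied.
    if response_schema:
        model_kwargs["response_schema"] = response_schema

print(model_kwargs)  # {'response_mime_type': 'application/json'}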

src/khoj/processor/conversation/google/utils.py (+1 -1)
@@ -89,7 +89,7 @@ def gemini_completion_with_backoff(
 
     # format model response schema
     response_schema = None
-    if model_kwargs and "response_schema" in model_kwargs:
+    if model_kwargs and not is_none_or_empty(model_kwargs.get("response_schema")):
         response_schema = clean_response_schema(model_kwargs["response_schema"])
 
     seed = int(os.getenv("KHOJ_LLM_SEED")) if os.getenv("KHOJ_LLM_SEED") else None
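
The old check only tested for key presence, so a model_kwargs entry holding an explicit None still reached clean_response_schema. The new check also rejects None and empty values. Below is a plausible sketch of such a helper for illustration only; Khoj's actual is_none_or_empty lives in its utils module and may be implemented differently:

from typing import Any

def is_none_or_empty(item: Any) -> bool:
    # Treat None, whitespace-only strings, and empty containers as "empty".
    if item is None:
        return True
    if isinstance(item, str):
        return item.strip() == ""
    if isinstance(item, (dict, list, tuple, set)):
        return len(item) == 0
    return False

# Both a missing key and an explicitly unset schema are now skipped:
assert is_none_or_empty({"response_schema": None}.get("response_schema"))
assert is_none_or_empty({}.get("response_schema"))
assert not is_none_or_empty({"type": "object"})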

0 commit comments