
Commit f95173b

Support deepthought in research mode with new Grok 3 reasoning model
Rely on the deepthought flag to control the reasoning effort (low or high) for the Grok model. This differs from the OpenAI reasoning models, which support low/medium/high and for which low or medium effort is used based on the deepthought flag. Note: Grok is accessible over an OpenAI-compatible API.
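
To make the mapping concrete, here is a minimal sketch of how the deepthought flag could translate into a reasoning_effort value and be sent to Grok over its OpenAI-compatible API. This is an illustration, not the commit's code: the helper name pick_reasoning_effort, the client setup, and the example prompt are assumptions.

# Sketch only: choose reasoning_effort from the deepthought flag, then call
# Grok over its OpenAI-compatible API. Helper name and client setup are hypothetical.
from openai import OpenAI

def pick_reasoning_effort(model_name: str, deepthought: bool) -> str:
    if model_name.startswith("grok-3-mini"):
        # Grok reasoning models only accept low/high effort
        return "high" if deepthought else "low"
    # OpenAI reasoning models accept low/medium/high; low/medium is used here
    return "medium" if deepthought else "low"

# Hypothetical client; replace the API key placeholder with a real xAI key.
client = OpenAI(api_key="YOUR_XAI_API_KEY", base_url="https://api.x.ai/v1")
response = client.chat.completions.create(
    model="grok-3-mini",
    messages=[{"role": "user", "content": "Plan a week of deep work."}],
    reasoning_effort=pick_reasoning_effort("grok-3-mini", deepthought=True),
)
print(response.choices[0].message.content)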
Parent commit: 9c70a0f

File tree: 2 files changed, +22 −0 lines

src/khoj/processor/conversation/openai/utils.py (+17 lines)
@@ -68,6 +68,9 @@ def completion_with_backoff(
         temperature = 1
         reasoning_effort = "medium" if deepthought else "low"
         model_kwargs["reasoning_effort"] = reasoning_effort
+    elif is_twitter_reasoning_model(model_name, api_base_url):
+        reasoning_effort = "high" if deepthought else "low"
+        model_kwargs["reasoning_effort"] = reasoning_effort

     model_kwargs["stream_options"] = {"include_usage": True}
     if os.getenv("KHOJ_LLM_SEED"):

@@ -181,6 +184,9 @@ def llm_thread(
             formatted_messages[first_system_message_index][
                 "content"
             ] = f"{first_system_message} Formatting re-enabled"
+        elif is_twitter_reasoning_model(model_name, api_base_url):
+            reasoning_effort = "high" if deepthought else "low"
+            model_kwargs["reasoning_effort"] = reasoning_effort
         elif model_name.startswith("deepseek-reasoner"):
             # Two successive messages cannot be from the same role. Should merge any back-to-back messages from the same role.
             # The first message should always be a user message (except system message).

@@ -266,3 +272,14 @@ def is_openai_reasoning_model(model_name: str, api_base_url: str = None) -> bool
     Check if the model is an OpenAI reasoning model
     """
     return model_name.startswith("o") and (api_base_url is None or api_base_url.startswith("https://api.openai.com/v1"))
+
+
+def is_twitter_reasoning_model(model_name: str, api_base_url: str = None) -> bool:
+    """
+    Check if the model is a Twitter reasoning model
+    """
+    return (
+        model_name.startswith("grok-3-mini")
+        and api_base_url is not None
+        and api_base_url.startswith("https://api.x.ai/v1")
+    )
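
A few hypothetical checks, derived from the function body above, show how the new helper is meant to behave (these asserts are illustrative and are not part of the commit):

# Illustrative checks only; not part of the commit.
assert is_twitter_reasoning_model("grok-3-mini", "https://api.x.ai/v1")
assert is_twitter_reasoning_model("grok-3-mini-latest", "https://api.x.ai/v1")  # prefix match
assert not is_twitter_reasoning_model("grok-3", "https://api.x.ai/v1")          # non-mini Grok is not treated as a reasoning model
assert not is_twitter_reasoning_model("grok-3-mini", None)                      # an x.ai base URL is required
assert not is_twitter_reasoning_model("o3-mini", "https://api.openai.com/v1")   # OpenAI models are handled by is_openai_reasoning_model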

src/khoj/utils/constants.py (+5 lines)
@@ -57,4 +57,9 @@
     "claude-3-7-sonnet-20250219": {"input": 3.0, "output": 15.0, "cache_read": 0.3, "cache_write": 3.75},
     "claude-3-7-sonnet@20250219": {"input": 3.0, "output": 15.0, "cache_read": 0.3, "cache_write": 3.75},
     "claude-3-7-sonnet-latest": {"input": 3.0, "output": 15.0, "cache_read": 0.3, "cache_write": 3.75},
+    # Grok pricing: https://docs.x.ai/docs/models
+    "grok-3": {"input": 3.0, "output": 15.0},
+    "grok-3-latest": {"input": 3.0, "output": 15.0},
+    "grok-3-mini": {"input": 0.30, "output": 0.50},
+    "grok-3-mini-latest": {"input": 0.30, "output": 0.50},
 }
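
Assuming these prices are, like the existing entries, USD per million tokens, a rough cost estimate for a Grok call could be computed as sketched below; the dictionary name and the token counts are assumptions for illustration.

# Assumes prices are USD per million tokens; names and token counts are hypothetical.
model_prices = {
    "grok-3": {"input": 3.0, "output": 15.0},
    "grok-3-mini": {"input": 0.30, "output": 0.50},
}

def estimate_cost(model_name: str, input_tokens: int, output_tokens: int) -> float:
    price = model_prices[model_name]
    return (input_tokens * price["input"] + output_tokens * price["output"]) / 1_000_000

# e.g. a 12k-token prompt and a 2k-token response on grok-3-mini
print(f"${estimate_cost('grok-3-mini', 12_000, 2_000):.4f}")  # $0.0046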
