Mirror of https://github.com/khoj-ai/khoj.git, synced 2024-11-23 15:38:55 +01:00
Stream responses from OpenAI's o1 model series, as the API now supports it
Previously, o1 models did not support streaming responses via the API; now they appear to.
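A minimal sketch of what this change enables, assuming the openai>=1.x Python client; the model name "o1-mini" and the prompt are illustrative assumptions, not taken from this commit, and the temperature=1 constraint mirrors the beta limitation the changed code still works around.

# Minimal streaming sketch (assumed model name and prompt; not code from this commit).
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

stream = client.chat.completions.create(
    model="o1-mini",  # assumed o1-series model name
    messages=[{"role": "user", "content": "Summarize the key limitations of o1 models."}],
    temperature=1,  # o1 models only accept the default temperature
    stream=True,  # previously rejected for o1 models; now supported, hence this commit
)

for chunk in stream:
    # Each chunk carries an incremental text delta; the final chunk's delta may be empty.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="", flush=True)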
This commit is contained in:
parent e5347dac8c
commit 4da0499cd7
1 changed file with 0 additions and 2 deletions
@@ -56,7 +56,6 @@ def completion_with_backoff(
     # Update request parameters for compatability with o1 model series
     # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
     if model.startswith("o1"):
-        stream = False
         temperature = 1
         model_kwargs.pop("stop", None)
         model_kwargs.pop("response_format", None)
@@ -156,7 +155,6 @@ def llm_thread(
     # Update request parameters for compatability with o1 model series
     # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
     if model_name.startswith("o1"):
-        stream = False
         temperature = 1
         model_kwargs.pop("stop", None)
         model_kwargs.pop("response_format", None)