Stream responses from OpenAI's o1 model series, as the API now supports it

Previously, o1 models did not support streaming responses via the API. Now
they appear to.
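
A minimal sketch of consuming a streamed o1 response with the openai Python
SDK (v1.x assumed; the model name and prompt are illustrative, not from this
commit):

    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment

    # o1 models now accept stream=True, so chunks arrive incrementally
    stream = client.chat.completions.create(
        model="o1-mini",  # illustrative o1-series model name
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
        temperature=1,  # o1 models only accept the default temperature of 1
    )

    for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            print(delta, end="", flush=True)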
Debanjum 2024-11-19 21:07:17 -08:00
parent e5347dac8c
commit 4da0499cd7


@@ -56,7 +56,6 @@ def completion_with_backoff(
     # Update request parameters for compatibility with o1 model series
     # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
     if model.startswith("o1"):
-        stream = False
         temperature = 1
         model_kwargs.pop("stop", None)
         model_kwargs.pop("response_format", None)
@@ -156,7 +155,6 @@ def llm_thread(
     # Update request parameters for compatibility with o1 model series
     # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
     if model_name.startswith("o1"):
-        stream = False
         temperature = 1
         model_kwargs.pop("stop", None)
         model_kwargs.pop("response_format", None)