Set default temperature to 0.7 in the extract_questions method

This commit is contained in:
sabaimran 2024-09-10 21:09:21 -07:00
parent 8d40fc0aef
commit 07e2c49a7a

View file

@@ -51,6 +51,7 @@ def extract_questions(
today = datetime.today()
current_new_year = today.replace(month=1, day=1)
last_new_year = current_new_year.replace(year=today.year - 1)
temperature = 0.7
prompt = prompts.extract_questions.format(
current_date=today.strftime("%Y-%m-%d"),
@@ -77,7 +78,9 @@ def extract_questions(
messages = [ChatMessage(content=prompt, role="user")]
response = send_message_to_model(messages, api_key, model, response_type="json_object", api_base_url=api_base_url)
response = send_message_to_model(
messages, api_key, model, response_type="json_object", api_base_url=api_base_url, temperature=temperature
)
# Extract, Clean Message from GPT's Response
try:
@@ -96,7 +99,7 @@ def extract_questions(
return questions
def send_message_to_model(messages, api_key, model, response_type="text", api_base_url=None):
def send_message_to_model(messages, api_key, model, response_type="text", api_base_url=None, temperature=0):
"""
Send message to model
"""
@@ -106,6 +109,7 @@ def send_message_to_model(messages, api_key, model, response_type="text", api_ba
messages=messages,
model=model,
openai_api_key=api_key,
temperature=temperature,
api_base_url=api_base_url,
model_kwargs={"response_format": {"type": response_type}},
)