Fix passing the model_name param to the chatml formatter for online chat

This commit is contained in:
Debanjum Singh Solanky 2024-04-03 15:53:35 +05:30
parent 6aa88761b8
commit f915b2bd14

View file

@ -163,9 +163,9 @@ def converse(
conversation_primer,
system_prompt,
conversation_log,
model,
max_prompt_size,
tokenizer_name,
model_name=model,
max_prompt_size=max_prompt_size,
tokenizer_name=tokenizer_name,
)
truncated_messages = "\n".join({f"{message.content[:70]}..." for message in messages})
logger.debug(f"Conversation Context for GPT: {truncated_messages}")