From 6212d7c2e8803300215e53f7e85296aa0ecb2747 Mon Sep 17 00:00:00 2001
From: Saba
Date: Mon, 5 Jun 2023 19:00:25 -0700
Subject: [PATCH] Remove debug line

---
 src/khoj/processor/conversation/utils.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/khoj/processor/conversation/utils.py b/src/khoj/processor/conversation/utils.py
index d81ec648..9cf9952f 100644
--- a/src/khoj/processor/conversation/utils.py
+++ b/src/khoj/processor/conversation/utils.py
@@ -106,7 +106,6 @@ def truncate_message(messages, max_prompt_size, model_name):
     """Truncate messages to fit within max prompt size supported by model"""
     encoder = tiktoken.encoding_for_model(model_name)
     tokens = sum([len(encoder.encode(message.content)) for message in messages])
-    logger.info(f"num tokens: {tokens}")
     while tokens > max_prompt_size and len(messages) > 1:
         messages.pop()
         tokens = sum([len(encoder.encode(message.content)) for message in messages])
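
Notes: for context, below is a minimal, self-contained sketch of the truncation
loop this hunk touches, with the deleted debug log removed. The Message
dataclass is a hypothetical stand-in for Khoj's chat message objects (only the
.content attribute matters here), the trailing return is added for the demo and
is not shown in the hunk, and "gpt-3.5-turbo" is just an example model name
passed to tiktoken.

from dataclasses import dataclass

import tiktoken


@dataclass
class Message:
    content: str  # stand-in for the real message type; only .content is used


def truncate_message(messages, max_prompt_size, model_name):
    """Truncate messages to fit within max prompt size supported by model."""
    encoder = tiktoken.encoding_for_model(model_name)
    tokens = sum([len(encoder.encode(message.content)) for message in messages])
    # Drop messages from the end of the list until the conversation fits the
    # token budget, always keeping at least one message.
    while tokens > max_prompt_size and len(messages) > 1:
        messages.pop()
        tokens = sum([len(encoder.encode(message.content)) for message in messages])
    return messages  # added for this demo; the hunk does not show the rest of the function


# Two messages of roughly 51 tokens each against a 60-token budget:
# the second message gets popped so the conversation fits.
messages = [Message("hello " * 50), Message("world " * 50)]
truncated = truncate_message(messages, max_prompt_size=60, model_name="gpt-3.5-turbo")
print(len(truncated))  # 1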