Rename additional context to additional_context_for_llm_response

This commit is contained in:
sabaimran 2024-12-03 21:23:15 -08:00
parent c87fce5930
commit 3552032827
10 changed files with 26 additions and 33 deletions

View file

@@ -541,12 +541,10 @@ export class KhojChatView extends KhojPaneView {
imageMarkdown += `${message}`;
}
if (!images || images.length === 0) {
if (inferredQueries) {
imageMarkdown += "\n\n**Inferred Query**:";
for (let inferredQuery of inferredQueries) {
imageMarkdown += `\n\n${inferredQuery}`;
}
if (images?.length === 0 && inferredQueries) {
imageMarkdown += "\n\n**Inferred Query**:";
for (let inferredQuery of inferredQueries) {
imageMarkdown += `\n\n${inferredQuery}`;
}
}
return imageMarkdown;

View file

@@ -397,11 +397,6 @@ const ChatMessage = forwardRef<HTMLDivElement, ChatMessageProps>((props, ref) =>
// Prepare initial message for rendering
let message = props.chatMessage.message;
if (props.chatMessage.intent && props.chatMessage.intent.type == "excalidraw") {
message = props.chatMessage.intent["inferred-queries"][0];
setExcalidrawData(props.chatMessage.message);
}
if (props.chatMessage.excalidrawDiagram) {
setExcalidrawData(props.chatMessage.excalidrawDiagram);
}

View file

@@ -65,12 +65,12 @@ class PeopleAlsoAsk(PydanticBaseModel):
class KnowledgeGraph(PydanticBaseModel):
attributes: Dict[str, str]
description: str
descriptionLink: str
descriptionSource: str
imageUrl: str
description: Optional[str] = None
descriptionLink: Optional[str] = None
descriptionSource: Optional[str] = None
imageUrl: Optional[str] = None
title: str
type: str
type: Optional[str] = None
class OrganicContext(PydanticBaseModel):

View file

@@ -160,7 +160,7 @@ def converse_anthropic(
generated_images: Optional[list[str]] = None,
generated_files: List[FileAttachment] = None,
generated_excalidraw_diagram: Optional[str] = None,
additional_context: Optional[List[str]] = None,
additional_context_for_llm_response: Optional[List[str]] = None,
tracer: dict = {},
):
"""
@@ -224,7 +224,7 @@ def converse_anthropic(
generated_excalidraw_diagram=generated_excalidraw_diagram,
generated_files=generated_files,
generated_images=generated_images,
additional_program_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
)
messages, system_prompt = format_messages_for_anthropic(messages, system_prompt)

View file

@@ -170,7 +170,7 @@ def converse_gemini(
generated_images: Optional[list[str]] = None,
generated_files: List[FileAttachment] = None,
generated_excalidraw_diagram: Optional[str] = None,
additional_context: List[str] = None,
additional_context_for_llm_response: List[str] = None,
tracer={},
):
"""
@@ -235,7 +235,7 @@ def converse_gemini(
generated_excalidraw_diagram=generated_excalidraw_diagram,
generated_files=generated_files,
generated_images=generated_images,
additional_program_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
)
messages, system_prompt = format_messages_for_gemini(messages, system_prompt)

View file

@@ -234,7 +234,7 @@ def converse_offline(
model_type=ChatModelOptions.ModelType.OFFLINE,
query_files=query_files,
generated_files=generated_files,
additional_program_context=additional_context,
additional_context_for_llm_response=additional_context,
)
logger.debug(f"Conversation Context for {model}: {messages_to_print(messages)}")

View file

@@ -160,7 +160,7 @@ def converse(
generated_images: Optional[list[str]] = None,
generated_files: List[FileAttachment] = None,
generated_excalidraw_diagram: Optional[str] = None,
additional_context: List[str] = None,
additional_context_for_llm_response: List[str] = None,
tracer: dict = {},
):
"""
@@ -226,7 +226,7 @@ def converse(
generated_excalidraw_diagram=generated_excalidraw_diagram,
generated_files=generated_files,
generated_images=generated_images,
additional_program_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
)
logger.debug(f"Conversation Context for GPT: {messages_to_print(messages)}")

View file

@@ -186,9 +186,7 @@ Here is the image you generated based on my query. You can follow-up with a gene
generated_diagram_attachment = PromptTemplate.from_template(
f"""
The AI has successfully created a diagram based on the user's query and handled the request. Good job! This will be shared with the user.
AI can follow-up with a general response or summary. Limit to 1-2 sentences.
I've successfully created a diagram based on the user's query. The diagram will automatically be shared with the user. I can follow-up with a general response or summary. Limit to 1-2 sentences.
""".strip()
)

View file

@@ -383,7 +383,7 @@ def generate_chatml_messages_with_context(
generated_images: Optional[list[str]] = None,
generated_files: List[FileAttachment] = None,
generated_excalidraw_diagram: str = None,
additional_program_context: List[str] = [],
additional_context_for_llm_response: List[str] = [],
):
"""Generate chat messages with appropriate context from previous conversation to send to the chat model"""
# Set max prompt size from user config or based on pre-configured for model and machine specs
@@ -484,10 +484,12 @@ def generate_chatml_messages_with_context(
if generated_excalidraw_diagram:
messages.append(ChatMessage(content=prompts.generated_diagram_attachment.format(), role="assistant"))
if additional_program_context:
if additional_context_for_llm_response:
messages.append(
ChatMessage(
content=prompts.additional_program_context.format(context="\n".join(additional_program_context)),
content=prompts.additional_program_context.format(
context="\n".join(additional_context_for_llm_response)
),
role="assistant",
)
)

View file

@@ -1188,7 +1188,7 @@ def generate_chat_response(
generated_images: List[str] = None,
raw_generated_files: List[FileAttachment] = [],
generated_excalidraw_diagram: str = None,
additional_context: List[str] = [],
additional_context_for_llm_response: List[str] = [],
tracer: dict = {},
) -> Tuple[Union[ThreadedGenerator, Iterator[str]], Dict[str, str]]:
# Initialize Variables
@@ -1280,7 +1280,7 @@ def generate_chat_response(
generated_files=raw_generated_files,
generated_images=generated_images,
generated_excalidraw_diagram=generated_excalidraw_diagram,
additional_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
tracer=tracer,
)
@@ -1307,7 +1307,7 @@ def generate_chat_response(
generated_files=raw_generated_files,
generated_images=generated_images,
generated_excalidraw_diagram=generated_excalidraw_diagram,
additional_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
tracer=tracer,
)
elif conversation_config.model_type == ChatModelOptions.ModelType.GOOGLE:
@@ -1333,7 +1333,7 @@ def generate_chat_response(
generated_files=raw_generated_files,
generated_images=generated_images,
generated_excalidraw_diagram=generated_excalidraw_diagram,
additional_context=additional_context,
additional_context_for_llm_response=additional_context_for_llm_response,
tracer=tracer,
)