Add beta API to summarize top search result using an OpenAI model

This is unlike the more general chat API, which combines summarization
of the top search result with conversing with the OpenAI model

This should give faster summary results, as no intent categorization
API call is required
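
For context, a minimal client call against the new endpoint might look like the sketch below. It assumes the beta router is mounted under /api/beta on a locally running server; the base URL, port, and query text are illustrative assumptions, not part of this commit.

# Minimal sketch: query the new summarize endpoint over HTTP.
# Base URL and port are assumptions; adjust to match your deployment.
import requests

response = requests.get(
    "http://localhost:8000/api/beta/summarize",
    params={"q": "What were my notes on intermittent fasting?"},
)
response.raise_for_status()
print(response.json()["response"])  # GPT-generated summary of the top matching note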
Debanjum Singh Solanky 2023-01-09 01:14:49 -03:00
parent d36da46f7b
commit 7b164de021


@@ -33,6 +33,21 @@ def search_beta(q: str, n: Optional[int] = 1):
    return {'status': 'ok', 'result': search_results, 'type': search_type}

@api_beta.get('/summarize')
def summarize_beta(q: str):
    # Initialize Variables
    model = state.processor_config.conversation.model
    api_key = state.processor_config.conversation.openai_api_key

    # Converse with OpenAI GPT
    result_list = search(q, n=1, t=SearchType.Org, r=True)
    collated_result = "\n".join([item.entry for item in result_list])
    logger.debug(f'Semantically Similar Notes:\n{collated_result}')
    gpt_response = summarize(collated_result, summary_type="notes", user_query=q, model=model, api_key=api_key)

    return {'status': 'ok', 'response': gpt_response}

@api_beta.get('/chat')
def chat(q: str):
    # Load Conversation History
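
The summarize helper invoked above is defined elsewhere in the codebase and is not shown in this hunk. A rough sketch of what such a helper could look like, using the pre-1.0 openai Python library's completion API, is below; the prompt wording, default model, and sampling parameters are illustrative assumptions, not the actual implementation.

# Rough sketch of a summarize helper matching the call signature above.
# Prompt text, default model, and parameters are assumptions for illustration.
import openai

def summarize(text: str, summary_type: str, user_query: str = None, model: str = "text-davinci-003", api_key: str = None) -> str:
    openai.api_key = api_key
    prompt = f"Summarize the {summary_type} below in response to this query: {user_query}\n\n{text}\n\nSummary:"
    response = openai.Completion.create(
        model=model,
        prompt=prompt,
        temperature=0.5,
        max_tokens=200,
    )
    return response["choices"][0]["text"].strip()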