# Standard Packages
import sys, json, yaml
from typing import Optional

# External Packages
import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

# Internal Packages
from src.search_type import asymmetric, symmetric_ledger, image_search
from src.utils.helpers import get_absolute_path
from src.utils.cli import cli
from src.utils.config import SearchType, SearchModels, ProcessorConfig, ConversationProcessorConfigDTO
from src.utils.rawconfig import FullConfigModel
from src.processor.conversation.gpt import converse, message_to_log, message_to_prompt, understand


# Application Global State
model = SearchModels()
processor_config = ProcessorConfig()
config = {}
config_file = ""
verbose = 0
app = FastAPI()

app.mount("/views", StaticFiles(directory="views"), name="views")
templates = Jinja2Templates(directory="views/")
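# Render the config editing interface (views/config.html)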
@app.get('/ui', response_class=HTMLResponse)
def ui(request: Request):
    return templates.TemplateResponse("config.html", context={'request': request})
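# Return the currently loaded application config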
@app.get('/config', response_model=FullConfigModel)
def get_config():
    return config
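# Replace the application config and persist it as YAML to the config file on disk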
@app.post('/config')
async def set_config(updated_config: FullConfigModel):
    global config
    config = updated_config
    with open(config_file, 'w') as outfile:
        yaml.dump(yaml.safe_load(config.json(by_alias=True)), outfile)
    return config
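# Search indexed content of the requested type (t) and return the top n results for query q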
@app.get('/search')
def search(q: str, n: Optional[int] = 5, t: Optional[SearchType] = None):
    if q is None or q == '':
        print('No query param (q) passed in API call to initiate search')
        return {}

    user_query = q
    results_count = n

    if (t == SearchType.Notes or t == None) and model.notes_search:
        # query notes
        hits = asymmetric.query(user_query, model.notes_search)

        # collate and return results
        return asymmetric.collate_results(hits, model.notes_search.entries, results_count)

    if (t == SearchType.Music or t == None) and model.music_search:
        # query music library
        hits = asymmetric.query(user_query, model.music_search)

        # collate and return results
        return asymmetric.collate_results(hits, model.music_search.entries, results_count)

    if (t == SearchType.Ledger or t == None) and model.ledger_search:
        # query transactions
        hits = symmetric_ledger.query(user_query, model.ledger_search)

        # collate and return results
        return symmetric_ledger.collate_results(hits, model.ledger_search.entries, results_count)

    if (t == SearchType.Image or t == None) and model.image_search:
        # query images
        hits = image_search.query(user_query, results_count, model.image_search)

        # collate and return results
        return image_search.collate_results(
            hits,
            model.image_search.image_names,
            config.content_type.image.input_directory,
            results_count)

    else:
        return {}
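# Example request (illustrative only; assumes the server's default host/port and that
# SearchType members serialize to lowercase values such as 'notes' or 'ledger'):
#   GET /search?q=groceries+bought+last+week&n=5&t=ledger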
@app.get('/regenerate')
def regenerate(t: Optional[SearchType] = None):
    global model
    model = initialize_search(regenerate=True, t=t)
    return {'status': 'ok', 'message': 'regeneration completed'}
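# Chat with OpenAI GPT using the conversation history tracked in the processor config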
@app.get('/chat')
def chat(q: str):
    # Load Conversation History
    chat_log = processor_config.conversation.chat_log
    meta_log = processor_config.conversation.meta_log

    # Converse with OpenAI GPT
    user_message_metadata = understand(q, api_key=processor_config.conversation.openai_api_key)
    gpt_response = converse(q, chat_log, api_key=processor_config.conversation.openai_api_key)

    # Update Conversation History
    processor_config.conversation.chat_log = message_to_prompt(q, chat_log, gpt_message=gpt_response)
    processor_config.conversation.meta_log = message_to_log(q, user_message_metadata, gpt_response, meta_log)

    return {'status': 'ok', 'response': gpt_response}
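# Build search models for each content type enabled in config; regenerate embeddings when requested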
def initialize_search(regenerate: bool, t: SearchType = None):
    model = SearchModels()

    # Initialize Org Notes Search
    if (t == SearchType.Notes or t == None) and config.content_type.org:
        # Extract Entries, Generate Notes Embeddings
        model.notes_search = asymmetric.setup(config.content_type.org, regenerate=regenerate, verbose=verbose)

    # Initialize Org Music Search
    if (t == SearchType.Music or t == None) and config.content_type.music:
        # Extract Entries, Generate Music Embeddings
        model.music_search = asymmetric.setup(config.content_type.music, regenerate=regenerate, verbose=verbose)

    # Initialize Ledger Search
    if (t == SearchType.Ledger or t == None) and config.content_type.ledger:
        # Extract Entries, Generate Ledger Embeddings
        model.ledger_search = symmetric_ledger.setup(config.content_type.ledger, regenerate=regenerate, verbose=verbose)

    # Initialize Image Search
    if (t == SearchType.Image or t == None) and config.content_type.image:
        # Extract Entries, Generate Image Embeddings
        model.image_search = image_search.setup(config.content_type.image, regenerate=regenerate, verbose=verbose)

    return model
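# Build the conversation processor config and load any existing conversation logs from disk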
def initialize_processor():
    if not config.processor:
        return

    processor_config = ProcessorConfig()

    # Initialize Conversation Processor
    processor_config.conversation = ConversationProcessorConfigDTO(config.processor.conversation, verbose)

    conversation_logfile = processor_config.conversation.conversation_logfile
    if processor_config.conversation.verbose:
        print('INFO:\tLoading conversation logs from disk...')

    if conversation_logfile.expanduser().absolute().is_file():
        # Load Metadata Logs from Conversation Logfile
        with open(get_absolute_path(conversation_logfile), 'r') as f:
            processor_config.conversation.meta_log = json.load(f)

        # Extract Chat Logs from Metadata
        processor_config.conversation.chat_log = ''.join(
            [f'\n{item["by"]}: {item["message"]}'
             for item
             in processor_config.conversation.meta_log])

        print('INFO:\tConversation logs loaded from disk.')
    else:
        # Initialize Conversation Logs
        processor_config.conversation.meta_log = []
        processor_config.conversation.chat_log = ""

    return processor_config
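# Persist conversation metadata logs to disk when the server shuts down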
@app.on_event('shutdown')
def shutdown_event():
    # Nothing to save if no conversation processor was initialized
    if not processor_config or not processor_config.conversation:
        return

    # No need to create empty log file
    if not processor_config.conversation.meta_log:
        return
    elif processor_config.conversation.verbose:
        print('INFO:\tSaving conversation logs to disk...')

    # Save Conversation Metadata Logs to Disk
    conversation_logfile = get_absolute_path(processor_config.conversation.conversation_logfile)
    with open(conversation_logfile, "w+", encoding='utf-8') as logfile:
        json.dump(processor_config.conversation.meta_log, logfile)

    print('INFO:\tConversation logs saved to disk.')
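# Application entrypoint: load config from the CLI, initialize search and processors, then start the server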
if __name__ == '__main__':
    # Load config from CLI
    args = cli(sys.argv[1:])

    # Store the file path to the config file
    config_file = args.config_file

    # Store the verbose flag
    verbose = args.verbose

    # Store the raw config data
    config = args.config

    # Initialize the search model from Config
    model = initialize_search(args.regenerate)

    # Initialize Processor from Config
    processor_config = initialize_processor()

    # Start Application Server
    if args.socket:
        uvicorn.run(app, proxy_headers=True, uds=args.socket)
    else:
        uvicorn.run(app, host=args.host, port=args.port)