Mirror of https://github.com/khoj-ai/khoj.git, synced 2024-11-23 15:38:55 +01:00
Working API request body parsing to /post config!

parent 64645c3ac1
commit 66183cc298

5 changed files with 10 additions and 16 deletions
src/main.py (19 changes)
@@ -2,11 +2,10 @@
 import sys
-import json
 from typing import Optional
 from src import search_type
 
 # External Packages
 import uvicorn
-from fastapi import FastAPI, Request
+from fastapi import FastAPI, Request, Body
 from fastapi.responses import HTMLResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
@@ -17,6 +16,7 @@ from src.search_type import asymmetric, symmetric_ledger, image_search
 from src.utils.helpers import get_absolute_path
 from src.utils.cli import cli
 from src.utils.config import SearchType, SearchModels, TextSearchConfig, ImageSearchConfig, SearchConfig, ProcessorConfig, ConversationProcessorConfig
+from src.utils.rawconfig import FullConfig
 from src.processor.conversation.gpt import converse, message_to_prompt
 
 # Application Global State
@@ -26,14 +26,6 @@ processor_config = ProcessorConfig()
 config = {}
 app = FastAPI()
 
-class Config(BaseModel):
-    content_type: Optional[SearchConfig]
-    search_type: Optional[SearchModels]
-    processor: Optional[ProcessorConfig]
-
-    class Config:
-        arbitrary_types_allowed = True
-
 app.mount("/views", StaticFiles(directory="views"), name="views")
 templates = Jinja2Templates(directory="views/")
 
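Review note: the inline Config model deleted above is superseded by FullConfig from src/utils/rawconfig, imported in the previous hunk. For readers unfamiliar with the pattern it used, here is a minimal sketch of an inline pydantic model with arbitrary_types_allowed, assuming Pydantic v1; the class and field names below are illustrative, not the repo's.

# Minimal sketch of the pattern the removed class followed: a Pydantic v1 model
# whose fields may hold arbitrary (non-pydantic) types. Names are illustrative.
from typing import Optional
from pydantic import BaseModel

class PlainSettings:                # stand-in for a non-pydantic type such as SearchModels
    def __init__(self, name: str = "default"):
        self.name = name

class AppConfig(BaseModel):         # illustrative; the commit's real model is FullConfig
    settings: Optional[PlainSettings] = None

    class Config:                   # Pydantic v1 escape hatch for non-pydantic field types
        arbitrary_types_allowed = True

print(AppConfig(settings=PlainSettings("images")).settings.name)   # -> images

Moving to FullConfig keeps the request schema in one module and lets FastAPI validate the POST body and serialize the GET response against the same model.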
@@ -41,15 +33,14 @@ templates = Jinja2Templates(directory="views/")
 def ui(request: Request):
     return templates.TemplateResponse("config.html", context={'request': request})
 
-@app.get('/config')
+@app.get('/config', response_model=FullConfig)
 def config():
     return config
 
 @app.post('/config')
-async def config(updated_config: Config):
+async def config(updated_config: FullConfig):
     print(updated_config)
-    data = await updated_config.json()
-    return data
+    return updated_config
 
 @app.get('/search')
 def search(q: str, n: Optional[int] = 5, t: Optional[SearchType] = None):
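This hunk is the heart of the commit: annotating the POST handler's parameter with a pydantic model (FullConfig) makes FastAPI parse and validate the JSON request body itself, so the handler no longer decodes the body by hand. A self-contained sketch of that mechanism follows, using a made-up TinyConfig model and FastAPI's bundled test client; only the /config route and the echo-back behaviour are taken from the diff.

# Sketch of FastAPI parsing a JSON body into a pydantic model.
# TinyConfig and its fields are illustrative, not the repo's FullConfig.
from typing import Optional
from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel

class TinyConfig(BaseModel):
    content_directory: Optional[str] = None
    results_count: int = 5

app = FastAPI()
stored_config = TinyConfig()

@app.get('/config', response_model=TinyConfig)
def get_config():
    return stored_config

@app.post('/config')
def update_config(updated_config: TinyConfig):    # FastAPI parses and validates the JSON body
    global stored_config
    stored_config = updated_config
    return updated_config                          # echoed back as JSON, like the commit does

client = TestClient(app)
print(client.post('/config', json={"content_directory": "~/notes", "results_count": 10}).json())
print(client.get('/config').json())

Because the GET route now declares response_model=FullConfig, the same schema governs both directions of the endpoint.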
src/search_type/symmetric_ledger.py
@@ -19,7 +19,7 @@ from src.utils.config import TextSearchModel, TextSearchConfig
 
 
 def initialize_model():
-    "Initialize model for symetric semantic search. That is, where query of similar size to results"
+    "Initialize model for symmetric semantic search. That is, where query of similar size to results"
     torch.set_num_threads(4)
     bi_encoder = SentenceTransformer('sentence-transformers/paraphrase-MiniLM-L6-v2') # The encoder encodes all entries to use for semantic search
     top_k = 30 # Number of entries we want to retrieve with the bi-encoder
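Beyond the docstring fix, this hunk names the bi-encoder behind the symmetric search path. As a rough usage sketch (not code from the repo), symmetric semantic search with sentence-transformers looks like this; the entries and the query are invented.

# Sketch of symmetric semantic search with the bi-encoder named in the hunk.
from sentence_transformers import SentenceTransformer, util

bi_encoder = SentenceTransformer('sentence-transformers/paraphrase-MiniLM-L6-v2')

entries = ["Pay electricity bill", "Book flight to Berlin", "Settle power company invoice"]
corpus_embeddings = bi_encoder.encode(entries, convert_to_tensor=True)
query_embedding = bi_encoder.encode("paid the energy provider", convert_to_tensor=True)

# top_k mirrors the constant in the diff; here only the best 2 hits are kept
hits = util.semantic_search(query_embedding, corpus_embeddings, top_k=2)[0]
for hit in hits:
    print(entries[hit['corpus_id']], round(hit['score'], 3))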
src/utils/config.py
@@ -116,4 +116,4 @@ class ConversationProcessorConfig():
 
 @dataclass
 class ProcessorConfig():
-    conversation: ConversationProcessorConfig = None
+    conversation: ConversationProcessorConfig = None
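For context, ProcessorConfig carries its conversation settings as a nested dataclass. A small sketch of that nesting, assuming the standard library dataclasses module; the fields given to ConversationProcessorConfig below are placeholders, not necessarily the repo's.

# Sketch of the nested-dataclass configuration pattern shown above.
# The ConversationProcessorConfig fields are placeholders for illustration.
from dataclasses import dataclass

@dataclass
class ConversationProcessorConfig():
    openai_api_key: str = None
    conversation_logfile: str = None

@dataclass
class ProcessorConfig():
    conversation: ConversationProcessorConfig = None

processor_config = ProcessorConfig(
    conversation=ConversationProcessorConfig(openai_api_key="dummy-key"))
print(processor_config.conversation.openai_api_key)   # -> dummy-key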
src/utils/helpers.py
@@ -4,6 +4,8 @@ import pathlib
 def is_none_or_empty(item):
     return item == None or (hasattr(item, '__iter__') and len(item) == 0)
 
+def to_snake_case_from_dash(item: str):
+    return item.replace('_', '-')
 
 def get_absolute_path(filepath):
     return str(pathlib.Path(filepath).expanduser().absolute())
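A short, self-contained illustration of how the helpers in this hunk behave; the argument values are invented and the comments state what the code as written returns.

# Illustration of the helpers above; example calls are not from the repo.
import pathlib

def is_none_or_empty(item):
    return item == None or (hasattr(item, '__iter__') and len(item) == 0)

def to_snake_case_from_dash(item: str):
    return item.replace('_', '-')

def get_absolute_path(filepath):
    return str(pathlib.Path(filepath).expanduser().absolute())

print(is_none_or_empty([]))                     # -> True
print(is_none_or_empty("config.yml"))           # -> False
print(to_snake_case_from_dash("input_files"))   # -> input-files ('_' becomes '-' as written)
print(get_absolute_path("~/notes"))             # -> absolute path under the user's home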
@@ -19,6 +19,7 @@ fetch("/config")
 
 configForm.addEventListener("submit", (event) => {
     event.preventDefault();
+    console.log(rawConfig);
     const response = fetch("/config", {
         method: "POST",
         credentials: "same-origin",
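The submit handler above posts the edited config back to the new endpoint as JSON. For reference, the same round trip can be sketched from Python; this assumes the app is served locally on uvicorn's default port 8000, that the requests package is installed, and that the payload keys are placeholders rather than the real FullConfig schema.

# Sketch of posting a config to the new endpoint from outside the browser.
# Assumes a local server on uvicorn's default port 8000; payload keys are placeholders.
import requests

payload = {"content-type": {}, "processor": {}}   # placeholder sections, not the real schema
response = requests.post("http://localhost:8000/config", json=payload)
print(response.status_code)
print(response.json())    # on success the endpoint echoes the parsed config back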