Update offline chat model config schema used by Emacs, Obsidian clients

The server uses a new schema for the conversation config. The Emacs and
Obsidian clients need to use this schema to update the conversation
config.
Debanjum Singh Solanky 2023-10-17 06:30:20 -07:00
parent ecc6fbfeb2
commit b8976426eb
3 changed files with 50 additions and 12 deletions
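For context, the processor conversation section changes shape roughly as sketched below. This is a minimal sketch assembled from the default test configs in this commit; the key names come from the diffs, and the values are only illustrative defaults.

# Old schema: offline chat controlled by a single flat flag
old_conversation_config = {
    "openai": {"api-key": None, "chat-model": "gpt-3.5-turbo"},
    "enable-offline-chat": False,
    "conversation-logfile": "~/.khoj/processor/conversation/conversation_logs.json",
}

# New schema: offline chat settings grouped under "offline-chat", with an
# explicit offline chat model next to the enable flag
new_conversation_config = {
    "openai": {"api-key": None, "chat-model": "gpt-3.5-turbo"},
    "offline-chat": {
        "enable-offline-chat": False,
        "chat-model": "llama-2-7b-chat.ggmlv3.q4_0.bin",
    },
    "tokenizer": None,
    "max-prompt-size": None,
    "conversation-logfile": "~/.khoj/processor/conversation/conversation_logs.json",
}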

View file

@@ -261,6 +261,11 @@ for example), set this to the full interpreter path."
:type 'boolean
:group 'khoj)
(defcustom khoj-offline-chat-model nil
"Specify chat model to use for offline chat with khoj."
:type 'string
:group 'khoj)
(defcustom khoj-auto-setup t
"Automate install, configure and start of khoj server.
Auto invokes setup steps on calling main entrypoint."
@@ -405,7 +410,8 @@ CONFIG is json obtained from Khoj config API."
(default-index-dir (khoj--get-directory-from-config default-config '(content-type org embeddings-file)))
(default-chat-dir (khoj--get-directory-from-config default-config '(processor conversation conversation-logfile)))
(chat-model (or khoj-chat-model (alist-get 'chat-model (alist-get 'openai (alist-get 'conversation (alist-get 'processor default-config))))))
(enable-offline-chat (or khoj-chat-offline (alist-get 'enable-offline-chat (alist-get 'conversation (alist-get 'processor default-config)))))
(enable-offline-chat (or khoj-chat-offline (alist-get 'enable-offline-chat (alist-get 'offline-chat (alist-get 'conversation (alist-get 'processor default-config))))))
(offline-chat-model (or khoj-offline-chat-model (alist-get 'chat-model (alist-get 'offline-chat (alist-get 'conversation (alist-get 'processor default-config))))))
(config (or current-config default-config)))
;; Configure content types
@@ -469,7 +475,8 @@ CONFIG is json obtained from Khoj config API."
(message "khoj.el: Chat not configured yet.")
(setq config (delq (assoc 'processor config) config))
(cl-pushnew `(processor . ((conversation . ((conversation-logfile . ,(format "%s/conversation.json" default-chat-dir))
(enable-offline-chat . ,enable-offline-chat)
(offline-chat . ((enable-offline-chat . ,enable-offline-chat)
(chat-model . ,offline-chat-model)))
(openai . ((chat-model . ,chat-model)
(api-key . ,khoj-openai-api-key)))))))
config))
@@ -480,7 +487,8 @@ CONFIG is json obtained from Khoj config API."
(let ((new-processor-type (alist-get 'processor config)))
(setq new-processor-type (delq (assoc 'conversation new-processor-type) new-processor-type))
(cl-pushnew `(conversation . ((conversation-logfile . ,(format "%s/conversation.json" default-chat-dir))
(enable-offline-chat . ,enable-offline-chat)
(offline-chat . ((enable-offline-chat . ,enable-offline-chat)
(chat-model . ,offline-chat-model)))
(openai . ((chat-model . ,chat-model)
(api-key . ,khoj-openai-api-key)))))
new-processor-type)
@@ -490,13 +498,15 @@ CONFIG is json obtained from Khoj config API."
;; Else if chat configuration in khoj backend has gone stale
((not (and (equal (alist-get 'api-key (alist-get 'openai (alist-get 'conversation (alist-get 'processor config)))) khoj-openai-api-key)
(equal (alist-get 'chat-model (alist-get 'openai (alist-get 'conversation (alist-get 'processor config)))) khoj-chat-model)
(equal (alist-get 'enable-offline-chat (alist-get 'conversation (alist-get 'processor config))) enable-offline-chat)))
(equal (alist-get 'enable-offline-chat (alist-get 'offline-chat (alist-get 'conversation (alist-get 'processor config)))) enable-offline-chat)
(equal (alist-get 'chat-model (alist-get 'offline-chat (alist-get 'conversation (alist-get 'processor config)))) offline-chat-model)))
(message "khoj.el: Chat configuration has gone stale.")
(let* ((chat-directory (khoj--get-directory-from-config config '(processor conversation conversation-logfile)))
(new-processor-type (alist-get 'processor config)))
(setq new-processor-type (delq (assoc 'conversation new-processor-type) new-processor-type))
(cl-pushnew `(conversation . ((conversation-logfile . ,(format "%s/conversation.json" chat-directory))
(enable-offline-chat . ,enable-offline-chat)
(offline-chat . ((enable-offline-chat . ,enable-offline-chat)
(chat-model . ,offline-chat-model)))
(openai . ((chat-model . ,khoj-chat-model)
(api-key . ,khoj-openai-api-key)))))
new-processor-type)

View file

@@ -14,11 +14,18 @@ type OpenAIType = null | {
"api-key": string;
};
type OfflineChatType = null | {
"chat-model": string;
"enable-offline-chat": boolean;
};
interface ProcessorData {
conversation: {
"conversation-logfile": string;
openai: OpenAIType;
"enable-offline-chat": boolean;
"offline-chat": OfflineChatType;
"tokenizer": null | string;
"max-prompt-size": null | number;
};
}
@@ -106,7 +113,8 @@ export async function configureKhojBackend(vault: Vault, setting: KhojSetting, n
// Get default config fields from khoj backend
let defaultConfig = await request(`${khojConfigUrl}/default`).then(response => JSON.parse(response));
let khojDefaultChatDirectory = getIndexDirectoryFromBackendConfig(defaultConfig["processor"]["conversation"]["conversation-logfile"]);
let khojDefaultChatModelName = defaultConfig["processor"]["conversation"]["openai"]["chat-model"];
let khojDefaultOpenAIChatModelName = defaultConfig["processor"]["conversation"]["openai"]["chat-model"];
let khojDefaultOfflineChatModelName = defaultConfig["processor"]["conversation"]["offline-chat"]["chat-model"];
// Get current config if khoj backend configured, else get default config from khoj backend
await request(khoj_already_configured ? khojConfigUrl : `${khojConfigUrl}/default`)
@@ -117,13 +125,18 @@ export async function configureKhojBackend(vault: Vault, setting: KhojSetting, n
"conversation": {
"conversation-logfile": conversationLogFile,
"openai": null,
"enable-offline-chat": setting.enableOfflineChat,
"offline-chat": {
"chat-model": khojDefaultOfflineChatModelName,
"enable-offline-chat": setting.enableOfflineChat,
},
"tokenizer": null,
"max-prompt-size": null,
}
}
// If the Open AI API Key was configured in the plugin settings
if (!!setting.openaiApiKey) {
let openAIChatModel = data?.["processor"]?.["conversation"]?.["openai"]?.["chat-model"] ?? khojDefaultChatModelName;
let openAIChatModel = data?.["processor"]?.["conversation"]?.["openai"]?.["chat-model"] ?? khojDefaultOpenAIChatModelName;
processorData = {
"conversation": {
"conversation-logfile": conversationLogFile,
@@ -131,7 +144,12 @@ export async function configureKhojBackend(vault: Vault, setting: KhojSetting, n
"chat-model": openAIChatModel,
"api-key": setting.openaiApiKey,
},
"enable-offline-chat": setting.enableOfflineChat,
"offline-chat": {
"chat-model": khojDefaultOfflineChatModelName,
"enable-offline-chat": setting.enableOfflineChat,
},
"tokenizer": null,
"max-prompt-size": null,
},
}
}

View file

@@ -53,7 +53,12 @@ empty_config = {
"api-key": None,
"chat-model": "gpt-3.5-turbo",
},
"enable-offline-chat": False,
"offline-chat": {
"enable-offline-chat": False,
"chat-model": "llama-2-7b-chat.ggmlv3.q4_0.bin",
},
"tokenizer": None,
"max-prompt-size": None,
"conversation-logfile": "~/.khoj/processor/conversation/conversation_logs.json",
}
},
@@ -125,7 +130,12 @@ default_config = {
"api-key": None,
"chat-model": "gpt-3.5-turbo",
},
"enable-offline-chat": False,
"offline-chat": {
"enable-offline-chat": False,
"chat-model": "llama-2-7b-chat.ggmlv3.q4_0.bin",
},
"tokenizer": None,
"max-prompt-size": None,
"conversation-logfile": "~/.khoj/processor/conversation/conversation_logs.json",
}
},
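The client-side changes above all follow the same pattern: read the offline chat fields from the nested "offline-chat" block instead of the old flat "enable-offline-chat" key. A hypothetical Python helper sketching that lookup, with a fallback for configs still on the old schema (not part of this commit; the dictionary shape matches the test configs above):

def get_offline_chat_settings(config):
    """Return (enabled, chat_model) for offline chat from a Khoj config dict."""
    conversation = config.get("processor", {}).get("conversation", {}) or {}
    offline_chat = conversation.get("offline-chat")
    if offline_chat is not None:
        # New schema: settings grouped under "offline-chat"
        return offline_chat.get("enable-offline-chat", False), offline_chat.get("chat-model")
    # Old schema: single flat flag, no configurable offline chat model
    return conversation.get("enable-offline-chat", False), None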