mirror of
https://github.com/khoj-ai/khoj.git
synced 2025-02-17 08:04:21 +00:00
Use the chat_model specified in the new offline_chat section of the config
- Dedupe the offline_chat_model variable: only reference the offline chat model stored under offline_chat. Delete the previous chat_model field under GPT4AllProcessorConfig. - Set the offline chat model to use via the config/offline_chat API endpoint
This commit is contained in:
parent
feb4f17e3d
commit
116595b351
3 changed files with 5 additions and 4 deletions
|
@ -288,6 +288,7 @@ if not state.demo:
|
|||
async def set_processor_enable_offline_chat_config_data(
|
||||
request: Request,
|
||||
enable_offline_chat: bool,
|
||||
offline_chat_model: Optional[str] = None,
|
||||
client: Optional[str] = None,
|
||||
):
|
||||
_initialize_config()
|
||||
|
@ -302,6 +303,8 @@ if not state.demo:
|
|||
|
||||
assert state.config.processor.conversation is not None
|
||||
state.config.processor.conversation.offline_chat.enable_offline_chat = enable_offline_chat
|
||||
if offline_chat_model is not None:
|
||||
state.config.processor.conversation.offline_chat.chat_model = offline_chat_model
|
||||
state.processor_config = configure_processor(state.config.processor, state.processor_config)
|
||||
|
||||
update_telemetry_state(
|
||||
|
|
|
@ -122,7 +122,7 @@ def generate_chat_response(
|
|||
conversation_log=meta_log,
|
||||
completion_func=partial_completion,
|
||||
conversation_command=conversation_command,
|
||||
model=state.processor_config.conversation.gpt4all_model.chat_model,
|
||||
model=state.processor_config.conversation.offline_chat.chat_model,
|
||||
)
|
||||
|
||||
elif state.processor_config.conversation.openai_model:
|
||||
|
|
|
@ -84,7 +84,6 @@ class SearchModels:
|
|||
|
||||
@dataclass
|
||||
class GPT4AllProcessorConfig:
|
||||
chat_model: Optional[str] = None
|
||||
loaded_model: Union[Any, None] = None
|
||||
|
||||
|
||||
|
@ -95,7 +94,6 @@ class ConversationProcessorConfigModel:
|
|||
):
|
||||
self.openai_model = conversation_config.openai
|
||||
self.gpt4all_model = GPT4AllProcessorConfig()
|
||||
self.gpt4all_model.chat_model = conversation_config.offline_chat_model
|
||||
self.offline_chat = conversation_config.offline_chat
|
||||
self.conversation_logfile = Path(conversation_config.conversation_logfile)
|
||||
self.chat_session: List[str] = []
|
||||
|
@ -103,7 +101,7 @@ class ConversationProcessorConfigModel:
|
|||
|
||||
if self.offline_chat.enable_offline_chat:
|
||||
try:
|
||||
self.gpt4all_model.loaded_model = download_model(self.gpt4all_model.chat_model)
|
||||
self.gpt4all_model.loaded_model = download_model(self.offline_chat.chat_model)
|
||||
except ValueError as e:
|
||||
self.offline_chat.enable_offline_chat = False
|
||||
self.gpt4all_model.loaded_model = None
|
||||
|
|
Loading…
Add table
Reference in a new issue