Make offline chat model user configurable

Only Llama v2 models supported by GPT4All will work, since the prompt
structure is not currently configurable
Debanjum Singh Solanky 2023-10-04 20:39:31 -07:00
parent d1ff812021
commit a85ff941ca
2 changed files with 3 additions and 1 deletion
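For context on the Llama v2 restriction: the offline chat pipeline hardcodes the Llama v2 chat prompt format, so only models trained on that format respond sensibly. A rough sketch of that format (the system prompt text here is a placeholder, not from this commit):

    # Llama v2 chat prompt structure assumed by the offline chat pipeline.
    # Models not trained on this format will produce degraded output.
    system_prompt = "You are Khoj, a helpful personal assistant."  # placeholder
    user_message = "Hello!"
    llama_v2_prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n{user_message} [/INST]"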


@@ -84,7 +84,7 @@ class SearchModels:
 @dataclass
 class GPT4AllProcessorConfig:
-    chat_model: Optional[str] = "llama-2-7b-chat.ggmlv3.q4_0.bin"
+    chat_model: Optional[str] = None
     loaded_model: Union[Any, None] = None
@@ -95,6 +95,7 @@ class ConversationProcessorConfigModel:
     ):
         self.openai_model = conversation_config.openai
         self.gpt4all_model = GPT4AllProcessorConfig()
+        self.gpt4all_model.chat_model = conversation_config.offline_chat_model
         self.enable_offline_chat = conversation_config.enable_offline_chat
         self.conversation_logfile = Path(conversation_config.conversation_logfile)
         self.chat_session: List[str] = []
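With this change, the model name flows from user config into GPT4AllProcessorConfig instead of being hardcoded. A minimal sketch of how the configured name might then be consumed when loading the model via the gpt4all Python bindings (the function name and fallback logic are illustrative, not from this commit):

    from gpt4all import GPT4All

    def load_offline_chat_model(config: ConversationProcessorConfigModel):
        # Fall back to the previous hardcoded default if the user set no model
        model_name = config.gpt4all_model.chat_model or "llama-2-7b-chat.ggmlv3.q4_0.bin"
        config.gpt4all_model.loaded_model = GPT4All(model_name)
        return config.gpt4all_model.loaded_model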


@@ -95,6 +95,7 @@ class ConversationProcessorConfig(ConfigBase):
     conversation_logfile: Path
     openai: Optional[OpenAIProcessorConfig]
     enable_offline_chat: Optional[bool] = False
+    offline_chat_model: Optional[str] = "llama-2-7b-chat.ggmlv3.q4_0.bin"


 class ProcessorConfig(ConfigBase):
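For illustration, a user-facing config built with the new field might look like the following (the logfile path and the 13B model name are example values, not from this commit):

    from pathlib import Path

    conversation_config = ConversationProcessorConfig(
        conversation_logfile=Path("conversation_logs.json"),
        openai=None,
        enable_offline_chat=True,
        # any GPT4All-supported Llama v2 model should work
        offline_chat_model="llama-2-13b-chat.ggmlv3.q4_0.bin",
    )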