mirror of
https://github.com/khoj-ai/khoj.git
synced 2024-11-23 15:38:55 +01:00
4b6ec248a6
- Add a data model which allows us to store Conversations with users. This does a minimal lift over the current setup, where the underlying data is stored in a JSON file. This maintains parity with that configuration. - There does _seem_ to be some regression in chat quality, which is most likely attributable to search results. This will help us with #275. It should become much easier to maintain multiple Conversations in a given table in the backend now. We will have to do some thinking on the UI.
51 lines
1.2 KiB
Python
import factory
|
|
import os
|
|
|
|
from database.models import (
|
|
KhojUser,
|
|
ConversationProcessorConfig,
|
|
OfflineChatProcessorConversationConfig,
|
|
OpenAIProcessorConversationConfig,
|
|
Conversation,
|
|
)
|
|
|
|
|
|
class UserFactory(factory.django.DjangoModelFactory):
    """Build KhojUser rows with faked credentials for use in tests."""

    class Meta:
        model = KhojUser

    # NOTE(review): the "name" provider yields full names containing spaces;
    # confirm KhojUser.username accepts spaces, else "user_name" may be intended.
    username = factory.Faker("name")
    email = factory.Faker("email")
    password = factory.Faker("password")
    uuid = factory.Faker("uuid4")
|
class ConversationProcessorConfigFactory(factory.django.DjangoModelFactory):
    """Build ConversationProcessorConfig rows with fixed test defaults."""

    class Meta:
        model = ConversationProcessorConfig

    # Prompt-size cap used by the chat processor — presumably measured in
    # tokens; TODO confirm the unit against the model/consumer.
    max_prompt_size = 2000
    # No explicit tokenizer: leave selection to the application default.
    tokenizer = None
|
class OfflineChatProcessorConversationConfigFactory(factory.django.DjangoModelFactory):
    """Build an offline-chat config pointing at a local llama-2 model file."""

    class Meta:
        model = OfflineChatProcessorConversationConfig

    enable_offline_chat = True
    # GGML 4-bit quantized llama-2 7B chat weights file name.
    chat_model = "llama-2-7b-chat.ggmlv3.q4_0.bin"
|
class OpenAIProcessorConversationConfigFactory(factory.django.DjangoModelFactory):
    """Build an OpenAI chat-processor config keyed from the environment."""

    class Meta:
        model = OpenAIProcessorConversationConfig

    # Evaluated once at module import time; api_key is None when
    # OPENAI_API_KEY is unset in the test environment.
    api_key = os.getenv("OPENAI_API_KEY")
    chat_model = "gpt-3.5-turbo"
|
class ConversationFactory(factory.django.DjangoModelFactory):
    """Build Conversation rows, each owned by a freshly created user."""

    class Meta:
        model = Conversation

    # SubFactory creates (and, under DjangoModelFactory, persists) a new
    # KhojUser for every Conversation built.
    user = factory.SubFactory(UserFactory)