mirror of
https://github.com/khoj-ai/khoj.git
synced 2024-12-18 18:47:11 +00:00
Only auto load available chat models from Ollama provider for now
Allowing models from any OpenAI proxy service makes it too unwieldy, and many of them do not even support this endpoint.
This commit is contained in:
parent
2c934162d3
commit
3fd8614a4b
1 changed files with 4 additions and 0 deletions
|
@@ -235,6 +235,10 @@ def initialization(interactive: bool = True):
|
||||||
# Get OpenAI configs with custom base URLs
|
# Get OpenAI configs with custom base URLs
|
||||||
custom_configs = AiModelApi.objects.exclude(api_base_url__isnull=True)
|
custom_configs = AiModelApi.objects.exclude(api_base_url__isnull=True)
|
||||||
|
|
||||||
|
# Only enable for whitelisted provider names (i.e. Ollama) for now
|
||||||
|
# TODO: This is hacky. Will be replaced with more robust solution based on provider type enum
|
||||||
|
custom_configs = custom_configs.filter(name__in=["Ollama"])
|
||||||
|
|
||||||
for config in custom_configs:
|
for config in custom_configs:
|
||||||
try:
|
try:
|
||||||
# Create OpenAI client with custom base URL
|
# Create OpenAI client with custom base URL
|
||||||
|
|
Loading…
Reference in a new issue