Mirror of https://github.com/khoj-ai/khoj.git
Synced 2024-11-23 15:38:55 +01:00
Commit 69ef6829c1
- Integrate with Ollama or other OpenAI-compatible APIs by simply setting the `OPENAI_API_BASE` environment variable in docker-compose etc. (see the sketch below)
- Update docs on integrating with Ollama and OpenAI proxies on first run
- Auto-populate all chat models supported by OpenAI-compatible APIs
- Auto-set vision enabled for all commercial models
- Minor:
  - Add huggingface cache to khoj_models volume. This is where chat models and (now) sentence transformer models are stored by default
  - Reduce verbosity of yarn install of the web app. Otherwise it hits the Docker log size limit and stops showing remaining logs after the web app install
  - Suggest `ollama pull <model_name>` to start it in the background
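A minimal sketch of that Ollama setup, assuming Ollama is running on the Docker host with its default OpenAI-compatible endpoint on port 11434, and using `llama3.1` purely as an example model name (this mirrors the commented OPENAI_API_BASE line in the compose file below):

  server:
    environment:
      # Ollama serves an OpenAI-compatible API under /v1/ on its default port 11434 on the host.
      - OPENAI_API_BASE=http://host.docker.internal:11434/v1/
      # Pull the chat model on the host first, e.g. `ollama pull llama3.1` (the model name here is only an example).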
80 lines
3 KiB
YAML
services:
  database:
    image: ankane/pgvector
    ports:
      - "5432:5432"
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: postgres
    volumes:
      - khoj_db:/var/lib/postgresql/data/
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 30s
      timeout: 10s
      retries: 5
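    # The server service below waits for this healthcheck to pass (via depends_on with condition service_healthy) before starting.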
  sandbox:
    image: ghcr.io/khoj-ai/terrarium:latest
    ports:
      - "8080:8080"
  server:
    depends_on:
      database:
        condition: service_healthy
    # Use the following line to use the latest version of khoj. Otherwise, it will build from source.
    image: ghcr.io/khoj-ai/khoj:latest
    # Uncomment the following two lines to build from source. This will take a few minutes. Keep them commented out to use the official image.
    # build:
    #   context: .
    ports:
      # If changing the local port (left hand side), no other changes required.
      # If changing the remote port (right hand side),
      # change the port in the args in the build section,
      # as well as the port in the command section to match
      - "42110:42110"
    working_dir: /app
    volumes:
      - khoj_config:/root/.khoj/
      - khoj_models:/root/.cache/torch/sentence_transformers
      - khoj_models:/root/.cache/huggingface
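      # Both cache paths share the khoj_models volume, so downloaded chat and sentence transformer models persist across container restarts.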
    # Use 0.0.0.0 to explicitly set the host ip for the service on the container. https://pythonspeed.com/articles/docker-connection-refused/
    environment:
      - POSTGRES_DB=postgres
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_HOST=database
      - POSTGRES_PORT=5432
      - KHOJ_DJANGO_SECRET_KEY=secret
      - KHOJ_DEBUG=False
      - KHOJ_ADMIN_EMAIL=username@example.com
      - KHOJ_ADMIN_PASSWORD=password
      # Uncomment line below to use with Ollama running on your local machine at localhost:11434.
      # Change URL to use with other OpenAI API compatible providers like VLLM, LMStudio etc.
      # - OPENAI_API_BASE=http://host.docker.internal:11434/v1/
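      # Note: on Linux hosts, host.docker.internal may require adding extra_hosts: ["host.docker.internal:host-gateway"] to this service.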
      #
      # Uncomment appropriate lines below to use chat models by OpenAI, Anthropic, Google.
      # Ensure you set your provider specific API keys.
      # ---
      # - OPENAI_API_KEY=your_openai_api_key
      # - GEMINI_API_KEY=your_gemini_api_key
      # - ANTHROPIC_API_KEY=your_anthropic_api_key
      #
      # Uncomment the necessary lines below to make your instance publicly accessible.
      # Replace the KHOJ_DOMAIN with either your domain or IP address (no http/https prefix).
      # Proceed with caution, especially if you are using anonymous mode.
      # ---
      # - KHOJ_NO_HTTPS=True
      # - KHOJ_DOMAIN=192.168.0.104
      # - KHOJ_DOMAIN=khoj.example.com
      # Uncomment the line below to disable telemetry.
      # Telemetry helps us prioritize feature development and understand how people are using Khoj
      # Read more at https://docs.khoj.dev/miscellaneous/telemetry
      # - KHOJ_TELEMETRY_DISABLE=True
    command: --host="0.0.0.0" --port=42110 -vv --anonymous-mode --non-interactive
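    # The flags above bind the server to all interfaces on port 42110; -vv raises log verbosity, --anonymous-mode runs without login (see the caution above), and --non-interactive skips interactive setup prompts.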

volumes:
  khoj_config:
  khoj_db:
  khoj_models: