khoj/config/khoj_docker.yml
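# Khoj configuration for the Docker setup. All paths are container paths: the
# docker-compose volumes mount host folders onto /data/ (see the note under
# content-type below).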

content-type:
  # The /data/folder/ prefix to the folders is here because this is
  # the directory to which the local files are copied in the docker-compose.
  # If changing, the docker-compose volumes should also be changed to match.
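  #
  # Each text content type (org, markdown, pdf) follows the same pattern:
  # input-filter selects the files to index via a glob, compressed-jsonl is
  # where the parsed entries are cached, and embeddings-file stores the
  # computed embeddings. input-files can list explicit paths instead of
  # (or in addition to) a filter.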
  org:
    input-files: null
    input-filter: ["/data/org/**/*.org"]
    compressed-jsonl: "/data/embeddings/notes.jsonl.gz"
    embeddings-file: "/data/embeddings/note_embeddings.pt"
    index_heading_entries: false
  markdown:
    input-files: null
    input-filter: ["/data/markdown/**/*.markdown"]
    compressed-jsonl: "/data/embeddings/markdown.jsonl.gz"
    embeddings-file: "/data/embeddings/markdown_embeddings.pt"
  pdf:
    input-files: null
    input-filter: ["/data/pdf/**/*.pdf"]
    compressed-jsonl: "/data/embeddings/pdf.jsonl.gz"
    embeddings-file: "/data/embeddings/pdf_embeddings.pt"
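  # Image search indexes the directories listed below; batch-size sets how many
  # images are embedded per batch, and use-xmp-metadata (when true) also draws
  # on each image's XMP metadata, such as captions, while indexing.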
  image:
    input-directories: ["/data/images/"]
    embeddings-file: "/data/embeddings/image_embeddings.pt"
    batch-size: 50
    use-xmp-metadata: false
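  # null leaves a content source disabled. To index Notion or GitHub, replace
  # null with that source's settings block (API token, repos, etc.; the exact
  # fields depend on the Khoj version).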
  notion: null
  github: null
  plugins: null
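
# Search models. Asymmetric search embeds notes and queries with the bi-encoder
# and re-ranks results with the cross-encoder; image search uses the CLIP model.
# Downloaded models are cached under each model_directory.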
search-type:
  symmetric: null
  asymmetric:
    encoder: "sentence-transformers/multi-qa-MiniLM-L6-cos-v1"
    cross-encoder: "cross-encoder/ms-marco-MiniLM-L-6-v2"
    model_directory: "/data/models/asymmetric"
  image:
    encoder: "sentence-transformers/clip-ViT-B-32"
    model_directory: "/data/models/image_encoder"
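
# Chat settings. conversation-logfile stores chat history, enable-offline-chat
# toggles the locally-run chat model, and the openai block takes effect once an
# api-key is set (it is null here, so OpenAI-backed chat stays off).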
processor:
  conversation:
    conversation-logfile: "/data/embeddings/conversation_logs.json"
    enable-offline-chat: false
    openai:
      api-key: null
      chat-model: "gpt-3.5-turbo"
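
# should_log_telemetry controls anonymized usage telemetry; set it to false to opt out.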
app:
  should_log_telemetry: true