Currently broken but nearing improvement
parent 0a8073f165
commit 77a9e35f2b
22 changed files with 486 additions and 409 deletions
.gitignore (vendored, 2 changes)
@@ -10,7 +10,7 @@ sijapi/data/*.pbf
 sijapi/data/geonames.txt
 sijapi/data/img/images/
 sijapi/config/*.yaml
-sijapi/config/O365/
+sijapi/config/MS365/
 sijapi/local_only/
 sijapi/testbed/
 khoj/
@@ -36,7 +36,7 @@
 !{!{ YOUR SIJAPI SUBDOMAIN }!}! {
     import cors
     @public {
-        path /img/* /oauth /oauth/* /o365 /o365/* /ip /health /health* /health/* /id /identity
+        path /img/* /oauth /oauth/* /MS365 /MS365/* /ip /health /health* /health/* /id /identity
     }
     @apiKeyAuthHeader {
         header Authorization "Bearer !{!{ YOUR GLOBAL_API_KEY }!}!"
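The @public matcher above exempts the listed paths from the API-key check; everything else must carry the bearer token that @apiKeyAuthHeader matches. A minimal sketch of both cases from a client's side, assuming placeholder values for the subdomain and key:

    # Sketch only: "api.example.com" and "YOUR_GLOBAL_API_KEY" are placeholders.
    import httpx

    # /health is matched by @public, so no credentials are required:
    r = httpx.get("https://api.example.com/health")

    # Any path outside @public must present the global API key:
    r = httpx.get(
        "https://api.example.com/private/route",
        headers={"Authorization": "Bearer YOUR_GLOBAL_API_KEY"},
    )
    print(r.status_code)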
@@ -1,63 +1,49 @@
 # __init__.py
 import os
-import json
-import yaml
 from pathlib import Path
 import ipaddress
-import multiprocessing
 from dotenv import load_dotenv
 from dateutil import tz
 from pathlib import Path
-from pydantic import BaseModel
-from typing import List, Optional
 from .logs import Logger
-from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, Database, Geocoder, APIConfig, Configuration
+from .classes import Database, Geocoder, APIConfig, Configuration, Dir

 ### Initial initialization
-BASE_DIR = Path(__file__).resolve().parent
-CONFIG_DIR = BASE_DIR / "config"
-ENV_PATH = CONFIG_DIR / ".env"
-LOGS_DIR = BASE_DIR / "logs"
+API = APIConfig.load('api', 'secrets')
+Dir = Dir.load('dirs')
+ENV_PATH = Dir.CONFIG / ".env"
+LOGS_DIR = Dir.LOGS
 L = Logger("Central", LOGS_DIR)
 os.makedirs(LOGS_DIR, exist_ok=True)
 load_dotenv(ENV_PATH)

 ### API essentials
-API = APIConfig.load('api', 'secrets')
-Dir = Configuration.load('dirs')
-HOST = f"{API.BIND}:{API.PORT}"
-LOCAL_HOSTS = [ipaddress.ip_address(localhost.strip()) for localhost in os.getenv('LOCAL_HOSTS', '127.0.0.1').split(',')] + ['localhost']
-SUBNET_BROADCAST = os.getenv("SUBNET_BROADCAST", '10.255.255.255')
-MAX_CPU_CORES = min(int(os.getenv("MAX_CPU_CORES", int(multiprocessing.cpu_count()/2))), multiprocessing.cpu_count())
-DB = Database.from_env()
+DB = Database.from_yaml('db.yaml')

+ASR = Configuration.load('asr')
+IMG = Configuration.load('img')
+Cal = Configuration.load('cal', 'secrets')
+Email = Configuration.load('email', 'secrets')
+LLM = Configuration.load('llm', 'secrets')
 News = Configuration.load('news', 'secrets')
-IMG = Configuration.load('img', 'secrets')
+TTS = Configuration.load('tts', 'secrets')
+CourtListener = Configuration.load('courtlistener', 'secrets')
+Tailscale = Configuration.load('tailscale', 'secrets')
+Cloudflare = Configuration.load('cloudflare', 'secrets')


 ### Directories & general paths
-ROUTER_DIR = BASE_DIR / "routers"
-DATA_DIR = BASE_DIR / "data"
-os.makedirs(DATA_DIR, exist_ok=True)
-ALERTS_DIR = DATA_DIR / "alerts"
-os.makedirs(ALERTS_DIR, exist_ok=True)
 REQUESTS_DIR = LOGS_DIR / "requests"
 os.makedirs(REQUESTS_DIR, exist_ok=True)
 REQUESTS_LOG_PATH = LOGS_DIR / "requests.log"

 ### LOCATE AND WEATHER LOCALIZATIONS
-USER_FULLNAME = os.getenv('USER_FULLNAME')
-USER_BIO = os.getenv('USER_BIO')
-HOME_ZIP = os.getenv("HOME_ZIP") # unimplemented
-NAMED_LOCATIONS = CONFIG_DIR / "named-locations.yaml"
 # DB = DATA_DIR / "weatherlocate.db" # deprecated
 VISUALCROSSING_BASE_URL = os.getenv("VISUALCROSSING_BASE_URL", "https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline")
 VISUALCROSSING_API_KEY = os.getenv("VISUALCROSSING_API_KEY")
-GEONAMES_TXT = DATA_DIR / "geonames.txt"
-LOCATIONS_CSV = DATA_DIR / "US.csv"
 TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
-TZ_CACHE = DATA_DIR / "tzcache.json"
-GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)
+TZ_CACHE = Dir.DATA / "tzcache.json"
+GEO = Geocoder(Dir.config.locations, TZ_CACHE)

 ### Obsidian & notes
 ALLOWED_FILENAME_CHARS = r'[^\w \.-]'

@@ -71,8 +57,6 @@ OBSIDIAN_BANNER_SCENE = os.getenv("OBSIDIAN_BANNER_SCENE", "wallpaper")
 OBSIDIAN_CHROMADB_COLLECTION = os.getenv("OBSIDIAN_CHROMADB_COLLECTION", "obsidian")
 ARCHIVE_DIR = Path(os.getenv("ARCHIVE_DIR", OBSIDIAN_VAULT_DIR / "archive"))
 os.makedirs(ARCHIVE_DIR, exist_ok=True)
-DOC_DIR = DATA_DIR / "docs"
-os.makedirs(DOC_DIR, exist_ok=True)

 ### DATETIME SCHEMA FOR DAILY NOTE FOLDER HIERARCHY FORMATTING ###
 YEAR_FMT = os.getenv("YEAR_FMT")

@@ -80,125 +64,15 @@ MONTH_FMT = os.getenv("MONTH_FMT")
 DAY_FMT = os.getenv("DAY_FMT")
 DAY_SHORT_FMT = os.getenv("DAY_SHORT_FMT")

-### Large language model
-LLM_URL = os.getenv("LLM_URL", "http://localhost:11434")
-LLM_SYS_MSG = os.getenv("SYSTEM_MSG", "You are a helpful AI assistant.")
-DEFAULT_LLM = os.getenv("DEFAULT_LLM", "llama3")
-DEFAULT_VISION = os.getenv("DEFAULT_VISION", "llava")
-DEFAULT_VOICE = os.getenv("DEFAULT_VOICE", "Luna")
-DEFAULT_11L_VOICE = os.getenv("DEFAULT_11L_VOICE", "Victoria")
-OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-### Summarization
-SUMMARY_CHUNK_SIZE = int(os.getenv("SUMMARY_CHUNK_SIZE", 16384)) # measured in tokens
-SUMMARY_CHUNK_OVERLAP = int(os.getenv("SUMMARY_CHUNK_OVERLAP", 256)) # measured in tokens
-SUMMARY_TPW = float(os.getenv("SUMMARY_TPW", 1.3)) # measured in tokens
-SUMMARY_LENGTH_RATIO = int(os.getenv("SUMMARY_LENGTH_RATIO", 4)) # measured as original to length ratio
-SUMMARY_MIN_LENGTH = int(os.getenv("SUMMARY_MIN_LENGTH", 150)) # measured in tokens
-SUMMARY_MODEL = os.getenv("SUMMARY_MODEL", "dolphin-llama3:8b-256k")
-SUMMARY_TOKEN_LIMIT = int(os.getenv("SUMMARY_TOKEN_LIMIT", 16384))
-SUMMARY_INSTRUCT = os.getenv('SUMMARY_INSTRUCT', "You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the sumary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.")
-SUMMARY_INSTRUCT_TTS = os.getenv('SUMMARY_INSTRUCT_TTS', "You are an AI assistant that provides email summaries for Sanjay. Your response will undergo Text-To-Speech conversion and added to Sanjay's private podcast. Providing adequate context (Sanjay did not send this question to you, he will only hear your response) but aiming for conciseness and precision, and bearing in mind the Text-To-Speech conversion (avoiding acronyms and formalities), summarize the following email.")
-
-
-### Stable diffusion
-IMG_DIR = DATA_DIR / "img" / "images"
-os.makedirs(IMG_DIR, exist_ok=True)
-IMG_WORKFLOWS_DIR = DATA_DIR / "img" / "workflows"
-os.makedirs(IMG_WORKFLOWS_DIR, exist_ok=True)
-COMFYUI_URL = os.getenv('COMFYUI_URL', "http://localhost:8188")
-COMFYUI_DIR = Path(os.getenv('COMFYUI_DIR'))
-COMFYUI_OUTPUT_DIR = COMFYUI_DIR / 'output'
-COMFYUI_LAUNCH_CMD = os.getenv('COMFYUI_LAUNCH_CMD', 'mamba activate comfyui && python main.py')
-IMG_CONFIG_PATH = CONFIG_DIR / 'img.yaml'
-
-### ASR
-ASR_DIR = DATA_DIR / "asr"
-os.makedirs(ASR_DIR, exist_ok=True)
-WHISPER_CPP_DIR = Path(Dir.HOME) / str(os.getenv("WHISPER_CPP_DIR"))
-WHISPER_CPP_MODELS = os.getenv('WHISPER_CPP_MODELS', 'NULL,VOID').split(',')
-
-### TTS
-PREFERRED_TTS = os.getenv("PREFERRED_TTS", "None")
-TTS_DIR = DATA_DIR / "tts"
-os.makedirs(TTS_DIR, exist_ok=True)
-VOICE_DIR = TTS_DIR / 'voices'
-os.makedirs(VOICE_DIR, exist_ok=True)
-PODCAST_DIR = os.getenv("PODCAST_DIR", TTS_DIR / "sideloads")
-os.makedirs(PODCAST_DIR, exist_ok=True)
-TTS_OUTPUT_DIR = TTS_DIR / 'outputs'
-os.makedirs(TTS_OUTPUT_DIR, exist_ok=True)
-TTS_SEGMENTS_DIR = TTS_DIR / 'segments'
-os.makedirs(TTS_SEGMENTS_DIR, exist_ok=True)
-ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY")
-
-
-### Calendar & email account
-MS365_TOGGLE = True if os.getenv("MS365_TOGGLE") == "True" else False
-ICAL_TOGGLE = True if os.getenv("ICAL_TOGGLE") == "True" else False
-ICS_PATH = DATA_DIR / 'calendar.ics' # deprecated now, but maybe revive?
-ICALENDARS = os.getenv('ICALENDARS', 'NULL,VOID').split(',')
-
-EMAIL_CONFIG = CONFIG_DIR / "email.yaml"
-EMAIL_LOGS = LOGS_DIR / "email"
-os.makedirs(EMAIL_LOGS, exist_ok = True)
-
-### Courtlistener & other webhooks
-COURTLISTENER_DOCKETS_DIR = DATA_DIR / "courtlistener" / "dockets"
-os.makedirs(COURTLISTENER_DOCKETS_DIR, exist_ok=True)
-COURTLISTENER_SEARCH_DIR = DATA_DIR / "courtlistener" / "cases"
-os.makedirs(COURTLISTENER_SEARCH_DIR, exist_ok=True)
-CASETABLE_PATH = DATA_DIR / "courtlistener" / "cases.json"
-COURTLISTENER_API_KEY = os.getenv("COURTLISTENER_API_KEY")
-COURTLISTENER_BASE_URL = os.getenv("COURTLISTENER_BASE_URL", "https://www.courtlistener.com")
-COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"
-
 ### Keys & passwords
-PUBLIC_KEY_FILE = os.getenv("PUBLIC_KEY_FILE", 'you_public_key.asc')
-PUBLIC_KEY = (BASE_DIR.parent / PUBLIC_KEY_FILE).read_text()
 MAC_ID = os.getenv("MAC_ID")
 MAC_UN = os.getenv("MAC_UN")
 MAC_PW = os.getenv("MAC_PW")
 TIMING_API_KEY = os.getenv("TIMING_API_KEY")
 TIMING_API_URL = os.getenv("TIMING_API_URL", "https://web.timingapp.com/api/v1")
-PHOTOPRISM_URL = os.getenv("PHOTOPRISM_URL")
-PHOTOPRISM_USER = os.getenv("PHOTOPRISM_USER")
-PHOTOPRISM_PASS = os.getenv("PHOTOPRISM_PASS")
-
-### Tailscale
-TS_IP = ipaddress.ip_address(os.getenv("TS_IP", "NULL"))
-TS_SUBNET = ipaddress.ip_network(os.getenv("TS_SUBNET")) if os.getenv("TS_SUBNET") else None
-TS_ID = os.getenv("TS_ID", "NULL")
-TS_TAILNET = os.getenv("TS_TAILNET", "NULL")
-TS_ADDRESS = f"http://{TS_ID}.{TS_TAILNET}.ts.net"
-
-### Cloudflare
-CF_API_BASE_URL = os.getenv("CF_API_BASE_URL")
-CF_TOKEN = os.getenv("CF_TOKEN")
-CF_IP = DATA_DIR / "cf_ip.txt" # to be deprecated soon
-CF_DOMAINS_PATH = DATA_DIR / "cf_domains.json" # to be deprecated soon
-
 ### Caddy - not fully implemented
 API.URL = os.getenv("API.URL")
 CADDY_SERVER = os.getenv('CADDY_SERVER', None)
 CADDYFILE_PATH = os.getenv("CADDYFILE_PATH", "") if CADDY_SERVER is not None else None
 CADDY_API_KEY = os.getenv("CADDY_API_KEY")

-
-### Microsoft Graph
-MS365_CLIENT_ID = os.getenv('MS365_CLIENT_ID')
-MS365_SECRET = os.getenv('MS365_SECRET')
-MS365_TENANT_ID = os.getenv('MS365_TENANT_ID')
-MS365_CERT_PATH = CONFIG_DIR / 'MS365' / '.cert.pem' # deprecated
-MS365_KEY_PATH = CONFIG_DIR / 'MS365' / '.cert.key' # deprecated
-MS365_KEY = MS365_KEY_PATH.read_text()
-MS365_TOKEN_PATH = CONFIG_DIR / 'MS365' / '.token.txt'
-MS365_THUMBPRINT = os.getenv('MS365_THUMBPRINT')
-
-MS365_LOGIN_URL = os.getenv("MS365_LOGIN_URL", "https://login.microsoftonline.com")
-MS365_AUTHORITY_URL = f"{MS365_LOGIN_URL}/{MS365_TENANT_ID}"
-MS365_REDIRECT_PATH = os.getenv("MS365_REDIRECT_PATH", "https://api.sij.ai/o365/oauth_redirect")
-MS365_SCOPE = os.getenv("MS365_SCOPE", 'Calendars.Read,Calendars.ReadWrite,offline_access').split(',')
-
-### Maintenance
-GARBAGE_COLLECTION_INTERVAL = 60 * 60 # Run cleanup every hour
-GARBAGE_TTL = 60 * 60 * 24 # Delete files older than 24 hours
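The net effect of the __init__.py rewrite is that per-domain settings now come from Configuration objects loaded out of YAML instead of dozens of os.getenv() constants. A rough sketch of how a consumer reads after the change, with attribute names taken from the llm.yaml-example and dirs.yaml changes further down in this commit:

    # Sketch of the new access pattern; assumes the llm.yaml/dirs.yaml
    # layouts shown later in this commit.
    from sijapi import LLM, Dir

    ollama_url = LLM.url                  # replaces LLM_URL = os.getenv("LLM_URL", ...)
    summary_model = LLM.summary.model     # replaces SUMMARY_MODEL = os.getenv(...)
    tz_cache = Dir.DATA / "tzcache.json"  # Dir resolves the '{{ ... }}' chain in dirs.yaml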
@@ -4,20 +4,13 @@ from fastapi import FastAPI, Request, HTTPException, Response
 from fastapi.responses import JSONResponse
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.requests import ClientDisconnect
 from hypercorn.asyncio import serve
 from hypercorn.config import Config as HypercornConfig
 import sys
 import asyncio
-import httpx
 import argparse
-import json
 import ipaddress
 import importlib
-from dotenv import load_dotenv
-from pathlib import Path
-from datetime import datetime
 import argparse

 parser = argparse.ArgumentParser(description='Personal API.')

@@ -25,17 +18,12 @@ parser.add_argument('--debug', action='store_true', help='Set log level to L.INF
 parser.add_argument('--test', type=str, help='Load only the specified module.')
 args = parser.parse_args()

-from . import L, API, ROUTER_DIR
+from . import L, API, Dir
 L.setup_from_args(args)
-from sijapi import ROUTER_DIR
-
-# Initialize a FastAPI application
-api = FastAPI()
-
+app = FastAPI()

 # CORSMiddleware
-api.add_middleware(
+app.add_middleware(
     CORSMiddleware,
     allow_origins=['*'],
     allow_credentials=True,

@@ -63,41 +51,22 @@ class SimpleAPIKeyMiddleware(BaseHTTPMiddleware):
                 content={"detail": "Invalid or missing API key"}
             )
         response = await call_next(request)
-        # L.DEBUG(f"Request from {client_ip} is complete")
         return response

-# Add the middleware to your FastAPI app
-api.add_middleware(SimpleAPIKeyMiddleware)
+app.add_middleware(SimpleAPIKeyMiddleware)

-canceled_middleware = """
-@api.middleware("http")
-async def log_requests(request: Request, call_next):
-    L.DEBUG(f"Incoming request: {request.method} {request.url}")
-    L.DEBUG(f"Request headers: {request.headers}")
-    L.DEBUG(f"Request body: {await request.body()}")
-    response = await call_next(request)
-    return response
-
-async def log_outgoing_request(request):
-    L.INFO(f"Outgoing request: {request.method} {request.url}")
-    L.DEBUG(f"Request headers: {request.headers}")
-    L.DEBUG(f"Request body: {request.content}")
-"""
-
-@api.exception_handler(HTTPException)
+@app.exception_handler(HTTPException)
 async def http_exception_handler(request: Request, exc: HTTPException):
     L.ERR(f"HTTP Exception: {exc.status_code} - {exc.detail}")
     L.ERR(f"Request: {request.method} {request.url}")
     return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})

-@api.middleware("http")
+@app.middleware("http")
 async def handle_exception_middleware(request: Request, call_next):
     try:
         response = await call_next(request)
     except RuntimeError as exc:
         if str(exc) == "Response content longer than Content-Length":
-            # Update the Content-Length header to match the actual response content length
             response.headers["Content-Length"] = str(len(response.body))
         else:
             raise

@@ -105,21 +74,20 @@ async def handle_exception_middleware(request: Request, call_next):


 def load_router(router_name):
-    router_file = ROUTER_DIR / f'{router_name}.py'
+    router_file = Dir.ROUTERS / f'{router_name}.py'
     L.DEBUG(f"Attempting to load {router_name.capitalize()}...")
     if router_file.exists():
         module_path = f'sijapi.routers.{router_name}'
         try:
             module = importlib.import_module(module_path)
             router = getattr(module, router_name)
-            api.include_router(router)
+            app.include_router(router)
             L.INFO(f"{router_name.capitalize()} router loaded.")
         except (ImportError, AttributeError) as e:
             L.CRIT(f"Failed to load router {router_name}: {e}")
     else:
         L.ERR(f"Router file for {router_name} does not exist.")


 def main(argv):
     if args.test:
         load_router(args.test)

@@ -132,8 +100,7 @@ def main(argv):

     config = HypercornConfig()
     config.bind = [API.BIND]  # Use the resolved BIND value
-    asyncio.run(serve(api, config))
+    asyncio.run(serve(app, config))


 if __name__ == "__main__":
     main(sys.argv[1:])
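load_router() imports sijapi.routers.<name> lazily and expects each module to expose an APIRouter attribute named after the module. A self-contained sketch of that convention, independent of the project wiring (the routers/ layout is assumed, not shown in this hunk):

    # Standalone sketch of the dynamic router-loading convention.
    import importlib
    from fastapi import FastAPI

    app = FastAPI()

    def load_router(app: FastAPI, name: str) -> None:
        module = importlib.import_module(f"sijapi.routers.{name}")
        # Convention from load_router() above: the module defines an
        # APIRouter bound to a variable with the module's own name.
        app.include_router(getattr(module, name))

    for name in ("asr", "cal"):  # names taken from routers touched in this commit
        load_router(app, name)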
@@ -2,6 +2,7 @@
 import asyncio
 import json
 import math
+import multiprocessing
 import os
 import re
 from concurrent.futures import ThreadPoolExecutor
@@ -13,35 +14,196 @@ from zoneinfo import ZoneInfo
 import aiofiles
 import aiohttp
 import asyncpg
+from typing import Union, Any
+from pydantic import BaseModel, Field, ConfigDict
 import reverse_geocoder as rg
-import yaml
 from dotenv import load_dotenv
-from pydantic import BaseModel, Field, create_model
+from pydantic import BaseModel, Field, create_model, ConfigDict, validator
 from srtm import get_data
 from timezonefinder import TimezoneFinder
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union, TypeVar, Type
+import yaml
+from typing import List, Optional
+from dotenv import load_dotenv

 T = TypeVar('T', bound='Configuration')

+
+class HierarchicalPath(os.PathLike):
+    def __init__(self, path=None, base=None, home=None):
+        self.home = Path(home).expanduser() if home else Path.home()
+        self.base = Path(base).resolve() if base else self._find_base()
+        self.path = self._resolve_path(path) if path else self.base
+
+    def _find_base(self):
+        current = Path(__file__).resolve().parent
+        while current.name != 'sijapi' and current != current.parent:
+            current = current.parent
+        return current
+
+    def _resolve_path(self, path):
+        if isinstance(path, HierarchicalPath):
+            return path.path
+        if isinstance(path, Path):
+            return path
+        path = self._resolve_placeholders(path)
+        if path.startswith(('~', 'HOME')):
+            return self.home / path.lstrip('~').lstrip('HOME').lstrip('/')
+        if path.startswith('/'):
+            return Path(path)
+        return self._resolve_relative_path(self.base / path)
+
+    def _resolve_placeholders(self, path):
+        placeholders = {
+            'HOME': str(self.home),
+            'BASE': str(self.base),
+        }
+        pattern = r'\{\{\s*([^}]+)\s*\}\}'
+        return re.sub(pattern, lambda m: placeholders.get(m.group(1).strip(), m.group(0)), path)
+
+    def _resolve_relative_path(self, path):
+        if path.is_file():
+            return path
+        if path.is_dir():
+            return path
+        yaml_path = path.with_suffix('.yaml')
+        if yaml_path.is_file():
+            return yaml_path
+        return path
+
+    def __truediv__(self, other):
+        return HierarchicalPath(self.path / other, base=self.base, home=self.home)
+
+    def __getattr__(self, name):
+        return HierarchicalPath(self.path / name, base=self.base, home=self.home)
+
+    def __str__(self):
+        return str(self.path)
+
+    def __repr__(self):
+        return f"HierarchicalPath('{self.path}')"
+
+    def __fspath__(self):
+        return os.fspath(self.path)
+
+    def __eq__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) == str(other)
+        return False
+
+    def __lt__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) < str(other)
+        return False
+
+    def __le__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) <= str(other)
+        return False
+
+    def __gt__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) > str(other)
+        return False
+
+    def __ge__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) >= str(other)
+        return False
+
+    def __hash__(self):
+        return hash(self.path)
+
+    def __getattribute__(self, name):
+        try:
+            return super().__getattribute__(name)
+        except AttributeError:
+            return getattr(self.path, name)
+
+
+class Dir(BaseModel):
+    HOME: HierarchicalPath = Field(default_factory=lambda: HierarchicalPath(Path.home()))
+    BASE: HierarchicalPath | None = None
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    @classmethod
+    def determine_base(cls) -> HierarchicalPath:
+        return HierarchicalPath(HierarchicalPath()._find_base())
+
+    def __init__(self, **data):
+        super().__init__(**data)
+        if self.BASE is None:
+            self.BASE = self.determine_base()
+
+    @classmethod
+    def load(cls, yaml_path: Union[str, Path] = None) -> 'Dir':
+        yaml_path = cls._resolve_path(yaml_path) if yaml_path else None
+        if yaml_path:
+            with open(yaml_path, 'r') as file:
+                config_data = yaml.safe_load(file)
+            print(f"Loaded directory configuration from {yaml_path}")
+            resolved_data = cls.resolve_placeholders(config_data)
+        else:
+            resolved_data = {}
+        return cls(**resolved_data)
+
+    @classmethod
+    def _resolve_path(cls, path: Union[str, Path]) -> Path:
+        base_path = cls.determine_base().path.parent
+        path = Path(path)
+        if not path.suffix:
+            path = base_path / 'sijapi' / 'config' / f"{path.name}.yaml"
+        elif not path.is_absolute():
+            path = base_path / path
+        return path
+
+    @classmethod
+    def resolve_placeholders(cls, data: Any) -> Any:
+        if isinstance(data, dict):
+            return {k: cls.resolve_placeholders(v) for k, v in data.items()}
+        elif isinstance(data, list):
+            return [cls.resolve_placeholders(v) for v in data]
+        elif isinstance(data, str):
+            return cls.resolve_string_placeholders(data)
+        return data
+
+    @classmethod
+    def resolve_string_placeholders(cls, value: str) -> Any:
+        if value.startswith('{{') and value.endswith('}}'):
+            parts = value.strip('{}').strip().split('.')
+            result = cls.HOME
+            for part in parts:
+                result = getattr(result, part)
+            return result
+        elif value == '*~*':
+            return cls.HOME
+        return HierarchicalPath(value)
+
+    def __getattr__(self, name):
+        return HierarchicalPath(self.BASE / name.lower(), base=self.BASE.path, home=self.HOME.path)
+
+    def model_dump(self, *args, **kwargs):
+        d = super().model_dump(*args, **kwargs)
+        return {k: str(v) for k, v in d.items()}
+
+
-import os
-from pathlib import Path
-from typing import Union, Optional, Any, Dict, List
-import yaml
-import re
-from pydantic import BaseModel, create_model
-from dotenv import load_dotenv
-
 class Configuration(BaseModel):
-    HOME: Path = Path.home()
+    HOME: Path = Field(default_factory=Path.home)
     _dir_config: Optional['Configuration'] = None
+    dir: Dir = Field(default_factory=Dir)
+
+    class Config:
+        arbitrary_types_allowed = True
+        extra = "allow"  # This allows extra fields

     @classmethod
     def load(cls, yaml_path: Union[str, Path], secrets_path: Optional[Union[str, Path]] = None, dir_config: Optional['Configuration'] = None) -> 'Configuration':
         yaml_path = cls._resolve_path(yaml_path, 'config')
         if secrets_path:
             secrets_path = cls._resolve_path(secrets_path, 'config')

         try:
             with yaml_path.open('r') as file:
                 config_data = yaml.safe_load(file)
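HierarchicalPath treats attribute access and the / operator interchangeably as path joins, and _resolve_relative_path falls back to a .yaml sibling when a bare name does not exist on disk. A rough behavior sketch, using a stand-in base directory:

    # Behavior sketch for HierarchicalPath as added above; '/srv/sijapi' is a stand-in.
    hp = HierarchicalPath(base='/srv/sijapi')

    print(hp / 'data')            # HierarchicalPath('/srv/sijapi/data')
    print(hp.data.asr)            # attribute chaining joins segments the same way
    print(hp / 'config' / 'llm')  # resolves to .../config/llm.yaml if only the .yaml exists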
@@ -54,25 +216,31 @@ class Configuration(BaseModel):
             print(f"Loaded secrets data from {secrets_path}")
             config_data.update(secrets_data)
-            # Ensure HOME is set
-            if config_data.get('HOME') is None:
-                config_data['HOME'] = str(Path.home())
-                print(f"HOME was None in config, set to default: {config_data['HOME']}")
-
-            load_dotenv()
-            instance = cls.create_dynamic_model(**config_data)
+            instance = cls(**config_data)
             instance._dir_config = dir_config or instance
             resolved_data = instance.resolve_placeholders(config_data)
-            instance = cls.create_dynamic_model(**resolved_data)
-            instance._dir_config = dir_config or instance
-            return instance
+            return cls._create_nested_config(resolved_data)
         except Exception as e:
             print(f"Error loading configuration: {str(e)}")
             raise

+    @classmethod
+    def _create_nested_config(cls, data):
+        if isinstance(data, dict):
+            return cls(**{k: cls._create_nested_config(v) for k, v in data.items()})
+        elif isinstance(data, list):
+            return [cls._create_nested_config(item) for item in data]
+        else:
+            return data
+
+    def __getattr__(self, name):
+        value = self.__dict__.get(name)
+        if isinstance(value, dict):
+            return Configuration(**value)
+        return value
+
     @classmethod
     def _resolve_path(cls, path: Union[str, Path], default_dir: str) -> Path:
         base_path = Path(__file__).parent.parent  # This will be two levels up from this file

@@ -106,7 +274,7 @@ class Configuration(BaseModel):
             elif len(parts) == 2 and parts[0] == 'ENV':
                 replacement = os.getenv(parts[1], '')
             else:
-                replacement = value  # Keep original if not recognized
+                replacement = value

             value = value.replace('{{' + match + '}}', str(replacement))

@@ -115,26 +283,6 @@ class Configuration(BaseModel):
             return Path(value).expanduser()
         return value

-    @classmethod
-    def create_dynamic_model(cls, **data):
-        for key, value in data.items():
-            if isinstance(value, dict):
-                data[key] = cls.create_dynamic_model(**value)
-            elif isinstance(value, list) and all(isinstance(item, dict) for item in value):
-                data[key] = [cls.create_dynamic_model(**item) for item in value]
-
-        DynamicModel = create_model(
-            f'Dynamic{cls.__name__}',
-            __base__=cls,
-            **{k: (Any, v) for k, v in data.items()}
-        )
-        return DynamicModel(**data)
-
-    class Config:
-        extra = "allow"
-        arbitrary_types_allowed = True
-

 class APIConfig(BaseModel):
     HOST: str
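With create_dynamic_model gone, Configuration.load now returns nested Configuration instances built by _create_nested_config, and __getattr__ wraps any remaining dicts on access. A sketch of what that enables, assuming a YAML layout like the llm.yaml-example added later in this commit:

    # Hypothetical access pattern; key names follow the llm.yaml-example below.
    llm = Configuration.load('llm', 'secrets')

    print(llm.url)            # top-level scalar
    print(llm.summary.model)  # nested dict surfaces as a nested Configuration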
@@ -146,23 +294,25 @@ class APIConfig(BaseModel):
     MODULES: Any  # This will be replaced with a dynamic model
     TZ: str
     KEYS: List[str]
+    MAX_CPU_CORES: int = Field(default_factory=lambda: min(
+        int(os.getenv("MAX_CPU_CORES", multiprocessing.cpu_count() // 2)), multiprocessing.cpu_count()
+    ))

     @classmethod
     def load(cls, config_path: Union[str, Path], secrets_path: Union[str, Path]):
         config_path = cls._resolve_path(config_path, 'config')
         secrets_path = cls._resolve_path(secrets_path, 'config')

-        # Load main configuration
         with open(config_path, 'r') as file:
             config_data = yaml.safe_load(file)

-        print(f"Loaded main config: {config_data}")  # Debug print
+        print(f"Loaded main config: {config_data}")

         # Load secrets
         try:
             with open(secrets_path, 'r') as file:
                 secrets_data = yaml.safe_load(file)
-            print(f"Loaded secrets: {secrets_data}")  # Debug print
+            print(f"Loaded secrets: {secrets_data}")
         except FileNotFoundError:
             print(f"Secrets file not found: {secrets_path}")
             secrets_data = {}

@@ -173,7 +323,7 @@ class APIConfig(BaseModel):
         # Resolve internal placeholders
         config_data = cls.resolve_placeholders(config_data)

-        print(f"Resolved config: {config_data}")  # Debug print
+        print(f"Resolved config: {config_data}")

         # Handle KEYS placeholder
         if isinstance(config_data.get('KEYS'), list) and len(config_data['KEYS']) == 1:

@@ -185,7 +335,7 @@ class APIConfig(BaseModel):
             secret_key = parts[1]
             if secret_key in secrets_data:
                 config_data['KEYS'] = secrets_data[secret_key]
-                print(f"Replaced KEYS with secret: {config_data['KEYS']}")  # Debug print
+                print(f"Replaced KEYS with secret: {config_data['KEYS']}")
             else:
                 print(f"Secret key '{secret_key}' not found in secrets file")
         else:

@@ -201,10 +351,10 @@ class APIConfig(BaseModel):
                 modules_fields[key] = (bool, value)
             else:
                 raise ValueError(f"Invalid value for module {key}: {value}. Must be 'on', 'off', True, or False.")

         DynamicModulesConfig = create_model('DynamicModulesConfig', **modules_fields)
         config_data['MODULES'] = DynamicModulesConfig(**modules_data)

         return cls(**config_data)

     @classmethod

@@ -236,12 +386,12 @@ class APIConfig(BaseModel):
                 resolved_data[key] = [resolve_value(item) for item in value]
             else:
                 resolved_data[key] = resolve_value(value)

         # Resolve BIND separately to ensure HOST and PORT are used
         if 'BIND' in resolved_data:
             resolved_data['BIND'] = resolved_data['BIND'].replace('{{ HOST }}', str(resolved_data['HOST']))
             resolved_data['BIND'] = resolved_data['BIND'].replace('{{ PORT }}', str(resolved_data['PORT']))

         return resolved_data

     def __getattr__(self, name: str) -> Any:

@@ -253,8 +403,6 @@ class APIConfig(BaseModel):
     def active_modules(self) -> List[str]:
         return [module for module, is_active in self.MODULES.__dict__.items() if is_active]
-
-

 class Location(BaseModel):
     latitude: float
     longitude: float

@@ -482,7 +630,6 @@ class Geocoder:
             timezone=await self.timezone(latitude, longitude)
         )
-

     def round_coords(self, lat: float, lon: float, decimal_places: int = 2) -> Tuple[float, float]:
         return (round(lat, decimal_places), round(lon, decimal_places))
@@ -583,55 +730,75 @@ class Database(BaseModel):
         await conn.close()

     @classmethod
-    def from_env(cls):
-        import os
-        return cls(
-            host=os.getenv("DB_HOST", "localhost"),
-            port=int(os.getenv("DB_PORT", 5432)),
-            user=os.getenv("DB_USER"),
-            password=os.getenv("DB_PASSWORD"),
-            database=os.getenv("DB_NAME"),
-            db_schema=os.getenv("DB_SCHEMA")
-        )
+    def from_yaml(cls, yaml_path: Union[str, Path]):
+        yaml_path = Path(yaml_path)
+        if not yaml_path.is_absolute():
+            yaml_path = Path(__file__).parent / 'config' / yaml_path
+        with open(yaml_path, 'r') as file:
+            config = yaml.safe_load(file)
+        return cls(**config)

     def to_dict(self):
         return self.dict(exclude_none=True)


 class IMAPConfig(BaseModel):
     username: str
     password: str
     host: str
     port: int
-    encryption: str = None
+    encryption: Optional[str]

 class SMTPConfig(BaseModel):
     username: str
     password: str
     host: str
     port: int
-    encryption: str = None
+    encryption: Optional[str]

 class AutoResponder(BaseModel):
     name: str
     style: str
     context: str
     ollama_model: str = "llama3"
+    image_prompt: Optional[str] = None
+    image_scene: Optional[str] = None
+
+class AccountAutoResponder(BaseModel):
+    name: str
+    smtp: str
     whitelist: List[str]
     blacklist: List[str]
-    image_prompt: Optional[str] = None
-    image_scene: Optional[str] = None
-    smtp: SMTPConfig

 class EmailAccount(BaseModel):
     name: str
-    refresh: int
     fullname: Optional[str]
     bio: Optional[str]
+    refresh: int
     summarize: bool = False
     podcast: bool = False
-    imap: IMAPConfig
-    autoresponders: Optional[List[AutoResponder]]
+    imap: str
+    autoresponders: List[AccountAutoResponder]

+class EmailConfiguration(Configuration):
+    imaps: List[IMAPConfig]
+    smtps: List[SMTPConfig]
+    autoresponders: List[AutoResponder]
+    accounts: List[EmailAccount]
+
+    def get_imap(self, username: str) -> Optional[IMAPConfig]:
+        return next((imap for imap in self.imaps if imap.username == username), None)
+
+    def get_smtp(self, username: str) -> Optional[SMTPConfig]:
+        return next((smtp for smtp in self.smtps if smtp.username == username), None)
+
+    def get_autoresponder(self, name: str) -> Optional[AutoResponder]:
+        return next((ar for ar in self.autoresponders if ar.name == name), None)
+
+    def get_account(self, name: str) -> Optional[EmailAccount]:
+        return next((account for account in self.accounts if account.name == name), None)
+
 class EmailContact(BaseModel):
     email: str
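Database.from_env() gives way to from_yaml(), so connection settings move out of environment variables and into a config file resolved against sijapi/config/. A plausible db.yaml — field names inferred from the removed from_env() kwargs, since the actual file is not shown in this commit:

    # Sketch only: the db.yaml keys below are inferred, not confirmed.
    # host: localhost
    # port: 5432
    # user: sij
    # password: '{{ SECRET.DB_PASSWORD }}'
    # database: sijapi
    # db_schema: public
    from sijapi.classes import Database
    db = Database.from_yaml('db.yaml')  # bare names resolve to sijapi/config/db.yaml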
@@ -643,4 +810,4 @@ class IncomingEmail(BaseModel):
     recipients: List[EmailContact]
     subject: str
     body: str
     attachments: List[dict] = []
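The email model refactor above decouples accounts from inline server blocks: EmailAccount.imap is now a string key, and EmailConfiguration's get_* helpers resolve names to the matching IMAPConfig/SMTPConfig/AutoResponder entries. A usage sketch with invented account and server names:

    # Invented names throughout; illustrates the new indirection.
    email_cfg = EmailConfiguration.load('email', 'secrets')

    account = email_cfg.get_account('personal')  # -> EmailAccount or None
    imap = email_cfg.get_imap(account.imap)      # resolve the string key to an IMAPConfig
    for ar in account.autoresponders:            # AccountAutoResponder entries
        smtp = email_cfg.get_smtp(ar.smtp)       # likewise resolved by name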
@@ -291,7 +291,7 @@ MS365_SECRET=¿SECRET? # <--- enter your app secret (found in Azure
 MS365_SCOPE='basic,calendar_all,Calendars.Read,Calendars.ReadWrite,offline_access'
 MS365_TOKEN_FILE=oauth_token.txt
 MS365_LOGIN_URL='https://login.microsoftonline.com'
-MS365_REDIRECT_PATH=¿SECRET? # <--- e.g. http://localhost:4444/o365/oauth_redirect
+MS365_REDIRECT_PATH=¿SECRET? # <--- e.g. http://localhost:4444/MS365/oauth_redirect
 #─── notes: ───────────────────────────────────────────────────────────────────────────────
 #
 # # MS365_CLIENT_ID, _TENANT_ID, _SECRET, AND _SCOPES must be obtained from Microsoft
sijapi/config/asr.yaml-example (new file, 11 lines)
@@ -0,0 +1,11 @@
+DIR: '{{ DIR.HOME }}/whisper.cpp'
+MODELS:
+  - small
+  - base
+  - base-en
+  - tiny
+  - medium
+  - medium-en
+  - large
+  - large-v2
+  - large-v3
sijapi/config/cal.yaml-example (new file, 19 lines)
@@ -0,0 +1,19 @@
+MS365:
+  STATUS: OFF
+  AUTH:
+    TENANT: bad78048-a6e0-47b1-a24b-403c444aa349
+    CLIENT_ID: ce8cbd24-f146-4dc7-8ee7-51d9b69dec59
+    LOGIN: 'https://login.microsoftonline.com'
+    REDIRECT: 'https://api.sij.ai/MS365/oauth_redirect'
+    SCOPES:
+      - basic
+      - calendar_all
+      - Calendars.Read
+      - Calendars.ReadWrite
+      - offline_access
+    SECRET: '{{ SECRET.MS365_SECRET }}'
+    TOKEN_FILE: '{{ DIR.CONFIG }}/ms365/oauth_token.txt'
+ICAL:
+  STATUS: ON
+  CALENDARS:
+    - ''
sijapi/config/courtlistener.yaml-example (new file, 6 lines)
@@ -0,0 +1,6 @@
+url:
+  base: 'https://www.courtlistener.com'
+  dockets: '{{ url.base }}/api/rest/v3/dockets/'
+API_KEY: '{{ SECRET.COURTLISTENER_API_KEY }}'
+DOCKETS: '{{ DIR.DATA }}/cl/dockets'
+SEARCHES: '{{ DIR.DATA }}/cl/searches'
@@ -1,6 +1,16 @@
 HOME: ~
-BASE: '{{ HOME }}/sijapi'
+BASE: '{{ HOME }}/workshop/sijapi'
 SIJAPI: '{{ BASE }}/sijapi'
 CONFIG: '{{ SIJAPI }}/config'
+CONFIG.email: '{{ CONFIG }}/email.yaml'
+CONFIG.img: '{{ CONFIG }}/img.yaml'
+CONFIG.news: '{{ CONFIG }}/news.yaml'
+SECRETS: '{{ CONFIG }}/secrets.yaml'
 DATA: '{{ SIJAPI }}/data'
+DATA.ALERTS: '{{ DATA }}/alerts'
+DATA.ASR: '{{ DATA }}/asr'
+DATA.BASE: '{{ DATA }}/db'
+DATA.IMG: '{{ DATA }}/img'
+DATA.TTS: '{{ DATA }}/tts'
+TTS.VOICES: '{{ TTS }}/voices'
 LOGS: '{{ SIJAPI }}/logs'
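Values in the dirs config chain through '{{ KEY }}' references: BASE builds on HOME, SIJAPI on BASE, and so on down to the leaf paths. A minimal standalone sketch of that substitution, independent of the Dir class:

    # Minimal stand-in for the '{{ KEY }}' chaining used by dirs.yaml.
    import re

    dirs = {
        'HOME': '~',
        'BASE': '{{ HOME }}/workshop/sijapi',
        'SIJAPI': '{{ BASE }}/sijapi',
        'DATA': '{{ SIJAPI }}/data',
    }

    resolved = {}
    for key, value in dirs.items():  # order matters: later keys see earlier ones
        resolved[key] = re.sub(r'\{\{\s*(\w+)\s*\}\}', lambda m: resolved[m.group(1)], value)

    print(resolved['DATA'])  # ~/workshop/sijapi/sijapi/data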
sijapi/config/llm.yaml-example (new file, 17 lines)
@@ -0,0 +1,17 @@
+url: http://localhost:11434
+sys: 'You are a helpful AI assistant.'
+tpw: 1.3s
+chat:
+  model: dolphin-mistral
+vision:
+  model: llava-llama3
+summary:
+  model: dolphin-llama3:8b-256k
+  chunk-size: 16384
+  chunk-overlap: 256
+  length-ratio: 4
+  min-length: 64
+  token-limit: 16384
+  instruct: 'You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the sumary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.'
+functions:
+  model: 'command-r'
sijapi/config/obsidian.yaml-example (new file, 6 lines)
@@ -0,0 +1,6 @@
+DAILY_NOTE:
+  YEAR: '%Y'
+  MONTH: '%Y-%m %B'
+  DAY: '%Y-%m-%d %A'
+  DAY_SHORT: '%Y-%m-%d'
+DIR: '{{ HOME_DIR }}/Nextcloud/notes' # you can specify the absolute path or use '{{ HOME_DIR }}' followed by a relative path
sijapi/config/tailscale.yaml-example (new file, 6 lines)
@@ -0,0 +1,6 @@
+ID: sij-mbp16
+IP: 100.64.64.20
+SUBNET: 100.64.64.0/24
+MDNS: starling-sailfin.ts.net
+API_KEY: '{{ SECRET.TAILSCALE_API_KEY }}'
+ADDRESS: 'http://{{ ID }}.{{ MDNS }}'
|
@ -15,7 +15,7 @@ from fastapi.responses import JSONResponse
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from sijapi import L, ASR_DIR, WHISPER_CPP_MODELS, GARBAGE_COLLECTION_INTERVAL, GARBAGE_TTL, WHISPER_CPP_DIR, MAX_CPU_CORES
|
from sijapi import L, API, Dir, ASR
|
||||||
|
|
||||||
asr = APIRouter()
|
asr = APIRouter()
|
||||||
|
|
||||||
|
@ -83,11 +83,11 @@ async def transcribe_endpoint(
|
||||||
async def transcribe_audio(file_path, params: TranscribeParams):
|
async def transcribe_audio(file_path, params: TranscribeParams):
|
||||||
L.DEBUG(f"Transcribing audio file from {file_path}...")
|
L.DEBUG(f"Transcribing audio file from {file_path}...")
|
||||||
file_path = await convert_to_wav(file_path)
|
file_path = await convert_to_wav(file_path)
|
||||||
model = params.model if params.model in WHISPER_CPP_MODELS else 'small'
|
model = params.model if params.model in ASR.MODELS else 'small'
|
||||||
model_path = WHISPER_CPP_DIR / 'models' / f'ggml-{model}.bin'
|
model_path = ASR.WHISPER_DIR.models / f'ggml-{model}.bin'
|
||||||
command = [str(WHISPER_CPP_DIR / 'build' / 'bin' / 'main')]
|
command = [str(ASR.WHISPER_DIR.build.bin.main)]
|
||||||
command.extend(['-m', str(model_path)])
|
command.extend(['-m', str(model_path)])
|
||||||
command.extend(['-t', str(max(1, min(params.threads or MAX_CPU_CORES, MAX_CPU_CORES)))])
|
command.extend(['-t', str(max(1, min(params.threads or API.MAX_CPU_CORES, API.MAX_CPU_CORES)))])
|
||||||
command.extend(['-np']) # Always enable no-prints
|
command.extend(['-np']) # Always enable no-prints
|
||||||
|
|
||||||
|
|
||||||
|
@ -187,7 +187,7 @@ async def run_transcription(command, file_path):
|
||||||
return stdout.decode().strip()
|
return stdout.decode().strip()
|
||||||
|
|
||||||
async def convert_to_wav(file_path: str):
|
async def convert_to_wav(file_path: str):
|
||||||
wav_file_path = os.path.join(ASR_DIR, f"{uuid.uuid4()}.wav")
|
wav_file_path = os.path.join(Dir.data.asr, f"{uuid.uuid4()}.wav")
|
||||||
proc = await asyncio.create_subprocess_exec(
|
proc = await asyncio.create_subprocess_exec(
|
||||||
"ffmpeg", "-y", "-i", file_path, "-acodec", "pcm_s16le", "-ar", "16000", "-ac", "1", wav_file_path,
|
"ffmpeg", "-y", "-i", file_path, "-acodec", "pcm_s16le", "-ar", "16000", "-ac", "1", wav_file_path,
|
||||||
stdout=asyncio.subprocess.PIPE,
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
|
|
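transcribe_audio ultimately shells out to the whisper.cpp main binary with the model, thread, and no-print flags assembled above. A stripped-down sketch of that invocation with literal placeholder paths (the router derives them from ASR.WHISPER_DIR at runtime):

    # Standalone sketch of the whisper.cpp call built in transcribe_audio.
    import asyncio

    async def transcribe(wav_path: str) -> str:
        command = [
            '/path/to/whisper.cpp/build/bin/main',               # placeholder binary path
            '-m', '/path/to/whisper.cpp/models/ggml-small.bin',  # placeholder model path
            '-t', '4',   # thread count; the router caps this at API.MAX_CPU_CORES
            '-np',       # no-prints, as in the router
            '-f', wav_path,
        ]
        proc = await asyncio.create_subprocess_exec(
            *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        stdout, _ = await proc.communicate()
        return stdout.decode().strip()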
@ -1,7 +1,7 @@
|
||||||
'''
|
'''
|
||||||
Calendar module using macOS Calendars and/or Microsoft 365 via its Graph API.
|
Calendar module using macOS Calendars and/or Microsoft 365 via its Graph API.
|
||||||
Depends on:
|
Depends on:
|
||||||
LOGGER, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
|
LOGGER, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, Cal.MS365.auth.token
|
||||||
'''
|
'''
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status, Request
|
from fastapi import APIRouter, Depends, HTTPException, status, Request
|
||||||
from fastapi.responses import RedirectResponse, JSONResponse
|
 from fastapi.responses import RedirectResponse, JSONResponse
@@ -16,45 +16,46 @@ from typing import Dict, List, Any
 from datetime import datetime, timedelta
 from Foundation import NSDate, NSRunLoop
 import EventKit as EK
-from sijapi import L, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
+from sijapi import L, Cal
 from sijapi.routers import loc
 
 cal = APIRouter()
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
 timeout = httpx.Timeout(12)
 
-if MS365_TOGGLE is True:
-    L.CRIT(f"Visit https://api.sij.ai/o365/login to obtain your Microsoft 365 authentication token.")
+print(f"Configuration MS365: {Cal.MS365}")
+if Cal.MS365.toggle == 'on':
+    L.CRIT(f"Visit https://api.sij.ai/MS365/login to obtain your Microsoft 365 authentication token.")
 
-    @cal.get("/o365/login")
+    @cal.get("/MS365/login")
     async def login():
-        L.DEBUG(f"Received request to /o365/login")
-        L.DEBUG(f"SCOPE: {MS365_SCOPE}")
-        if not MS365_SCOPE:
+        L.DEBUG(f"Received request to /MS365/login")
+        L.DEBUG(f"SCOPE: {Cal.MS365.auth.scopes}")
+        if not Cal.MS365.auth.scopes:
             L.ERR("No scopes defined for authorization.")
             raise HTTPException(
                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                 detail="No scopes defined for authorization."
             )
-        authorization_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/authorize?client_id={MS365_CLIENT_ID}&response_type=code&redirect_uri={MS365_REDIRECT_PATH}&scope={'+'.join(MS365_SCOPE)}"
+        authorization_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/authorize?client_id={Cal.MS365.client}&response_type=code&redirect_uri={Cal.MS365.auth.redirect}&scope={'+'.join(Cal.MS365.auth.scopes)}"
         L.INFO(f"Redirecting to authorization URL: {authorization_url}")
         return RedirectResponse(authorization_url)
 
-    @cal.get("/o365/oauth_redirect")
+    @cal.get("/MS365/oauth_redirect")
     async def oauth_redirect(code: str = None, error: str = None):
-        L.DEBUG(f"Received request to /o365/oauth_redirect")
+        L.DEBUG(f"Received request to /MS365/oauth_redirect")
         if error:
             L.ERR(f"OAuth2 Error: {error}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST, detail="OAuth2 Error"
             )
         L.INFO(f"Requesting token with authorization code: {code}")
-        token_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/token"
+        token_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/token"
         data = {
-            "client_id": MS365_CLIENT_ID,
-            "client_secret": MS365_SECRET,
+            "client_id": Cal.MS365.client,
+            "client_secret": Cal.MS365.auth.secret,
             "code": code,
-            "redirect_uri": MS365_REDIRECT_PATH,
+            "redirect_uri": Cal.MS365.auth.redirect,
             "grant_type": "authorization_code"
         }
         async with httpx.AsyncClient(timeout=timeout) as client:
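An aside on the authorization URL: the hand-rolled f-string above does no percent-encoding, so a scope or redirect URI containing reserved characters would break the query. A minimal sketch of the same construction through urllib.parse, assuming Cal.MS365.auth.scopes is a list of strings as the '+'.join() implies; the helper name build_authorization_url is illustrative, not part of the commit:

    from urllib.parse import urlencode

    def build_authorization_url(ms365) -> str:
        # Produces the same authorize URL as the f-string above, with proper escaping.
        params = {
            "client_id": ms365.client,
            "response_type": "code",
            "redirect_uri": ms365.auth.redirect,
            "scope": " ".join(ms365.auth.scopes),
        }
        return f"{ms365.auth.url}/oauth2/v2.0/authorize?{urlencode(params)}"

Called as build_authorization_url(Cal.MS365), this yields a string that RedirectResponse can take directly.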
@@ -73,9 +74,9 @@ if MS365_TOGGLE is True:
             detail="Failed to obtain access token"
         )
 
-    @cal.get("/o365/me")
+    @cal.get("/MS365/me")
     async def read_items():
-        L.DEBUG(f"Received request to /o365/me")
+        L.DEBUG(f"Received request to /MS365/me")
         token = await load_token()
         if not token:
             raise HTTPException(
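The /MS365/me handler is cut off by the hunk boundary, but the usual next step is a Graph call carrying the stored bearer token; a minimal sketch under that assumption (the body below is not the commit's code):

    async def fetch_graph_me(token: dict) -> dict:
        # Graph's /me endpoint returns the signed-in user's profile as JSON.
        headers = {"Authorization": f"Bearer {token['access_token']}"}
        async with httpx.AsyncClient(timeout=timeout) as client:
            response = await client.get("https://graph.microsoft.com/v1.0/me", headers=headers)
        response.raise_for_status()  # surfaces 401s from expired tokens
        return response.json()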
@@ -102,16 +103,16 @@ if MS365_TOGGLE is True:
         L.DEBUG(f"Saving token: {token}")
         try:
             token["expires_at"] = int(time.time()) + token["expires_in"]
-            with open(MS365_TOKEN_PATH, "w") as file:
+            with open(Cal.MS365.auth.token, "w") as file:
                 json.dump(token, file)
-            L.DEBUG(f"Saved token to {MS365_TOKEN_PATH}")
+            L.DEBUG(f"Saved token to {Cal.MS365.auth.token}")
         except Exception as e:
             L.ERR(f"Failed to save token: {e}")
 
     async def load_token():
-        if os.path.exists(MS365_TOKEN_PATH):
+        if os.path.exists(Cal.MS365.auth.token):
             try:
-                with open(MS365_TOKEN_PATH, "r") as file:
+                with open(Cal.MS365.auth.token, "r") as file:
                     token = json.load(file)
             except FileNotFoundError:
                 L.ERR("Token file not found.")
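Because save_token() stamps an absolute expires_at into the payload, expiry can be checked with plain clock arithmetic rather than a probing request. A minimal sketch; the 60-second skew margin is an illustrative choice, and the name is deliberately distinct from the is_token_expired used elsewhere in this file:

    import time

    def token_is_stale(token: dict, skew: int = 60) -> bool:
        # Treat tokens within `skew` seconds of expiry as already expired, so a
        # request issued right at the boundary doesn't fail with a 401 mid-flight.
        return int(time.time()) >= token.get("expires_at", 0) - skew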
@@ -128,7 +129,7 @@ if MS365_TOGGLE is True:
                 L.DEBUG("No token found.")
                 return None
         else:
-            L.ERR(f"No file found at {MS365_TOKEN_PATH}")
+            L.ERR(f"No file found at {Cal.MS365.auth.token}")
             return None
 
 
@@ -146,13 +147,13 @@ if MS365_TOGGLE is True:
         return response.status_code == 401
 
     async def get_new_token_with_refresh_token(refresh_token):
-        token_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/token"
+        token_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/token"
         data = {
-            "client_id": MS365_CLIENT_ID,
-            "client_secret": MS365_SECRET,
+            "client_id": Cal.MS365.client,
+            "client_secret": Cal.MS365.auth.secret,
             "refresh_token": refresh_token,
             "grant_type": "refresh_token",
-            "scope": " ".join(MS365_SCOPE),
+            "scope": " ".join(Cal.MS365.auth.scopes),
         }
         async with httpx.AsyncClient(timeout=timeout) as client:
             response = await client.post(token_url, data=data)
@@ -164,6 +165,36 @@ if MS365_TOGGLE is True:
             L.ERR("Failed to refresh access token")
             return None
 
+    async def get_ms365_events(start_date: datetime, end_date: datetime):
+        token = await load_token()
+        if token:
+            if await is_token_expired(token):
+                await refresh_token()
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Access token not found",
+            )
+        # this looks like it might need updating to use tz-aware datetimes converted to UTC...
+        graph_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=start/dateTime ge '{start_date}T00:00:00' and end/dateTime le '{end_date}T23:59:59'"
+        headers = {
+            "Authorization": f"Bearer {token['access_token']}",
+            "Prefer": 'outlook.timezone="Pacific Standard Time"',
+        }
+        async with httpx.AsyncClient() as client:
+            response = await client.get(graph_url, headers=headers)
+
+        if response.status_code != 200:
+            L.ERR("Failed to retrieve events from Microsoft 365")
+            raise HTTPException(
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail="Failed to retrieve events",
+            )
+
+        ms_events = response.json().get("value", [])
+        return ms_events
+
+
 
     async def refresh_token():
         token = await load_token()
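The inline comment in get_ms365_events is right: the $filter is built from bare dates with hardcoded midnight boundaries, and a tz-aware conversion to UTC would be safer. A minimal sketch of that fix using only the stdlib; the helper is an assumption, not the commit's code:

    from datetime import datetime, timezone

    def graph_filter_range(start_date: datetime, end_date: datetime) -> str:
        # astimezone() treats naive datetimes as system-local, then normalizes
        # to UTC so Graph compares consistent instants.
        fmt = "%Y-%m-%dT%H:%M:%SZ"
        start_utc = start_date.astimezone(timezone.utc).strftime(fmt)
        end_utc = end_date.astimezone(timezone.utc).strftime(fmt)
        return f"start/dateTime ge '{start_utc}' and end/dateTime le '{end_utc}'"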
@@ -223,12 +254,12 @@ async def get_events_endpoint(start_date: str, end_date: str):
 
 async def get_events(start_dt: datetime, end_dt: datetime) -> List:
     combined_events = []
-    if MS365_TOGGLE:
+    if Cal.MS365.toggle == "on":
         ms_events = await get_ms365_events(start_dt, end_dt)
         combined_events.extend(ms_events)  # Use extend instead of append
 
-    if ICAL_TOGGLE:
-        calendar_ids = ICALENDARS
+    if Cal.ICAL.toggle == "on":
+        calendar_ids = Cal.ICAL.calendars
         macos_events = get_macos_calendar_events(start_dt, end_dt, calendar_ids)
         combined_events.extend(macos_events)  # Use extend instead of append
 
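get_events() concatenates the two sources in backend order; if callers expect chronology, a stable sort on the start field is a cheap guarantee. A sketch, assuming each event dict carries a parseable 'start' string as the payloads in this file do; the sort is an illustrative addition, not part of the commit:

    from typing import Dict, List
    from dateutil.parser import parse as dateutil_parse

    def sort_events_chronologically(events: List[Dict]) -> List[Dict]:
        # Stable sort preserves source order (MS365 first, then iCal) for ties.
        return sorted(events, key=lambda ev: dateutil_parse(ev["start"]))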
@@ -236,108 +267,80 @@ async def get_events(start_dt: datetime, end_dt: datetime) -> List:
     return parsed_events
 
 
-def get_macos_calendar_events(start_date: datetime, end_date: datetime, calendar_ids: List[str] = None) -> List[Dict]:
+if Cal.ICAL.toggle == "on":
+    def get_macos_calendar_events(start_date: datetime, end_date: datetime, calendar_ids: List[str] = None) -> List[Dict]:
         event_store = EK.EKEventStore.alloc().init()
 
         # Request access to EventKit
         def request_access() -> bool:
             access_granted = []
 
             def completion_handler(granted, error):
                 if error is not None:
                     L.ERR(f"Error: {error}")
                 access_granted.append(granted)
                 # Notify the main thread that the completion handler has executed
                 with access_granted_condition:
                     access_granted_condition.notify()
 
             access_granted_condition = threading.Condition()
             with access_granted_condition:
                 event_store.requestAccessToEntityType_completion_(0, completion_handler)  # 0 corresponds to EKEntityTypeEvent
                 # Wait for the completion handler to be called
                 access_granted_condition.wait(timeout=10)
                 # Verify that the handler was called and access_granted is not empty
                 if access_granted:
                     return access_granted[0]
                 else:
                     L.ERR("Request access timed out or failed")
                     return False
 
         if not request_access():
             L.ERR("Access to calendar data was not granted")
             return []
 
         ns_start_date = datetime_to_nsdate(start_date)
         ns_end_date = datetime_to_nsdate(end_date)
 
         # Retrieve all calendars
         all_calendars = event_store.calendarsForEntityType_(0)  # 0 corresponds to EKEntityTypeEvent
         if calendar_ids:
             selected_calendars = [cal for cal in all_calendars if cal.calendarIdentifier() in calendar_ids]
         else:
             selected_calendars = all_calendars
 
         # Filtering events by selected calendars
         predicate = event_store.predicateForEventsWithStartDate_endDate_calendars_(ns_start_date, ns_end_date, selected_calendars)
         events = event_store.eventsMatchingPredicate_(predicate)
 
         event_list = []
         for event in events:
             # Check if event.attendees() returns None
             if event.attendees():
                 attendees = [{'name': att.name(), 'email': att.emailAddress()} for att in event.attendees() if att.emailAddress()]
             else:
                 attendees = []
 
             # Format the start and end dates properly
             start_date_str = event.startDate().descriptionWithLocale_(None)
             end_date_str = event.endDate().descriptionWithLocale_(None)
 
             event_data = {
                 "subject": event.title(),
                 "id": event.eventIdentifier(),
                 "start": start_date_str,
                 "end": end_date_str,
                 "bodyPreview": event.notes() if event.notes() else '',
                 "attendees": attendees,
                 "location": event.location() if event.location() else '',
                 "onlineMeetingUrl": '',  # Defaulting to empty as macOS EventKit does not provide this
                 "showAs": 'busy',  # Default to 'busy'
                 "isAllDay": event.isAllDay()
             }
 
             event_list.append(event_data)
 
         return event_list
 
-async def get_ms365_events(start_date: datetime, end_date: datetime):
-    token = await load_token()
-    if token:
-        if await is_token_expired(token):
-            await refresh_token()
-    else:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Access token not found",
-        )
-    # this looks like it might need updating to use tz-aware datetimes converted to UTC...
-    graph_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=start/dateTime ge '{start_date}T00:00:00' and end/dateTime le '{end_date}T23:59:59'"
-    headers = {
-        "Authorization": f"Bearer {token['access_token']}",
-        "Prefer": 'outlook.timezone="Pacific Standard Time"',
-    }
-    async with httpx.AsyncClient() as client:
-        response = await client.get(graph_url, headers=headers)
-
-    if response.status_code != 200:
-        L.ERR("Failed to retrieve events from Microsoft 365")
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to retrieve events",
-        )
-
-    ms_events = response.json().get("value", [])
-    return ms_events
 
 
 async def parse_calendar_for_day(range_start: datetime, range_end: datetime, events: List[Dict[str, Any]]):
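get_macos_calendar_events depends on a datetime_to_nsdate helper that this diff never shows; a plausible minimal version via the PyObjC Foundation bridge, assuming naive datetimes are system-local (the body is an assumption, not the commit's code):

    from datetime import datetime
    from Foundation import NSDate

    def datetime_to_nsdate(dt: datetime) -> NSDate:
        # datetime.timestamp() interprets naive values as local time; NSDate
        # takes seconds since the Unix epoch, which matches EventKit's clock.
        return NSDate.dateWithTimeIntervalSince1970_(dt.timestamp())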
@@ -130,12 +130,10 @@ async def add_config(record: DNSRecordRequest):
         raise HTTPException(status_code=400, detail=f"Failed to create A record: {error_message} (Code: {error_code})")
 
     # Update Caddyfile
     await update_caddyfile(full_domain, caddy_ip, port)
 
     return {"message": "Configuration added successfully"}
 
-
-
 @cf.get("/cf/list_zones")
 async def list_zones_endpoint():
     domains = await list_zones()
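add_config() pairs the new A record with a Caddyfile entry; a minimal sketch of what update_caddyfile might append, where the Caddyfile path and the bare reverse_proxy block are assumptions rather than this repo's actual template:

    async def update_caddyfile(full_domain: str, caddy_ip: str, port: int,
                               caddyfile_path: str = "/etc/caddy/Caddyfile"):
        # Append a minimal site block routing the new subdomain to its backend.
        block = f"\n{full_domain} {{\n    reverse_proxy {caddy_ip}:{port}\n}}\n"
        with open(caddyfile_path, "a") as f:
            f.write(block)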
@@ -20,7 +20,7 @@ import ssl
 import yaml
 from typing import List, Dict, Optional, Set
 from datetime import datetime as dt_datetime
-from sijapi import L, PODCAST_DIR, DEFAULT_VOICE, EMAIL_CONFIG, EMAIL_LOGS
+from sijapi import L, TTS, Email, Dir
 from sijapi.routers import img, loc, tts, llm
 from sijapi.utilities import clean_text, assemble_journal_path, extract_text, prefix_lines
 from sijapi.classes import EmailAccount, IMAPConfig, SMTPConfig, IncomingEmail, EmailContact, AutoResponder
@@ -28,13 +28,11 @@ from sijapi.classes import EmailAccount
 
 email = APIRouter(tags=["private"])
 
-
 def load_email_accounts(yaml_path: str) -> List[EmailAccount]:
     with open(yaml_path, 'r') as file:
         config = yaml.safe_load(file)
     return [EmailAccount(**account) for account in config['accounts']]
 
-
 def get_imap_connection(account: EmailAccount):
     return Imbox(account.imap.host,
                  username=account.imap.username,
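load_email_accounts() plus get_imap_connection() is the router's whole bootstrap path; a minimal usage sketch, with the YAML path and the account index chosen for illustration:

    # Parse the YAML into typed EmailAccount models, then open an IMAP session
    # for the first account; Imbox negotiates SSL or STARTTLS per the config.
    accounts = load_email_accounts('sijapi/config/email.yaml')
    with get_imap_connection(accounts[0]) as imbox:
        unread = imbox.messages(unread=True)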
@@ -43,8 +41,6 @@ def get_imap_connection(account: EmailAccount):
                  ssl=account.imap.encryption == 'SSL',
                  starttls=account.imap.encryption == 'STARTTLS')
 
-
-
 def get_smtp_connection(autoresponder):
     # Create an SSL context that doesn't verify certificates
     context = ssl.create_default_context()
@@ -83,7 +79,6 @@ def get_smtp_connection(autoresponder):
         L.ERR(f"Unencrypted connection failed: {str(e)}")
         raise
 
-
 async def send_response(to_email: str, subject: str, body: str, profile: AutoResponder, image_attachment: Path = None) -> bool:
     server = None
     try:
@@ -149,7 +144,7 @@ async def extract_attachments(attachments) -> List[str]:
     return attachment_texts
 
 async def process_account_archival(account: EmailAccount):
-    summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
+    summarized_log = Dir.logs.email / account.name / "summarized.txt"
     os.makedirs(summarized_log.parent, exist_ok = True)
 
     while True:
@@ -196,7 +191,7 @@ async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False
     attachment_texts = await extract_attachments(this_email.attachments)
     email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
     summary = await llm.summarize_text(email_content)
-    await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = podcast, output_path = tts_path)
+    await tts.local_tts(text_content = summary, speed = 1.1, voice = TTS.xtts.voice, podcast = podcast, output_path = tts_path)
     md_summary = f'```ad.summary\n'
     md_summary += f'title: {this_email.subject}\n'
     md_summary += f'{summary}\n'
@@ -266,7 +261,7 @@ def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount
 
 
 async def process_account_autoresponding(account: EmailAccount):
-    EMAIL_AUTORESPONSE_LOG = EMAIL_LOGS / account.name / "autoresponded.txt"
+    EMAIL_AUTORESPONSE_LOG = Dir.logs.email / account.name / "autoresponded.txt"
     os.makedirs(EMAIL_AUTORESPONSE_LOG.parent, exist_ok=True)
 
     while True:
@@ -405,7 +405,7 @@ async def load_workflow(workflow_path: str, workflow:str):
     return json.load(file)
 
 
 async def update_prompt_and_get_key(workflow: dict, post: dict, positive: str):
     '''
     Recurses through the workflow searching for and substituting the dynamic values for API_PrePrompt, API_StylePrompt, API_NegativePrompt, width, height, and seed (random integer).
     Even more important, it finds and returns the key to the filepath where the file is saved, which we need to decipher status when generation is complete.
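update_prompt_and_get_key's docstring describes a depth-first walk that substitutes prompt values and remembers where the output file path lives; a minimal sketch of that shape, assuming ComfyUI-style nested dicts and lists, with the placeholder names taken from the docstring and the filename_prefix heuristic an assumption:

    import random

    def walk_and_substitute(node, values: dict, found_keys: list):
        # Depth-first over dicts and lists: swap placeholder strings for their
        # runtime values, randomize seeds, and record output-path keys.
        if isinstance(node, dict):
            for key, child in node.items():
                if isinstance(child, str) and child in values:
                    node[key] = values[child]      # e.g. "API_PrePrompt" -> prompt text
                elif key == "seed":
                    node[key] = random.getrandbits(32)
                elif key == "filename_prefix":
                    found_keys.append(key)         # where generation status is later read
                else:
                    walk_and_substitute(child, values, found_keys)
        elif isinstance(node, list):
            for child in node:
                walk_and_substitute(child, values, found_keys)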
@@ -26,7 +26,7 @@ import tempfile
 import shutil
 import html2text
 import markdown
-from sijapi import L, LLM_SYS_MSG, DEFAULT_LLM, DEFAULT_VISION, REQUESTS_DIR, OBSIDIAN_CHROMADB_COLLECTION, OBSIDIAN_VAULT_DIR, DOC_DIR, OPENAI_API_KEY, DEFAULT_VOICE, SUMMARY_INSTRUCT, SUMMARY_CHUNK_SIZE, SUMMARY_TPW, SUMMARY_CHUNK_OVERLAP, SUMMARY_LENGTH_RATIO, SUMMARY_TOKEN_LIMIT, SUMMARY_MIN_LENGTH, SUMMARY_MODEL
+from sijapi import L, Dir, API, LLM, TTS
 from sijapi.utilities import convert_to_unix_time, sanitize_filename, ocr_pdf, clean_text, should_use_ocr, extract_text_from_pdf, extract_text_from_docx, read_text_file, str_to_bool, get_extension
 from sijapi.routers import tts
 from sijapi.routers.asr import transcribe_audio
@@ -401,7 +401,7 @@ def query_gpt4(llmPrompt: List = [], system_msg: str = "", user_msg: str = "", m
         {"role": "system", "content": system_msg},
         {"role": "user", "content": user_msg}
     ]
-    LLM = OpenAI(api_key=OPENAI_API_KEY)
-    response = LLM.chat.completions.create(
+    client = OpenAI(api_key=LLM.OPENAI_API_KEY)  # local name; rebinding LLM here would shadow the module-level config import
+    response = client.chat.completions.create(
         model="gpt-4",
         messages=messages,
@@ -13,7 +13,7 @@ from zoneinfo import ZoneInfo
 from dateutil.parser import parse as dateutil_parse
 from typing import Optional, List, Union
 from datetime import datetime
-from sijapi import L, DB, TZ, NAMED_LOCATIONS, GEO
+from sijapi import L, DB, TZ, GEO
 from sijapi.classes import Location
 from sijapi.utilities import haversine
 
@@ -26,8 +26,6 @@ from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 
 from pathlib import Path
-
-from sijapi.classes import Configuration
 from sijapi import API, L, Dir, News, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO
 from sijapi.utilities import sanitize_filename, assemble_journal_path, assemble_archive_path
 from sijapi.routers import llm, tts, asr, loc