Auto-update: Sat Jun 29 13:04:54 PDT 2024

parent f6cbe5b3b7
commit dcd973d757

8 changed files with 137 additions and 92 deletions
@@ -17,30 +17,29 @@ BASE_DIR = Path(__file__).resolve().parent
 CONFIG_DIR = BASE_DIR / "config"
 ENV_PATH = CONFIG_DIR / ".env"
 LOGS_DIR = BASE_DIR / "logs"
 
 L = Logger("Central", LOGS_DIR)
 os.makedirs(LOGS_DIR, exist_ok=True)
 load_dotenv(ENV_PATH)
 
 ### API essentials
 API_CONFIG_PATH = CONFIG_DIR / "api.yaml"
-SECRETS_CONFIG_PATH = CONFIG_DIR / "secrets.yaml"
-API = APIConfig.load_from_yaml(API_CONFIG_PATH, SECRETS_CONFIG_PATH)
+SECRETS_PATH = CONFIG_DIR / "secrets.yaml"
+API = APIConfig.load(API_CONFIG_PATH, SECRETS_PATH)
+DIR_CONFIG_PATH = CONFIG_DIR / "dirs.yaml"
+L.DEBUG(f"Loading DIR configuration from: {DIR_CONFIG_PATH}")
+DIR = Configuration.load(DIR_CONFIG_PATH)
+L.DEBUG(f"Loaded DIR configuration: {DIR.__dict__}")
 
 DB = Database.from_env()
-ROUTERS = os.getenv('ROUTERS', '').split(',')
-PUBLIC_SERVICES = os.getenv('PUBLIC_SERVICES', '').split(',')
-GLOBAL_API_KEY = os.getenv("GLOBAL_API_KEY")
-# HOST_NET and HOST_PORT comprise HOST, which is what the server will bind to
-HOST_NET = os.getenv("HOST_NET", "127.0.0.1")
-HOST_PORT = int(os.getenv("HOST_PORT", 4444))
-HOST = f"{HOST_NET}:{HOST_PORT}"
-BASE_URL = os.getenv("BASE_URL", f"http://{HOST}")
+HOST = f"{API.BIND}:{API.PORT}"
 LOCAL_HOSTS = [ipaddress.ip_address(localhost.strip()) for localhost in os.getenv('LOCAL_HOSTS', '127.0.0.1').split(',')] + ['localhost']
 SUBNET_BROADCAST = os.getenv("SUBNET_BROADCAST", '10.255.255.255')
-TRUSTED_SUBNETS = [ipaddress.ip_network(subnet.strip()) for subnet in os.getenv('TRUSTED_SUBNETS', '127.0.0.1/32').split(',')]
 MAX_CPU_CORES = min(int(os.getenv("MAX_CPU_CORES", int(multiprocessing.cpu_count()/2))), multiprocessing.cpu_count())
 
 
 ### Directories & general paths
-HOME_DIR = Path.home()
 ROUTER_DIR = BASE_DIR / "routers"
 DATA_DIR = BASE_DIR / "data"
 os.makedirs(DATA_DIR, exist_ok=True)
@@ -50,7 +49,6 @@ REQUESTS_DIR = LOGS_DIR / "requests"
 os.makedirs(REQUESTS_DIR, exist_ok=True)
 REQUESTS_LOG_PATH = LOGS_DIR / "requests.log"
 
 
 ### LOCATE AND WEATHER LOCALIZATIONS
 USER_FULLNAME = os.getenv('USER_FULLNAME')
 USER_BIO = os.getenv('USER_BIO')
@@ -68,7 +66,7 @@ GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)
 ### Obsidian & notes
 ALLOWED_FILENAME_CHARS = r'[^\w \.-]'
 MAX_PATH_LENGTH = 254
-OBSIDIAN_VAULT_DIR = Path(os.getenv("OBSIDIAN_BASE_DIR") or HOME_DIR / "Nextcloud" / "notes")
+OBSIDIAN_VAULT_DIR = Path(os.getenv("OBSIDIAN_BASE_DIR") or Path(DIR.HOME) / "Nextcloud" / "notes")
 OBSIDIAN_JOURNAL_DIR = OBSIDIAN_VAULT_DIR / "journal"
 OBSIDIAN_RESOURCES_DIR = "obsidian/resources"
 OBSIDIAN_BANNER_DIR = f"{OBSIDIAN_RESOURCES_DIR}/banners"
@@ -122,7 +120,7 @@ SD_CONFIG_PATH = CONFIG_DIR / 'sd.yaml'
 ### ASR
 ASR_DIR = DATA_DIR / "asr"
 os.makedirs(ASR_DIR, exist_ok=True)
-WHISPER_CPP_DIR = HOME_DIR / str(os.getenv("WHISPER_CPP_DIR"))
+WHISPER_CPP_DIR = Path(DIR.HOME) / str(os.getenv("WHISPER_CPP_DIR"))
 WHISPER_CPP_MODELS = os.getenv('WHISPER_CPP_MODELS', 'NULL,VOID').split(',')
 
 ### TTS
@@ -185,7 +183,7 @@ CF_IP = DATA_DIR / "cf_ip.txt" # to be deprecated soon
 CF_DOMAINS_PATH = DATA_DIR / "cf_domains.json" # to be deprecated soon
 
 ### Caddy - not fully implemented
-BASE_URL = os.getenv("BASE_URL")
+API.URL = os.getenv("API.URL")
 CADDY_SERVER = os.getenv('CADDY_SERVER', None)
 CADDYFILE_PATH = os.getenv("CADDYFILE_PATH", "") if CADDY_SERVER is not None else None
 CADDY_API_KEY = os.getenv("CADDY_API_KEY")
@@ -29,9 +29,7 @@ args = parser.parse_args()
 from sijapi import L
 L.setup_from_args(args)
 
-from sijapi import HOST, ENV_PATH, GLOBAL_API_KEY, REQUESTS_DIR, ROUTER_DIR, REQUESTS_LOG_PATH, PUBLIC_SERVICES, TRUSTED_SUBNETS, ROUTERS
+from sijapi import ROUTER_DIR
 
 
 # Initialize a FastAPI application
 api = FastAPI()
@@ -52,13 +50,14 @@ class SimpleAPIKeyMiddleware(BaseHTTPMiddleware):
         if request.method == "OPTIONS":
             # Allow CORS preflight requests
             return JSONResponse(status_code=200)
-        if request.url.path not in PUBLIC_SERVICES:
-            if not any(client_ip in subnet for subnet in TRUSTED_SUBNETS):
+        if request.url.path not in API.PUBLIC:
+            trusted_subnets = [ipaddress.ip_network(subnet) for subnet in API.TRUSTED_SUBNETS]
+            if not any(client_ip in subnet for subnet in trusted_subnets):
                 api_key_header = request.headers.get("Authorization")
                 api_key_query = request.query_params.get("api_key")
                 if api_key_header:
                     api_key_header = api_key_header.lower().split("bearer ")[-1]
-                if api_key_header != GLOBAL_API_KEY and api_key_query != GLOBAL_API_KEY:
+                if api_key_header not in API.KEYS and api_key_query not in API.KEYS:
                     L.ERR(f"Invalid API key provided by a requester.")
                     return JSONResponse(
                         status_code=401,
@@ -68,8 +67,10 @@ class SimpleAPIKeyMiddleware(BaseHTTPMiddleware):
         # L.DEBUG(f"Request from {client_ip} is complete")
         return response
 
+# Add the middleware to your FastAPI app
 api.add_middleware(SimpleAPIKeyMiddleware)
 
 
 canceled_middleware = """
 @api.middleware("http")
 async def log_requests(request: Request, call_next):
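
Note: a minimal client-side sketch of how a request passes the rewritten key check above. The base URL, the key value, and the endpoint path are placeholders, not part of this commit; the key would have to appear in API.KEYS.

    import requests

    BASE = "http://localhost:4444"   # placeholder for a running sijapi instance
    KEY = "sk-example"               # placeholder; must be listed under API.KEYS

    # Header-based auth: the middleware lowercases the header, strips a leading
    # "bearer ", and checks membership in API.KEYS.
    requests.get(f"{BASE}/some/endpoint", headers={"Authorization": f"Bearer {KEY}"})

    # Query-based auth is accepted as well.
    requests.get(f"{BASE}/some/endpoint", params={"api_key": KEY})

    # Paths listed in API.PUBLIC and clients inside API.TRUSTED_SUBNETS skip the check entirely.
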
@@ -5,6 +5,13 @@ import asyncio
 import json
 import os
 import re
+from pathlib import Path
+from typing import Union, Dict, Any, Optional
+from pydantic import BaseModel, create_model
+import yaml
+from dotenv import load_dotenv
+import os
+import re
 import yaml
 import math
 from timezonefinder import TimezoneFinder
@@ -31,21 +38,10 @@ from dotenv import load_dotenv
 
 T = TypeVar('T', bound='Configuration')
 
-class ModulesConfig(BaseModel):
-    asr: bool = Field(alias="asr")
-    calendar: bool = Field(alias="calendar")
-    email: bool = Field(alias="email")
-    health: bool = Field(alias="health")
-    hooks: bool = Field(alias="hooks")
-    llm: bool = Field(alias="llm")
-    locate: bool = Field(alias="locate")
-    note: bool = Field(alias="note")
-    sd: bool = Field(alias="sd")
-    serve: bool = Field(alias="serve")
-    time: bool = Field(alias="time")
-    tts: bool = Field(alias="tts")
-    weather: bool = Field(alias="weather")
+from pydantic import BaseModel, Field, create_model
+from typing import List, Optional, Any, Dict
+from pathlib import Path
+import yaml
 
 
 class APIConfig(BaseModel):
     BIND: str
@@ -53,12 +49,12 @@ class APIConfig(BaseModel):
     URL: str
     PUBLIC: List[str]
     TRUSTED_SUBNETS: List[str]
-    MODULES: ModulesConfig
+    MODULES: Any # This will be replaced with a dynamic model
     BaseTZ: Optional[str] = 'UTC'
     KEYS: List[str]
 
     @classmethod
-    def load_from_yaml(cls, config_path: Path, secrets_path: Path):
+    def load(cls, config_path: Path, secrets_path: Path):
         # Load main configuration
         with open(config_path, 'r') as file:
             config_data = yaml.safe_load(file)
@@ -93,66 +89,94 @@ class APIConfig(BaseModel):
                 else:
                     print(f"Invalid secret placeholder format: {placeholder}")
 
-        # Convert 'on'/'off' to boolean for MODULES if they are strings
-        for key, value in config_data['MODULES'].items():
+        # Create dynamic ModulesConfig
+        modules_data = config_data.get('MODULES', {})
+        modules_fields = {}
+        for key, value in modules_data.items():
             if isinstance(value, str):
-                config_data['MODULES'][key] = value.lower() == 'on'
+                modules_fields[key] = (bool, value.lower() == 'on')
             elif isinstance(value, bool):
-                config_data['MODULES'][key] = value
+                modules_fields[key] = (bool, value)
             else:
                 raise ValueError(f"Invalid value for module {key}: {value}. Must be 'on', 'off', True, or False.")
 
+        DynamicModulesConfig = create_model('DynamicModulesConfig', **modules_fields)
+        config_data['MODULES'] = DynamicModulesConfig(**modules_data)
+
         return cls(**config_data)
 
+    def __getattr__(self, name: str) -> Any:
+        if name == 'MODULES':
+            return self.__dict__['MODULES']
+        return super().__getattr__(name)
+
+    @property
+    def active_modules(self) -> List[str]:
+        return [module for module, is_active in self.MODULES.__dict__.items() if is_active]
 
 
 class Configuration(BaseModel):
-    @classmethod
-    def load_config(cls: Type[T], yaml_path: Union[str, Path]) -> Union[T, List[T]]:
-        yaml_path = Path(yaml_path)
-        with yaml_path.open('r') as file:
-            config_data = yaml.safe_load(file)
-
-        # Load environment variables
-        load_dotenv()
-
-        # Resolve placeholders
-        config_data = cls.resolve_placeholders(config_data)
-
-        if isinstance(config_data, list):
-            return [cls.create_dynamic_model(**cfg) for cfg in config_data]
-        elif isinstance(config_data, dict):
-            return cls.create_dynamic_model(**config_data)
-        else:
-            raise ValueError(f"Unsupported YAML structure in {yaml_path}")
+    HOME: Path = Path.home()
+    _dir_config: Optional['Configuration'] = None
 
     @classmethod
-    def resolve_placeholders(cls, data):
+    def load(cls, yaml_path: Union[str, Path], dir_config: Optional['Configuration'] = None) -> 'Configuration':
+        yaml_path = Path(yaml_path)
+        try:
+            with yaml_path.open('r') as file:
+                config_data = yaml.safe_load(file)
+
+            print(f"Loaded configuration data: {config_data}")
+
+            # Ensure HOME is set
+            if config_data.get('HOME') is None:
+                config_data['HOME'] = str(Path.home())
+                print(f"HOME was None in config, set to default: {config_data['HOME']}")
+
+            load_dotenv()
+
+            instance = cls.create_dynamic_model(**config_data)
+            instance._dir_config = dir_config or instance
+
+            resolved_data = instance.resolve_placeholders(config_data)
+            for key, value in resolved_data.items():
+                setattr(instance, key, value)
+
+            return instance
+        except Exception as e:
+            print(f"Error loading configuration from {yaml_path}: {str(e)}")
+            raise
+
+    def resolve_placeholders(self, data: Any) -> Any:
         if isinstance(data, dict):
-            return {k: cls.resolve_placeholders(v) for k, v in data.items()}
+            return {k: self.resolve_placeholders(v) for k, v in data.items()}
         elif isinstance(data, list):
-            return [cls.resolve_placeholders(v) for v in data]
+            return [self.resolve_placeholders(v) for v in data]
         elif isinstance(data, str):
-            return cls.resolve_string_placeholders(data)
+            return self.resolve_string_placeholders(data)
         else:
             return data
 
-    @classmethod
-    def resolve_string_placeholders(cls, value):
+    def resolve_string_placeholders(self, value: str) -> Any:
         pattern = r'\{\{\s*([^}]+)\s*\}\}'
         matches = re.findall(pattern, value)
 
         for match in matches:
             parts = match.split('.')
-            if len(parts) == 2:
-                category, key = parts
-                if category == 'DIR':
-                    replacement = str(Path(os.getenv(key, '')))
-                elif category == 'SECRET':
-                    replacement = os.getenv(key, '')
+            if len(parts) == 1: # Internal reference
+                replacement = getattr(self._dir_config, parts[0], str(Path.home() / parts[0].lower()))
+            elif len(parts) == 2 and parts[0] == 'DIR':
+                replacement = getattr(self._dir_config, parts[1], str(Path.home() / parts[1].lower()))
+            elif len(parts) == 2 and parts[0] == 'ENV':
+                replacement = os.getenv(parts[1], '')
             else:
-                replacement = os.getenv(match, '')
+                replacement = value # Keep original if not recognized
 
-            value = value.replace('{{' + match + '}}', replacement)
+            value = value.replace('{{' + match + '}}', str(replacement))
 
+        # Convert to Path if it looks like a file path
+        if isinstance(value, str) and (value.startswith(('/', '~')) or (':' in value and value[1] == ':')):
+            return Path(value).expanduser()
         return value
 
     @classmethod
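
Note: to make the new placeholder rules concrete, a hedged sketch of how resolve_string_placeholders treats the recognized forms, assuming a Configuration instance cfg whose _dir_config exposes DATA = "/home/user/sijapi/data" (that attribute value and the unspaced {{...}} spelling are illustrative assumptions):

    # Bare name: looked up as an attribute on _dir_config, with a Path.home()-based fallback.
    cfg.resolve_string_placeholders("{{DATA}}/asr")      # -> Path('/home/user/sijapi/data/asr')

    # DIR.<name>: the same lookup on _dir_config.
    cfg.resolve_string_placeholders("{{DIR.DATA}}/asr")  # -> Path('/home/user/sijapi/data/asr')

    # ENV.<name>: read from the process environment, '' if unset.
    cfg.resolve_string_placeholders("{{ENV.HOST_PORT}}") # -> '4444' if HOST_PORT is exported

    # Any other prefix is left unchanged.
    cfg.resolve_string_placeholders("{{SECRET.KEY}}")    # -> '{{SECRET.KEY}}'
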
@@ -172,6 +196,7 @@ class Configuration(BaseModel):
         extra = "allow"
         arbitrary_types_allowed = True
 
 
 class Location(BaseModel):
     latitude: float
     longitude: float
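
Note: a short sketch of how the dynamically built MODULES model behaves on an APIConfig loaded with the new classmethod. The "asr" flag and the printed module list are assumptions about what api.yaml declares; only the mechanism comes from the code above.

    api = APIConfig.load(API_CONFIG_PATH, SECRETS_PATH)

    # MODULES is an instance of a pydantic model generated at load time with
    # create_model(), holding one bool field per key under MODULES in api.yaml.
    if api.MODULES.asr:        # assumes api.yaml declares an "asr" flag ('on'/'off' or true/false)
        print("ASR module enabled")

    # active_modules lists every flag that evaluated truthy, e.g. ['asr', 'llm', 'tts'].
    print(api.active_modules)
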
@@ -52,14 +52,12 @@
 # ─────────────────────────────────────────────────────────────────
 #
 #─── first, bind an ip address and port : ──────────────────────────────────────────
-HOST_NET=0.0.0.0
-HOST_PORT=4444
-BASE_URL=http://localhost:4444 # <--- replace with base URL of reverse proxy, etc
+# <--- replace with base URL of reverse proxy, etc
 #─── notes: ──────────────────────────────────────────────────────────────────────
 #
 # HOST_NET† and HOST_PORT comprise HOST and determine the ip and port the server binds to.
-# BASE_URL is used to assemble URLs, e.g. in the MS authentication flow and for serving images generated on the sd router.
-# BASE_URL should match the base URL used to access sijapi sans endpoint, e.g. http://localhost:4444 or https://api.sij.ai
+# API.URL is used to assemble URLs, e.g. in the MS authentication flow and for serving images generated on the sd router.
+# API.URL should match the base URL used to access sijapi sans endpoint, e.g. http://localhost:4444 or https://api.sij.ai
 #
 # † Take care here! Please ensure you understand the implications of setting HOST_NET to anything besides 127.0.0.1, and configure your firewall and router appropriately if you do. Setting HOST_NET to 0.0.0.0, for instance, opens sijapi to any device the server running it is accessible to — including potentially frightening internet randos (depending how your firewall, router, and NAT are configured).
 #
sijapi/config/dirs.yaml-example (new file, 6 lines)
@@ -0,0 +1,6 @@
+HOME: ~
+BASE: '{{ HOME }}/sijapi'
+SIJAPI: '{{ BASE }}/sijapi'
+CONFIG: '{{ SIJAPI }}/config'
+DATA: '{{ SIJAPI }}/data'
+LOGS: '{{ SIJAPI }}/logs'
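
Note: a sketch of how this example file is meant to be consumed by the new Configuration.load once copied to config/dirs.yaml (the copy step and the printed attributes are assumptions about intended usage):

    DIR = Configuration.load(CONFIG_DIR / "dirs.yaml")

    # 'HOME: ~' parses to None in YAML, so load() substitutes str(Path.home()).
    # Each top-level key becomes an attribute on the dynamically created model,
    # and the {{ ... }} placeholders are resolved against the same instance, so
    # BASE, SIJAPI, CONFIG, DATA and LOGS are intended to chain off HOME.
    print(DIR.HOME)
    print(DIR.DATA)
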
@@ -30,7 +30,7 @@ from dateutil.parser import parse as dateutil_parse
 from fastapi import HTTPException, status
 from pathlib import Path
 from fastapi import APIRouter, Query, HTTPException
-from sijapi import L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, ARCHIVE_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO
+from sijapi import API, L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO
 from sijapi.routers import cal, loc, tts, llm, time, sd, weather, asr
 from sijapi.utilities import assemble_journal_path, assemble_archive_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING
 from sijapi.classes import Location
@@ -399,7 +399,7 @@ async def post_update_daily_weather_and_calendar_and_timeslips(date: str) -> Pla
     await update_dn_weather(date_time)
     await update_daily_note_events(date_time)
     await build_daily_timeslips(date_time)
-    return f"[Refresh]({BASE_URL}/update/note/{date_time.strftime('%Y-%m-%d')}"
+    return f"[Refresh]({API.URL}/update/note/{date_time.strftime('%Y-%m-%d')}"
 
 async def update_dn_weather(date_time: dt_datetime, lat: float = None, lon: float = None):
     L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.")
@@ -25,7 +25,7 @@ import tempfile
 import random
 import re
 import os
-from sijapi import L, HOME_DIR, DATA_DIR, DEFAULT_VOICE, TTS_DIR, TTS_SEGMENTS_DIR, VOICE_DIR, PODCAST_DIR, TTS_OUTPUT_DIR, ELEVENLABS_API_KEY
+from sijapi import L, DEFAULT_VOICE, TTS_SEGMENTS_DIR, VOICE_DIR, PODCAST_DIR, TTS_OUTPUT_DIR, ELEVENLABS_API_KEY
 from sijapi.utilities import sanitize_filename
 
 
@@ -19,23 +19,40 @@ from typing import Optional, Union, Tuple
 import asyncio
 from PIL import Image
 import pandas as pd
+import ipaddress
 from scipy.spatial import cKDTree
 from dateutil.parser import parse as dateutil_parse
 from docx import Document
 from sshtunnel import SSHTunnelForwarder
 from fastapi import Depends, HTTPException, Request, UploadFile
 from fastapi.security.api_key import APIKeyHeader
-from sijapi import L, GLOBAL_API_KEY, YEAR_FMT, MONTH_FMT, DAY_FMT, DAY_SHORT_FMT, OBSIDIAN_VAULT_DIR, ALLOWED_FILENAME_CHARS, MAX_PATH_LENGTH, ARCHIVE_DIR
+from sijapi import L, API, YEAR_FMT, MONTH_FMT, DAY_FMT, DAY_SHORT_FMT, OBSIDIAN_VAULT_DIR, ALLOWED_FILENAME_CHARS, MAX_PATH_LENGTH, ARCHIVE_DIR
 
-api_key_header = APIKeyHeader(name="Authorization")
+api_key_header = APIKeyHeader(name="Authorization", auto_error=False)
 
 def validate_api_key(request: Request, api_key: str = Depends(api_key_header)):
-    if request.url.path not in ["/health", "/ip", "/pgp"]:
-        api_key_query = request.query_params.get("api_key")
-        if api_key_header:
+    if request.url.path in API.PUBLIC:
+        return
+
+    client_ip = ipaddress.ip_address(request.client.host)
+    trusted_subnets = [ipaddress.ip_network(subnet) for subnet in API.TRUSTED_SUBNETS]
+    if any(client_ip in subnet for subnet in trusted_subnets):
+        return
+
+    # Check header-based API key
+    if api_key:
+        if api_key.lower().startswith("bearer "):
             api_key = api_key.lower().split("bearer ")[-1]
-        if api_key != GLOBAL_API_KEY and api_key_query != GLOBAL_API_KEY:
-            raise HTTPException(status_code=401, detail="Invalid or missing API key")
+        if api_key in API.KEYS:
+            return
+
+    # Check query-based API key
+    api_key_query = request.query_params.get("api_key")
+    if api_key_query in API.KEYS:
+        return
+
+    raise HTTPException(status_code=401, detail="Invalid or missing API key")
 
 
 def assemble_archive_path(filename: str, extension: str = ".md", date_time: datetime = datetime.now(), subdir: str = None) -> Tuple[Path, Path]:
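
Note: a small sketch of how the reworked validate_api_key can be attached to an endpoint as a FastAPI dependency. The router, path, and handler are placeholders; only the dependency itself comes from this commit.

    from fastapi import APIRouter, Depends
    from sijapi.utilities import validate_api_key  # assuming this module is sijapi.utilities, as the imports elsewhere in the commit suggest

    router = APIRouter()

    @router.get("/example/private", dependencies=[Depends(validate_api_key)])
    async def example_private():
        # Reached only when the path is listed in API.PUBLIC, the client is inside a
        # trusted subnet, or a key from API.KEYS arrives via the Authorization header
        # or the api_key query parameter.
        return {"ok": True}
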