From f1032ae2ed6f2e83a7b0266373507407cbfce806 Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Thu, 4 Jul 2024 17:43:37 -0700
Subject: [PATCH 1/3] Auto-update: Thu Jul  4 17:43:37 PDT 2024

---
 sijapi/__main__.py     |  5 ++---
 sijapi/routers/note.py | 12 ++++--------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/sijapi/__main__.py b/sijapi/__main__.py
index c5ef1bb..3bd4d2d 100755
--- a/sijapi/__main__.py
+++ b/sijapi/__main__.py
@@ -104,8 +104,6 @@ async def handle_exception_middleware(request: Request, call_next):
     return response
 
 
-
-
 def load_router(router_name):
     router_file = ROUTER_DIR / f'{router_name}.py'
     L.DEBUG(f"Attempting to load {router_name.capitalize()}...")
@@ -121,6 +119,7 @@ def load_router(router_name):
     else:
         L.ERR(f"Router file for {router_name} does not exist.")
 
+
 def main(argv):
     if args.test:
         load_router(args.test)
@@ -131,10 +130,10 @@ def main(argv):
             if getattr(API.MODULES, module_name):
                 load_router(module_name)
     
-   
     config = HypercornConfig()
     config.bind = [API.BIND]  # Use the resolved BIND value
     asyncio.run(serve(api, config))
 
+
 if __name__ == "__main__":
     main(sys.argv[1:])
\ No newline at end of file
diff --git a/sijapi/routers/note.py b/sijapi/routers/note.py
index 1fd2f00..e6da4b8 100644
--- a/sijapi/routers/note.py
+++ b/sijapi/routers/note.py
@@ -404,7 +404,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
             place = places[0]
             lat = place.latitude
             lon = place.longitude
-        
+        tz = await GEO.tz_at(lat, lon)
         L.DEBUG(f"lat: {lat}, lon: {lon}, place: {place}")
         city = GEO.find_override_location(lat, lon)
         if city:
@@ -460,8 +460,8 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                     uvindex = DailyWeather.get('uvindex', 0)
                     uvwarn = f" - :LiRadiation: Caution! UVI today is {uvindex}! :LiRadiation:\n" if (uvindex and uvindex > 8) else ""
 
-                    sunrise = DailyWeather.get('sunrise')
-                    sunset = DailyWeather.get('sunset')
+                    sunrise = await loc.dt(DailyWeather.get('sunrise'), tz)
+                    sunset = await loc.dt(DailyWeather.get('sunset'), tz)
                     srise_str = sunrise.time().strftime("%H:%M")
                     sset_str = sunset.time().strftime("%H:%M")
                     
@@ -494,16 +494,12 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                         times, condition_symbols, temps, winds = [], [], [], []
 
                         for hour in HourlyWeather:
+                            hour['datetime'] = await loc.dt(hour.get('datetime'), tz)
                             if hour.get('datetime').strftime("%H:%M:%S") in HOURLY_COLUMNS_MAPPING.values():
-
                                 times.append(format_hourly_time(hour)) 
-
                                 condition_symbols.append(format_hourly_icon(hour, sunrise, sunset))
-
                                 temps.append(format_hourly_temperature(hour))
-
                                 winds.append(format_hourly_wind(hour))
-                        
                         detailed_forecast += assemble_hourly_data_table(times, condition_symbols, temps, winds)
                         detailed_forecast += f"```\n\n"
                     

From 307d32e798c502f87f17193ce07ecba86906c977 Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Mon, 8 Jul 2024 22:34:44 -0700
Subject: [PATCH 2/3] Currently broken but nearing improvement

---
 .gitignore                               |   2 +-
 sijapi/__init__.py                       | 164 ++----------
 sijapi/__main__.py                       |  51 +---
 sijapi/classes.py                        | 319 +++++++++++++++++------
 sijapi/config/.env-example               |   2 +-
 sijapi/config/asr.yaml-example           |  11 +
 sijapi/config/cal.yaml-example           |  19 ++
 sijapi/config/courtlistener.yaml-example |   6 +
 sijapi/config/dirs.yaml-example          |  12 +-
 sijapi/config/llm.yaml-example           |  17 ++
 sijapi/config/obsidian.yaml-example      |   6 +
 sijapi/config/tailscale.yaml-example     |   6 +
 sijapi/routers/asr.py                    |  12 +-
 sijapi/routers/cal.py                    | 239 ++++++++---------
 sijapi/routers/cf.py                     |   4 +-
 sijapi/routers/email.py                  |  13 +-
 sijapi/routers/img.py                    |   2 +-
 sijapi/routers/llm.py                    |   4 +-
 sijapi/routers/news.py                   |   2 -
 19 files changed, 484 insertions(+), 407 deletions(-)
 create mode 100644 sijapi/config/asr.yaml-example
 create mode 100644 sijapi/config/cal.yaml-example
 create mode 100644 sijapi/config/courtlistener.yaml-example
 create mode 100644 sijapi/config/llm.yaml-example
 create mode 100644 sijapi/config/obsidian.yaml-example
 create mode 100644 sijapi/config/tailscale.yaml-example

diff --git a/.gitignore b/.gitignore
index a0d77b1..168e754 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,7 +10,7 @@ sijapi/data/*.pbf
 sijapi/data/geonames.txt
 sijapi/data/img/images/
 sijapi/config/*.yaml
-sijapi/config/O365/
+sijapi/config/MS365/
 sijapi/local_only/
 sijapi/testbed/
 khoj/
diff --git a/sijapi/__init__.py b/sijapi/__init__.py
index 0aeb2e8..973c55f 100644
--- a/sijapi/__init__.py
+++ b/sijapi/__init__.py
@@ -1,63 +1,49 @@
 # __init__.py
 import os
-import json
-import yaml
 from pathlib import Path
 import ipaddress
-import multiprocessing
 from dotenv import load_dotenv
 from dateutil import tz
 from pathlib import Path
-from pydantic import BaseModel
-from typing import List, Optional
 from .logs import Logger
-from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, Database, Geocoder, APIConfig, Configuration
+from .classes import Database, Geocoder, APIConfig, Configuration, Dir
 
 ### Initial initialization
-BASE_DIR = Path(__file__).resolve().parent
-CONFIG_DIR = BASE_DIR / "config"
-ENV_PATH = CONFIG_DIR / ".env"
-LOGS_DIR = BASE_DIR / "logs"
-
+API = APIConfig.load('api', 'secrets')
+Dir = Dir.load('dirs')
+ENV_PATH = Dir.CONFIG / ".env"
+LOGS_DIR = Dir.LOGS
 L = Logger("Central", LOGS_DIR)
 os.makedirs(LOGS_DIR, exist_ok=True)
 load_dotenv(ENV_PATH)
 
 ### API essentials
-API = APIConfig.load('api', 'secrets')
-Dir = Configuration.load('dirs')
-HOST = f"{API.BIND}:{API.PORT}" 
-LOCAL_HOSTS = [ipaddress.ip_address(localhost.strip()) for localhost in os.getenv('LOCAL_HOSTS', '127.0.0.1').split(',')] + ['localhost']
-SUBNET_BROADCAST = os.getenv("SUBNET_BROADCAST", '10.255.255.255')
-MAX_CPU_CORES = min(int(os.getenv("MAX_CPU_CORES", int(multiprocessing.cpu_count()/2))), multiprocessing.cpu_count())
-DB = Database.from_env()
+DB = Database.from_yaml('db.yaml')
 
+ASR = Configuration.load('asr')
+IMG = Configuration.load('img')
+Cal = Configuration.load('cal', 'secrets')
+Email = Configuration.load('email', 'secrets')
+LLM = Configuration.load('llm', 'secrets')
 News = Configuration.load('news', 'secrets')
-IMG = Configuration.load('img', 'secrets')
+TTS = Configuration.load('tts', 'secrets')
+CourtListener = Configuration.load('courtlistener', 'secrets')
+Tailscale = Configuration.load('tailscale', 'secrets')
+Cloudflare = Configuration.load('cloudflare', 'secrets')
+
 
 ### Directories & general paths
-ROUTER_DIR = BASE_DIR / "routers"
-DATA_DIR = BASE_DIR / "data"
-os.makedirs(DATA_DIR, exist_ok=True)
-ALERTS_DIR = DATA_DIR / "alerts"
-os.makedirs(ALERTS_DIR, exist_ok=True)
 REQUESTS_DIR = LOGS_DIR / "requests"
 os.makedirs(REQUESTS_DIR, exist_ok=True)
 REQUESTS_LOG_PATH = LOGS_DIR / "requests.log"
 
 ### LOCATE AND WEATHER LOCALIZATIONS
-USER_FULLNAME = os.getenv('USER_FULLNAME')
-USER_BIO = os.getenv('USER_BIO')
-HOME_ZIP = os.getenv("HOME_ZIP") # unimplemented
-NAMED_LOCATIONS = CONFIG_DIR / "named-locations.yaml"
 # DB = DATA_DIR / "weatherlocate.db" # deprecated
 VISUALCROSSING_BASE_URL = os.getenv("VISUALCROSSING_BASE_URL", "https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline")
 VISUALCROSSING_API_KEY = os.getenv("VISUALCROSSING_API_KEY")
-GEONAMES_TXT = DATA_DIR / "geonames.txt"
-LOCATIONS_CSV = DATA_DIR / "US.csv"
 TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
-TZ_CACHE = DATA_DIR / "tzcache.json"
-GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)
+TZ_CACHE = Dir.DATA / "tzcache.json"
+GEO = Geocoder(Dir.config.locations, TZ_CACHE)
 
 ### Obsidian & notes
 ALLOWED_FILENAME_CHARS = r'[^\w \.-]'
@@ -71,8 +57,6 @@ OBSIDIAN_BANNER_SCENE = os.getenv("OBSIDIAN_BANNER_SCENE", "wallpaper")
 OBSIDIAN_CHROMADB_COLLECTION = os.getenv("OBSIDIAN_CHROMADB_COLLECTION", "obsidian")
 ARCHIVE_DIR = Path(os.getenv("ARCHIVE_DIR", OBSIDIAN_VAULT_DIR / "archive"))
 os.makedirs(ARCHIVE_DIR, exist_ok=True)
-DOC_DIR = DATA_DIR / "docs"
-os.makedirs(DOC_DIR, exist_ok=True)
 
 ### DATETIME SCHEMA FOR DAILY NOTE FOLDER HIERARCHY FORMATTING ###
 YEAR_FMT = os.getenv("YEAR_FMT")
@@ -80,125 +64,15 @@ MONTH_FMT = os.getenv("MONTH_FMT")
 DAY_FMT = os.getenv("DAY_FMT")
 DAY_SHORT_FMT = os.getenv("DAY_SHORT_FMT")
 
-### Large language model
-LLM_URL = os.getenv("LLM_URL", "http://localhost:11434")
-LLM_SYS_MSG = os.getenv("SYSTEM_MSG", "You are a helpful AI assistant.")
-DEFAULT_LLM = os.getenv("DEFAULT_LLM", "llama3")
-DEFAULT_VISION = os.getenv("DEFAULT_VISION", "llava")
-DEFAULT_VOICE = os.getenv("DEFAULT_VOICE", "Luna")
-DEFAULT_11L_VOICE = os.getenv("DEFAULT_11L_VOICE", "Victoria")
-OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-### Summarization
-SUMMARY_CHUNK_SIZE = int(os.getenv("SUMMARY_CHUNK_SIZE", 16384))  # measured in tokens
-SUMMARY_CHUNK_OVERLAP = int(os.getenv("SUMMARY_CHUNK_OVERLAP", 256))  # measured in tokens
-SUMMARY_TPW = float(os.getenv("SUMMARY_TPW", 1.3))  # measured in tokens
-SUMMARY_LENGTH_RATIO = int(os.getenv("SUMMARY_LENGTH_RATIO", 4))  # measured as original to length ratio
-SUMMARY_MIN_LENGTH = int(os.getenv("SUMMARY_MIN_LENGTH", 150))  # measured in tokens
-SUMMARY_MODEL = os.getenv("SUMMARY_MODEL", "dolphin-llama3:8b-256k")
-SUMMARY_TOKEN_LIMIT = int(os.getenv("SUMMARY_TOKEN_LIMIT", 16384))
-SUMMARY_INSTRUCT = os.getenv('SUMMARY_INSTRUCT', "You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the sumary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.")
-SUMMARY_INSTRUCT_TTS = os.getenv('SUMMARY_INSTRUCT_TTS', "You are an AI assistant that provides email summaries for Sanjay. Your response will undergo Text-To-Speech conversion and added to Sanjay's private podcast. Providing adequate context (Sanjay did not send this question to you, he will only hear your response) but aiming for conciseness and precision, and bearing in mind the Text-To-Speech conversion (avoiding acronyms and formalities), summarize the following email.")
-
-
-### Stable diffusion
-IMG_DIR = DATA_DIR / "img" / "images"
-os.makedirs(IMG_DIR, exist_ok=True)
-IMG_WORKFLOWS_DIR = DATA_DIR / "img" / "workflows"
-os.makedirs(IMG_WORKFLOWS_DIR, exist_ok=True)
-COMFYUI_URL = os.getenv('COMFYUI_URL', "http://localhost:8188")
-COMFYUI_DIR = Path(os.getenv('COMFYUI_DIR'))
-COMFYUI_OUTPUT_DIR = COMFYUI_DIR / 'output'
-COMFYUI_LAUNCH_CMD = os.getenv('COMFYUI_LAUNCH_CMD', 'mamba activate comfyui && python main.py')
-IMG_CONFIG_PATH = CONFIG_DIR / 'img.yaml'
-
-### ASR
-ASR_DIR = DATA_DIR / "asr"
-os.makedirs(ASR_DIR, exist_ok=True)
-WHISPER_CPP_DIR = Path(Dir.HOME) / str(os.getenv("WHISPER_CPP_DIR"))
-WHISPER_CPP_MODELS = os.getenv('WHISPER_CPP_MODELS', 'NULL,VOID').split(',')
-
-### TTS
-PREFERRED_TTS = os.getenv("PREFERRED_TTS", "None")
-TTS_DIR = DATA_DIR / "tts"
-os.makedirs(TTS_DIR, exist_ok=True)
-VOICE_DIR = TTS_DIR / 'voices'
-os.makedirs(VOICE_DIR, exist_ok=True)
-PODCAST_DIR = os.getenv("PODCAST_DIR", TTS_DIR / "sideloads")
-os.makedirs(PODCAST_DIR, exist_ok=True)
-TTS_OUTPUT_DIR = TTS_DIR / 'outputs'
-os.makedirs(TTS_OUTPUT_DIR, exist_ok=True)
-TTS_SEGMENTS_DIR = TTS_DIR / 'segments'
-os.makedirs(TTS_SEGMENTS_DIR, exist_ok=True)
-ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY")
-
-
-### Calendar & email account
-MS365_TOGGLE = True if os.getenv("MS365_TOGGLE") == "True" else False
-ICAL_TOGGLE = True if os.getenv("ICAL_TOGGLE") == "True" else False
-ICS_PATH = DATA_DIR / 'calendar.ics' # deprecated now, but maybe revive?
-ICALENDARS = os.getenv('ICALENDARS', 'NULL,VOID').split(',')
-
-EMAIL_CONFIG = CONFIG_DIR / "email.yaml"
-EMAIL_LOGS = LOGS_DIR / "email"
-os.makedirs(EMAIL_LOGS, exist_ok = True)
-
-### Courtlistener & other webhooks
-COURTLISTENER_DOCKETS_DIR = DATA_DIR / "courtlistener" / "dockets"
-os.makedirs(COURTLISTENER_DOCKETS_DIR, exist_ok=True)
-COURTLISTENER_SEARCH_DIR = DATA_DIR / "courtlistener" / "cases"
-os.makedirs(COURTLISTENER_SEARCH_DIR, exist_ok=True)
-CASETABLE_PATH = DATA_DIR / "courtlistener" / "cases.json"
-COURTLISTENER_API_KEY = os.getenv("COURTLISTENER_API_KEY")
-COURTLISTENER_BASE_URL = os.getenv("COURTLISTENER_BASE_URL", "https://www.courtlistener.com")
-COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"
-
 ### Keys & passwords
-PUBLIC_KEY_FILE = os.getenv("PUBLIC_KEY_FILE", 'you_public_key.asc')
-PUBLIC_KEY = (BASE_DIR.parent / PUBLIC_KEY_FILE).read_text()
 MAC_ID = os.getenv("MAC_ID")
 MAC_UN = os.getenv("MAC_UN")
 MAC_PW = os.getenv("MAC_PW")
 TIMING_API_KEY = os.getenv("TIMING_API_KEY")
 TIMING_API_URL = os.getenv("TIMING_API_URL", "https://web.timingapp.com/api/v1")
-PHOTOPRISM_URL = os.getenv("PHOTOPRISM_URL")
-PHOTOPRISM_USER = os.getenv("PHOTOPRISM_USER")
-PHOTOPRISM_PASS = os.getenv("PHOTOPRISM_PASS")
-
-### Tailscale
-TS_IP = ipaddress.ip_address(os.getenv("TS_IP", "NULL"))
-TS_SUBNET = ipaddress.ip_network(os.getenv("TS_SUBNET")) if os.getenv("TS_SUBNET") else None
-TS_ID = os.getenv("TS_ID", "NULL")
-TS_TAILNET = os.getenv("TS_TAILNET", "NULL")
-TS_ADDRESS = f"http://{TS_ID}.{TS_TAILNET}.ts.net"
-
-### Cloudflare
-CF_API_BASE_URL = os.getenv("CF_API_BASE_URL")
-CF_TOKEN = os.getenv("CF_TOKEN")
-CF_IP = DATA_DIR / "cf_ip.txt" # to be deprecated soon
-CF_DOMAINS_PATH = DATA_DIR / "cf_domains.json" # to be deprecated soon
 
 ### Caddy - not fully implemented
 API.URL = os.getenv("API.URL")
 CADDY_SERVER = os.getenv('CADDY_SERVER', None)
 CADDYFILE_PATH = os.getenv("CADDYFILE_PATH", "") if CADDY_SERVER is not None else None
-CADDY_API_KEY = os.getenv("CADDY_API_KEY")
-
-
-### Microsoft Graph
-MS365_CLIENT_ID = os.getenv('MS365_CLIENT_ID')
-MS365_SECRET = os.getenv('MS365_SECRET')
-MS365_TENANT_ID = os.getenv('MS365_TENANT_ID') 
-MS365_CERT_PATH = CONFIG_DIR / 'MS365' / '.cert.pem' # deprecated
-MS365_KEY_PATH = CONFIG_DIR / 'MS365' / '.cert.key' # deprecated  
-MS365_KEY = MS365_KEY_PATH.read_text()
-MS365_TOKEN_PATH = CONFIG_DIR / 'MS365' / '.token.txt'
-MS365_THUMBPRINT = os.getenv('MS365_THUMBPRINT')
-
-MS365_LOGIN_URL = os.getenv("MS365_LOGIN_URL", "https://login.microsoftonline.com")
-MS365_AUTHORITY_URL = f"{MS365_LOGIN_URL}/{MS365_TENANT_ID}"
-MS365_REDIRECT_PATH = os.getenv("MS365_REDIRECT_PATH", "https://api.sij.ai/o365/oauth_redirect")
-MS365_SCOPE = os.getenv("MS365_SCOPE", 'Calendars.Read,Calendars.ReadWrite,offline_access').split(',')
-
-### Maintenance
-GARBAGE_COLLECTION_INTERVAL = 60 * 60  # Run cleanup every hour
-GARBAGE_TTL = 60 * 60 * 24  # Delete files older than 24 hours
\ No newline at end of file
+CADDY_API_KEY = os.getenv("CADDY_API_KEY")
\ No newline at end of file
diff --git a/sijapi/__main__.py b/sijapi/__main__.py
index 3bd4d2d..4a3f438 100755
--- a/sijapi/__main__.py
+++ b/sijapi/__main__.py
@@ -4,20 +4,13 @@ from fastapi import FastAPI, Request, HTTPException, Response
 from fastapi.responses import JSONResponse
 from fastapi.middleware.cors import CORSMiddleware
 from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.requests import ClientDisconnect
 from hypercorn.asyncio import serve
 from hypercorn.config import Config as HypercornConfig
 import sys
 import asyncio 
-import httpx
 import argparse
-import json
 import ipaddress
 import importlib
-from dotenv import load_dotenv
-from pathlib import Path
-from datetime import datetime
 import argparse
 
 parser = argparse.ArgumentParser(description='Personal API.')
@@ -25,17 +18,12 @@ parser.add_argument('--debug', action='store_true', help='Set log level to L.INF
 parser.add_argument('--test', type=str, help='Load only the specified module.')
 args = parser.parse_args()
 
-from . import L, API, ROUTER_DIR
+from . import L, API, Dir
 L.setup_from_args(args)
-
-from sijapi import ROUTER_DIR
-
-# Initialize a FastAPI application
-api = FastAPI()
-
+app = FastAPI()
 
 # CORSMiddleware
-api.add_middleware(
+app.add_middleware(
     CORSMiddleware,
     allow_origins=['*'],
     allow_credentials=True,
@@ -63,41 +51,22 @@ class SimpleAPIKeyMiddleware(BaseHTTPMiddleware):
                         content={"detail": "Invalid or missing API key"}
                     )
         response = await call_next(request)
-        # L.DEBUG(f"Request from {client_ip} is complete")
         return response
 
-# Add the middleware to your FastAPI app
-api.add_middleware(SimpleAPIKeyMiddleware)
+app.add_middleware(SimpleAPIKeyMiddleware)
 
-
-canceled_middleware = """
-@api.middleware("http")
-async def log_requests(request: Request, call_next):
-    L.DEBUG(f"Incoming request: {request.method} {request.url}")
-    L.DEBUG(f"Request headers: {request.headers}")
-    L.DEBUG(f"Request body: {await request.body()}")
-    response = await call_next(request)
-    return response
-
-async def log_outgoing_request(request):
-    L.INFO(f"Outgoing request: {request.method} {request.url}")
-    L.DEBUG(f"Request headers: {request.headers}")
-    L.DEBUG(f"Request body: {request.content}")
-"""
-
-@api.exception_handler(HTTPException)
+@app.exception_handler(HTTPException)
 async def http_exception_handler(request: Request, exc: HTTPException):
     L.ERR(f"HTTP Exception: {exc.status_code} - {exc.detail}")
     L.ERR(f"Request: {request.method} {request.url}")
     return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})
 
-@api.middleware("http")
+@app.middleware("http")
 async def handle_exception_middleware(request: Request, call_next):
     try:
         response = await call_next(request)
     except RuntimeError as exc:
         if str(exc) == "Response content longer than Content-Length":
-            # Update the Content-Length header to match the actual response content length
             response.headers["Content-Length"] = str(len(response.body))
         else:
             raise
@@ -105,21 +74,20 @@ async def handle_exception_middleware(request: Request, call_next):
 
 
 def load_router(router_name):
-    router_file = ROUTER_DIR / f'{router_name}.py'
+    router_file = Dir.ROUTERS / f'{router_name}.py'
     L.DEBUG(f"Attempting to load {router_name.capitalize()}...")
     if router_file.exists():
         module_path = f'sijapi.routers.{router_name}'
         try:
             module = importlib.import_module(module_path)
             router = getattr(module, router_name)
-            api.include_router(router)
+            app.include_router(router)
             L.INFO(f"{router_name.capitalize()} router loaded.")
         except (ImportError, AttributeError) as e:
             L.CRIT(f"Failed to load router {router_name}: {e}")
     else:
         L.ERR(f"Router file for {router_name} does not exist.")
 
-
 def main(argv):
     if args.test:
         load_router(args.test)
@@ -132,8 +100,7 @@ def main(argv):
     
     config = HypercornConfig()
     config.bind = [API.BIND]  # Use the resolved BIND value
-    asyncio.run(serve(api, config))
-
+    asyncio.run(serve(app, config))
 
 if __name__ == "__main__":
     main(sys.argv[1:])
\ No newline at end of file
diff --git a/sijapi/classes.py b/sijapi/classes.py
index 1dd34c3..b1a9ad7 100644
--- a/sijapi/classes.py
+++ b/sijapi/classes.py
@@ -2,6 +2,7 @@
 import asyncio
 import json
 import math
+import multiprocessing
 import os
 import re
 from concurrent.futures import ThreadPoolExecutor
@@ -13,35 +14,196 @@ from zoneinfo import ZoneInfo
 import aiofiles
 import aiohttp
 import asyncpg
+from typing import Union, Any
+from pydantic import BaseModel, Field, ConfigDict
 import reverse_geocoder as rg
-import yaml
 from dotenv import load_dotenv
-from pydantic import BaseModel, Field, create_model
+from pydantic import BaseModel, Field, create_model, ConfigDict, validator
 from srtm import get_data
 from timezonefinder import TimezoneFinder
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union, TypeVar, Type
+import yaml
+from typing import List, Optional
+from dotenv import load_dotenv
 
 T = TypeVar('T', bound='Configuration')
 
+class HierarchicalPath(os.PathLike):
+    def __init__(self, path=None, base=None, home=None):
+        self.home = Path(home).expanduser() if home else Path.home()
+        self.base = Path(base).resolve() if base else self._find_base()
+        self.path = self._resolve_path(path) if path else self.base
+
+    def _find_base(self):
+        current = Path(__file__).resolve().parent
+        while current.name != 'sijapi' and current != current.parent:
+            current = current.parent
+        return current
+
+    def _resolve_path(self, path):
+        if isinstance(path, HierarchicalPath):
+            return path.path
+        if isinstance(path, Path):
+            return path
+        path = self._resolve_placeholders(path)
+        if path.startswith(('~', 'HOME')):
+            return self.home / path.lstrip('~').lstrip('HOME').lstrip('/')
+        if path.startswith('/'):
+            return Path(path)
+        return self._resolve_relative_path(self.base / path)
+
+    def _resolve_placeholders(self, path):
+        placeholders = {
+            'HOME': str(self.home),
+            'BASE': str(self.base),
+        }
+        pattern = r'\{\{\s*([^}]+)\s*\}\}'
+        return re.sub(pattern, lambda m: placeholders.get(m.group(1).strip(), m.group(0)), path)
+
+    def _resolve_relative_path(self, path):
+        if path.is_file():
+            return path
+        if path.is_dir():
+            return path
+        yaml_path = path.with_suffix('.yaml')
+        if yaml_path.is_file():
+            return yaml_path
+        return path
+
+    def __truediv__(self, other):
+        return HierarchicalPath(self.path / other, base=self.base, home=self.home)
+
+    def __getattr__(self, name):
+        return HierarchicalPath(self.path / name, base=self.base, home=self.home)
+
+    def __str__(self):
+        return str(self.path)
+
+    def __repr__(self):
+        return f"HierarchicalPath('{self.path}')"
+
+    def __fspath__(self):
+        return os.fspath(self.path)
+
+    def __eq__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) == str(other)
+        return False
+
+    def __lt__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) < str(other)
+        return False
+
+    def __le__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) <= str(other)
+        return False
+
+    def __gt__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) > str(other)
+        return False
+
+    def __ge__(self, other):
+        if isinstance(other, (HierarchicalPath, Path, str)):
+            return str(self.path) >= str(other)
+        return False
+
+    def __hash__(self):
+        return hash(self.path)
+
+    def __getattribute__(self, name):
+        try:
+            return super().__getattribute__(name)
+        except AttributeError:
+            return getattr(self.path, name)
+
+
+class Dir(BaseModel):
+    HOME: HierarchicalPath = Field(default_factory=lambda: HierarchicalPath(Path.home()))
+    BASE: HierarchicalPath | None = None
+
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    @classmethod
+    def determine_base(cls) -> HierarchicalPath:
+        return HierarchicalPath(HierarchicalPath()._find_base())
+
+    def __init__(self, **data):
+        super().__init__(**data)
+        if self.BASE is None:
+            self.BASE = self.determine_base()
+
+    @classmethod
+    def load(cls, yaml_path: Union[str, Path] = None) -> 'Dir':
+        yaml_path = cls._resolve_path(yaml_path) if yaml_path else None
+        if yaml_path:
+            with open(yaml_path, 'r') as file:
+                config_data = yaml.safe_load(file)
+            print(f"Loaded directory configuration from {yaml_path}")
+            resolved_data = cls.resolve_placeholders(config_data)
+        else:
+            resolved_data = {}
+        return cls(**resolved_data)
+
+    @classmethod
+    def _resolve_path(cls, path: Union[str, Path]) -> Path:
+        base_path = cls.determine_base().path.parent
+        path = Path(path)
+        if not path.suffix:
+            path = base_path / 'sijapi' / 'config' / f"{path.name}.yaml"
+        elif not path.is_absolute():
+            path = base_path / path
+        return path
+
+    @classmethod
+    def resolve_placeholders(cls, data: Any) -> Any:
+        if isinstance(data, dict):
+            return {k: cls.resolve_placeholders(v) for k, v in data.items()}
+        elif isinstance(data, list):
+            return [cls.resolve_placeholders(v) for v in data]
+        elif isinstance(data, str):
+            return cls.resolve_string_placeholders(data)
+        return data
+
+    @classmethod
+    def resolve_string_placeholders(cls, value: str) -> Any:
+        if value.startswith('{{') and value.endswith('}}'):
+            parts = value.strip('{}').strip().split('.')
+            result = cls.HOME
+            for part in parts:
+                result = getattr(result, part)
+            return result
+        elif value == '*~*':
+            return cls.HOME
+        return HierarchicalPath(value)
+
+    def __getattr__(self, name):
+        return HierarchicalPath(self.BASE / name.lower(), base=self.BASE.path, home=self.HOME.path)
+
+    def model_dump(self, *args, **kwargs):
+        d = super().model_dump(*args, **kwargs)
+        return {k: str(v) for k, v in d.items()}
 
 
-import os
-from pathlib import Path
-from typing import Union, Optional, Any, Dict, List
-import yaml
-import re
-from pydantic import BaseModel, create_model
-from dotenv import load_dotenv
 
 class Configuration(BaseModel):
-    HOME: Path = Path.home()
+    HOME: Path = Field(default_factory=Path.home)
     _dir_config: Optional['Configuration'] = None
+    dir: Dir = Field(default_factory=Dir)
+
+    class Config:
+        arbitrary_types_allowed = True
+        extra = "allow"  # This allows extra fields
 
     @classmethod
     def load(cls, yaml_path: Union[str, Path], secrets_path: Optional[Union[str, Path]] = None, dir_config: Optional['Configuration'] = None) -> 'Configuration':
         yaml_path = cls._resolve_path(yaml_path, 'config')
         if secrets_path:
             secrets_path = cls._resolve_path(secrets_path, 'config')
-        
+
         try:
             with yaml_path.open('r') as file:
                 config_data = yaml.safe_load(file)
@@ -54,25 +216,31 @@ class Configuration(BaseModel):
                 print(f"Loaded secrets data from {secrets_path}")
                 config_data.update(secrets_data)
             
-            # Ensure HOME is set
-            if config_data.get('HOME') is None:
-                config_data['HOME'] = str(Path.home())
-                print(f"HOME was None in config, set to default: {config_data['HOME']}")
-            
-            load_dotenv()
-            
-            instance = cls.create_dynamic_model(**config_data)
+            instance = cls(**config_data)
             instance._dir_config = dir_config or instance
-            
+
             resolved_data = instance.resolve_placeholders(config_data)
-            instance = cls.create_dynamic_model(**resolved_data)
-            instance._dir_config = dir_config or instance
             
-            return instance
+            return cls._create_nested_config(resolved_data)
         except Exception as e:
             print(f"Error loading configuration: {str(e)}")
             raise
 
+    @classmethod
+    def _create_nested_config(cls, data):
+        if isinstance(data, dict):
+            return cls(**{k: cls._create_nested_config(v) for k, v in data.items()})
+        elif isinstance(data, list):
+            return [cls._create_nested_config(item) for item in data]
+        else:
+            return data
+
+    def __getattr__(self, name):
+        value = self.__dict__.get(name)
+        if isinstance(value, dict):
+            return Configuration(**value)
+        return value
+
     @classmethod
     def _resolve_path(cls, path: Union[str, Path], default_dir: str) -> Path:
         base_path = Path(__file__).parent.parent  # This will be two levels up from this file
@@ -106,7 +274,7 @@ class Configuration(BaseModel):
             elif len(parts) == 2 and parts[0] == 'ENV':
                 replacement = os.getenv(parts[1], '')
             else:
-                replacement = value  # Keep original if not recognized
+                replacement = value
             
             value = value.replace('{{' + match + '}}', str(replacement))
         
@@ -115,26 +283,6 @@ class Configuration(BaseModel):
             return Path(value).expanduser()
         return value
 
-    @classmethod
-    def create_dynamic_model(cls, **data):
-        for key, value in data.items():
-            if isinstance(value, dict):
-                data[key] = cls.create_dynamic_model(**value)
-            elif isinstance(value, list) and all(isinstance(item, dict) for item in value):
-                data[key] = [cls.create_dynamic_model(**item) for item in value]
-        
-        DynamicModel = create_model(
-            f'Dynamic{cls.__name__}',
-            __base__=cls,
-            **{k: (Any, v) for k, v in data.items()}
-        )
-        return DynamicModel(**data)
-
-    class Config:
-        extra = "allow"
-        arbitrary_types_allowed = True
-
-
 
 class APIConfig(BaseModel):
     HOST: str
@@ -146,23 +294,25 @@ class APIConfig(BaseModel):
     MODULES: Any  # This will be replaced with a dynamic model
     TZ: str
     KEYS: List[str]
+    MAX_CPU_CORES: int = Field(default_factory=lambda: min(
+        int(os.getenv("MAX_CPU_CORES", multiprocessing.cpu_count() // 2)), multiprocessing.cpu_count()
+    ))
 
     @classmethod
     def load(cls, config_path: Union[str, Path], secrets_path: Union[str, Path]):
         config_path = cls._resolve_path(config_path, 'config')
         secrets_path = cls._resolve_path(secrets_path, 'config')
 
-        # Load main configuration
         with open(config_path, 'r') as file:
             config_data = yaml.safe_load(file)
         
-        print(f"Loaded main config: {config_data}")  # Debug print
+        print(f"Loaded main config: {config_data}")
         
         # Load secrets
         try:
             with open(secrets_path, 'r') as file:
                 secrets_data = yaml.safe_load(file)
-            print(f"Loaded secrets: {secrets_data}")  # Debug print
+            print(f"Loaded secrets: {secrets_data}")
         except FileNotFoundError:
             print(f"Secrets file not found: {secrets_path}")
             secrets_data = {}
@@ -173,7 +323,7 @@ class APIConfig(BaseModel):
         # Resolve internal placeholders
         config_data = cls.resolve_placeholders(config_data)
         
-        print(f"Resolved config: {config_data}")  # Debug print
+        print(f"Resolved config: {config_data}")
         
         # Handle KEYS placeholder
         if isinstance(config_data.get('KEYS'), list) and len(config_data['KEYS']) == 1:
@@ -185,7 +335,7 @@ class APIConfig(BaseModel):
                     secret_key = parts[1]
                     if secret_key in secrets_data:
                         config_data['KEYS'] = secrets_data[secret_key]
-                        print(f"Replaced KEYS with secret: {config_data['KEYS']}")  # Debug print
+                        print(f"Replaced KEYS with secret: {config_data['KEYS']}")
                     else:
                         print(f"Secret key '{secret_key}' not found in secrets file")
                 else:
@@ -201,10 +351,10 @@ class APIConfig(BaseModel):
                 modules_fields[key] = (bool, value)
             else:
                 raise ValueError(f"Invalid value for module {key}: {value}. Must be 'on', 'off', True, or False.")
-        
+
         DynamicModulesConfig = create_model('DynamicModulesConfig', **modules_fields)
         config_data['MODULES'] = DynamicModulesConfig(**modules_data)
-        
+
         return cls(**config_data)
 
     @classmethod
@@ -236,12 +386,12 @@ class APIConfig(BaseModel):
                 resolved_data[key] = [resolve_value(item) for item in value]
             else:
                 resolved_data[key] = resolve_value(value)
-        
+
         # Resolve BIND separately to ensure HOST and PORT are used
         if 'BIND' in resolved_data:
             resolved_data['BIND'] = resolved_data['BIND'].replace('{{ HOST }}', str(resolved_data['HOST']))
             resolved_data['BIND'] = resolved_data['BIND'].replace('{{ PORT }}', str(resolved_data['PORT']))
-        
+
         return resolved_data
 
     def __getattr__(self, name: str) -> Any:
@@ -253,8 +403,6 @@ class APIConfig(BaseModel):
     def active_modules(self) -> List[str]:
         return [module for module, is_active in self.MODULES.__dict__.items() if is_active]
 
-
-
 class Location(BaseModel):
     latitude: float
     longitude: float
@@ -482,7 +630,6 @@ class Geocoder:
             timezone=await self.timezone(latitude, longitude)
         )
 
-
     def round_coords(self, lat: float, lon: float, decimal_places: int = 2) -> Tuple[float, float]:
         return (round(lat, decimal_places), round(lon, decimal_places))
 
@@ -583,55 +730,75 @@ class Database(BaseModel):
             await conn.close()
 
     @classmethod
-    def from_env(cls):
-        import os
-        return cls(
-            host=os.getenv("DB_HOST", "localhost"),
-            port=int(os.getenv("DB_PORT", 5432)),
-            user=os.getenv("DB_USER"),
-            password=os.getenv("DB_PASSWORD"),
-            database=os.getenv("DB_NAME"),
-            db_schema=os.getenv("DB_SCHEMA")
-        )
+    def from_yaml(cls, yaml_path: Union[str, Path]):
+        yaml_path = Path(yaml_path)
+        if not yaml_path.is_absolute():
+            yaml_path = Path(__file__).parent / 'config' / yaml_path
+
+        with open(yaml_path, 'r') as file:
+            config = yaml.safe_load(file)
+        return cls(**config)
 
     def to_dict(self):
         return self.dict(exclude_none=True)
 
 
+
 class IMAPConfig(BaseModel):
     username: str
     password: str
     host: str
     port: int
-    encryption: str = None
+    encryption: Optional[str]
 
 class SMTPConfig(BaseModel):
     username: str
     password: str
     host: str
     port: int
-    encryption: str = None
+    encryption: Optional[str]
 
 class AutoResponder(BaseModel):
     name: str
     style: str
     context: str
     ollama_model: str = "llama3"
+    image_prompt: Optional[str] = None
+    image_scene: Optional[str] = None
+
+class AccountAutoResponder(BaseModel):
+    name: str
+    smtp: str
     whitelist: List[str]
     blacklist: List[str]
-    image_prompt: Optional[str] = None
-    image_scene:  Optional[str] = None
-    smtp: SMTPConfig
-    
+
 class EmailAccount(BaseModel):
     name: str
-    refresh: int
     fullname: Optional[str]
     bio: Optional[str]
+    refresh: int
     summarize: bool = False
     podcast: bool = False
-    imap: IMAPConfig
-    autoresponders: Optional[List[AutoResponder]]
+    imap: str
+    autoresponders: List[AccountAutoResponder]
+
+class EmailConfiguration(Configuration):
+    imaps: List[IMAPConfig]
+    smtps: List[SMTPConfig]
+    autoresponders: List[AutoResponder]
+    accounts: List[EmailAccount]
+
+    def get_imap(self, username: str) -> Optional[IMAPConfig]:
+        return next((imap for imap in self.imaps if imap.username == username), None)
+
+    def get_smtp(self, username: str) -> Optional[SMTPConfig]:
+        return next((smtp for smtp in self.smtps if smtp.username == username), None)
+
+    def get_autoresponder(self, name: str) -> Optional[AutoResponder]:
+        return next((ar for ar in self.autoresponders if ar.name == name), None)
+
+    def get_account(self, name: str) -> Optional[EmailAccount]:
+        return next((account for account in self.accounts if account.name == name), None)
 
 class EmailContact(BaseModel):
     email: str
@@ -643,4 +810,4 @@ class IncomingEmail(BaseModel):
     recipients: List[EmailContact]
     subject: str
     body: str
-    attachments: List[dict] = []
+    attachments: List[dict] = []
\ No newline at end of file
diff --git a/sijapi/config/.env-example b/sijapi/config/.env-example
index 38a50b5..3d3fdda 100644
--- a/sijapi/config/.env-example
+++ b/sijapi/config/.env-example
@@ -291,7 +291,7 @@ MS365_SECRET=¿SECRET?             # <--- enter your app secret (found in Azure
 MS365_SCOPE='basic,calendar_all,Calendars.Read,Calendars.ReadWrite,offline_access'
 MS365_TOKEN_FILE=oauth_token.txt
 MS365_LOGIN_URL='https://login.microsoftonline.com'
-MS365_REDIRECT_PATH=¿SECRET?      # <--- e.g. http://localhost:4444/o365/oauth_redirect
+MS365_REDIRECT_PATH=¿SECRET?      # <--- e.g. http://localhost:4444/MS365/oauth_redirect
 #─── notes: ───────────────────────────────────────────────────────────────────────────────
 #                                                                                          
 #   # MS365_CLIENT_ID, _TENANT_ID, _SECRET, AND _SCOPES must be obtained from Microsoft 
diff --git a/sijapi/config/asr.yaml-example b/sijapi/config/asr.yaml-example
new file mode 100644
index 0000000..1838988
--- /dev/null
+++ b/sijapi/config/asr.yaml-example
@@ -0,0 +1,11 @@
+DIR: '{{ DIR.HOME }}/whisper.cpp'
+MODELS:
+  - small
+  - base
+  - base-en
+  - tiny
+  - medium
+  - medium-en
+  - large
+  - large-v2
+  - large-v3 
\ No newline at end of file
diff --git a/sijapi/config/cal.yaml-example b/sijapi/config/cal.yaml-example
new file mode 100644
index 0000000..e2fa1b9
--- /dev/null
+++ b/sijapi/config/cal.yaml-example
@@ -0,0 +1,19 @@
+MS365:
+  STATUS: OFF
+  AUTH:
+    TENANT: bad78048-a6e0-47b1-a24b-403c444aa349
+    CLIENT_ID: ce8cbd24-f146-4dc7-8ee7-51d9b69dec59
+    LOGIN: 'https://login.microsoftonline.com'
+    REDIRECT: 'https://api.sij.ai/MS365/oauth_redirect'
+    SCOPES: 
+      - basic
+      - calendar_all
+      - Calendars.Read
+      - Calendars.ReadWrite
+      - offline_access
+    SECRET: '{{ SECRET.MS365_SECRET }}'
+    TOKEN_FILE: '{{ DIR.CONFIG }}/ms365/oauth_token.txt'
+ICAL:
+  STATUS: ON
+  CALENDARS:
+    - ''
\ No newline at end of file
diff --git a/sijapi/config/courtlistener.yaml-example b/sijapi/config/courtlistener.yaml-example
new file mode 100644
index 0000000..b5b6c16
--- /dev/null
+++ b/sijapi/config/courtlistener.yaml-example
@@ -0,0 +1,6 @@
+url:
+  base: 'https://www.courtlistener.com'
+  dockets: '{{ url.base }}/api/rest/v3/dockets/'
+API_KEY: '{{ SECRET.COURTLISTENER_API_KEY }}'
+DOCKETS: '{{ DIR.DATA }}/cl/dockets'
+SEARCHES: '{{ DIR.DATA }}/cl/searches'
\ No newline at end of file
diff --git a/sijapi/config/dirs.yaml-example b/sijapi/config/dirs.yaml-example
index d9a58d1..1c6667f 100644
--- a/sijapi/config/dirs.yaml-example
+++ b/sijapi/config/dirs.yaml-example
@@ -1,6 +1,16 @@
 HOME: ~
-BASE: '{{ HOME }}/sijapi'
+BASE: '{{ HOME }}/workshop/sijapi'
 SIJAPI: '{{ BASE }}/sijapi'
 CONFIG: '{{ SIJAPI }}/config'
+CONFIG.email: '{{ CONFIG }}/email.yaml'
+CONFIG.img: '{{ CONFIG }}/img.yaml'
+CONFIG.news: '{{ CONFIG }}/news.yaml'
+SECRETS: '{{ CONFIG }}/secrets.yaml'
 DATA: '{{ SIJAPI }}/data'
+DATA.ALERTS: '{{ DATA }}/alerts'
+DATA.ASR: '{{ DATA }}/asr'
+DATA.BASE: '{{ DATA }}/db'
+DATA.IMG: '{{ DATA }}/img'
+DATA.TTS: '{{ DATA }}/tts'
+TTS.VOICES: '{{ DATA.TTS }}/voices'
 LOGS: '{{ SIJAPI }}/logs'
\ No newline at end of file
diff --git a/sijapi/config/llm.yaml-example b/sijapi/config/llm.yaml-example
new file mode 100644
index 0000000..5a51a35
--- /dev/null
+++ b/sijapi/config/llm.yaml-example
@@ -0,0 +1,17 @@
+url: http://localhost:11434
+sys: 'You are a helpful AI assistant.'
+tpw: 1.3
+chat:
+  model: dolphin-mistral
+vision:
+  model: llava-llama3
+summary:
+  model: dolphin-llama3:8b-256k
+  chunk-size: 16384
+  chunk-overlap: 256
+  length-ratio: 4
+  min-length: 64
+  token-limit: 16384
+  instruct: 'You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.'
+functions:
+  model: 'command-r'
\ No newline at end of file
diff --git a/sijapi/config/obsidian.yaml-example b/sijapi/config/obsidian.yaml-example
new file mode 100644
index 0000000..fd158f4
--- /dev/null
+++ b/sijapi/config/obsidian.yaml-example
@@ -0,0 +1,6 @@
+DAILY_NOTE:
+  YEAR: '%Y'
+  MONTH: '%Y-%m %B'
+  DAY: '%Y-%m-%d %A'
+  DAY_SHORT: '%Y-%m-%d'
+DIR: '{{ DIR.HOME }}/Nextcloud/notes' # you can specify the absolute path or use '{{ DIR.HOME }}' followed by a relative path
diff --git a/sijapi/config/tailscale.yaml-example b/sijapi/config/tailscale.yaml-example
new file mode 100644
index 0000000..bbac9ae
--- /dev/null
+++ b/sijapi/config/tailscale.yaml-example
@@ -0,0 +1,6 @@
+ID: sij-mbp16
+IP: 100.64.64.20
+SUBNET: 100.64.64.0/24
+MDNS: starling-sailfin.ts.net
+API_KEY: '{{ SECRET.TAILSCALE_API_KEY }}'
+ADDRESS: 'http://{{ ID }}.{{ MDNS }}'
\ No newline at end of file
diff --git a/sijapi/routers/asr.py b/sijapi/routers/asr.py
index 399ab15..3920e95 100644
--- a/sijapi/routers/asr.py
+++ b/sijapi/routers/asr.py
@@ -15,7 +15,7 @@ from fastapi.responses import JSONResponse
 from pydantic import BaseModel, Field
 from typing import Optional
 
-from sijapi import L, ASR_DIR, WHISPER_CPP_MODELS, GARBAGE_COLLECTION_INTERVAL, GARBAGE_TTL, WHISPER_CPP_DIR, MAX_CPU_CORES
+from sijapi import L, API, Dir, ASR
 
 asr = APIRouter()
 
@@ -83,11 +83,11 @@ async def transcribe_endpoint(
 async def transcribe_audio(file_path, params: TranscribeParams):
     L.DEBUG(f"Transcribing audio file from {file_path}...")
     file_path = await convert_to_wav(file_path)
-    model = params.model if params.model in WHISPER_CPP_MODELS else 'small'
-    model_path = WHISPER_CPP_DIR / 'models' / f'ggml-{model}.bin'
-    command = [str(WHISPER_CPP_DIR / 'build' / 'bin' / 'main')]
+    model = params.model if params.model in ASR.MODELS else 'small'
+    model_path = ASR.WHISPER_DIR.models / f'ggml-{model}.bin'
+    command = [str(ASR.WHISPER_DIR.build.bin.main)]
     command.extend(['-m', str(model_path)])
-    command.extend(['-t', str(max(1, min(params.threads or MAX_CPU_CORES, MAX_CPU_CORES)))])
+    command.extend(['-t', str(max(1, min(params.threads or API.MAX_CPU_CORES, API.MAX_CPU_CORES)))])
     command.extend(['-np'])  # Always enable no-prints
 
 
@@ -187,7 +187,7 @@ async def run_transcription(command, file_path):
     return stdout.decode().strip()
 
 async def convert_to_wav(file_path: str):
-    wav_file_path = os.path.join(ASR_DIR, f"{uuid.uuid4()}.wav")
+    wav_file_path = os.path.join(Dir.data.asr, f"{uuid.uuid4()}.wav")
     proc = await asyncio.create_subprocess_exec(
         "ffmpeg", "-y", "-i", file_path, "-acodec", "pcm_s16le", "-ar", "16000", "-ac", "1", wav_file_path,
         stdout=asyncio.subprocess.PIPE,
diff --git a/sijapi/routers/cal.py b/sijapi/routers/cal.py
index 7cca17a..c04ab05 100644
--- a/sijapi/routers/cal.py
+++ b/sijapi/routers/cal.py
@@ -1,7 +1,7 @@
 '''
 Calendar module using macOS Calendars and/or Microsoft 365 via its Graph API.
 Depends on: 
-  LOGGER, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
+  LOGGER, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, Cal.MS365.auth.token
 '''
 from fastapi import APIRouter, Depends, HTTPException, status, Request
 from fastapi.responses import RedirectResponse, JSONResponse
@@ -16,45 +16,46 @@ from typing import Dict, List, Any
 from datetime import datetime, timedelta
 from Foundation import NSDate, NSRunLoop
 import EventKit as EK
-from sijapi import L, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
+from sijapi import L, Cal
 from sijapi.routers import loc
 
 cal = APIRouter()
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
 timeout = httpx.Timeout(12)
 
-if MS365_TOGGLE is True:
-    L.CRIT(f"Visit https://api.sij.ai/o365/login to obtain your Microsoft 365 authentication token.")
+print(f"Configuration MS365: {Cal.MS365}")
+if Cal.MS365.toggle == 'on':
+    L.CRIT(f"Visit https://api.sij.ai/MS365/login to obtain your Microsoft 365 authentication token.")
 
-    @cal.get("/o365/login")
+    @cal.get("/MS365/login")
     async def login():
-        L.DEBUG(f"Received request to /o365/login")
-        L.DEBUG(f"SCOPE: {MS365_SCOPE}")
-        if not MS365_SCOPE:
+        L.DEBUG(f"Received request to /MS365/login")
+        L.DEBUG(f"SCOPE: {Cal.MS365.auth.scopes}")
+        if not Cal.MS365.auth.scopes:
             L.ERR("No scopes defined for authorization.")
             raise HTTPException(
                 status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                 detail="No scopes defined for authorization."
             )
-        authorization_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/authorize?client_id={MS365_CLIENT_ID}&response_type=code&redirect_uri={MS365_REDIRECT_PATH}&scope={'+'.join(MS365_SCOPE)}"
+        authorization_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/authorize?client_id={Cal.MS365.client}&response_type=code&redirect_uri={Cal.MS365.auth.redirect}&scope={'+'.join(Cal.MS365.auth.scopes)}"
         L.INFO(f"Redirecting to authorization URL: {authorization_url}")
         return RedirectResponse(authorization_url)
 
-    @cal.get("/o365/oauth_redirect")
+    @cal.get("/MS365/oauth_redirect")
     async def oauth_redirect(code: str = None, error: str = None):
-        L.DEBUG(f"Received request to /o365/oauth_redirect")
+        L.DEBUG(f"Received request to /MS365/oauth_redirect")
         if error:
             L.ERR(f"OAuth2 Error: {error}")
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST, detail="OAuth2 Error"
             )
         L.INFO(f"Requesting token with authorization code: {code}")
-        token_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/token"
+        token_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/token"
         data = {
-            "client_id": MS365_CLIENT_ID,
-            "client_secret": MS365_SECRET,
+            "client_id": Cal.MS365.client,
+            "client_secret": Cal.MS365.auth.secret,
             "code": code,
-            "redirect_uri": MS365_REDIRECT_PATH,
+            "redirect_uri": Cal.MS365.auth.redirect,
             "grant_type": "authorization_code"
         }
         async with httpx.AsyncClient(timeout=timeout) as client:
@@ -73,9 +74,9 @@ if MS365_TOGGLE is True:
                 detail="Failed to obtain access token"
             )
 
-    @cal.get("/o365/me")
+    @cal.get("/MS365/me")
     async def read_items():
-        L.DEBUG(f"Received request to /o365/me")
+        L.DEBUG(f"Received request to /MS365/me")
         token = await load_token()
         if not token:
             raise HTTPException(
@@ -102,16 +103,16 @@ if MS365_TOGGLE is True:
         L.DEBUG(f"Saving token: {token}")
         try:
             token["expires_at"] = int(time.time()) + token["expires_in"]
-            with open(MS365_TOKEN_PATH, "w") as file:
+            with open(Cal.MS365.auth.token, "w") as file:
                 json.dump(token, file)
-                L.DEBUG(f"Saved token to {MS365_TOKEN_PATH}")
+                L.DEBUG(f"Saved token to {Cal.MS365.auth.token}")
         except Exception as e:
             L.ERR(f"Failed to save token: {e}")
 
     async def load_token():
-        if os.path.exists(MS365_TOKEN_PATH):
+        if os.path.exists(Cal.MS365.auth.token):
             try:
-                with open(MS365_TOKEN_PATH, "r") as file:
+                with open(Cal.MS365.auth.token, "r") as file:
                     token = json.load(file)
             except FileNotFoundError:
                 L.ERR("Token file not found.")
@@ -128,7 +129,7 @@ if MS365_TOGGLE is True:
                 L.DEBUG("No token found.")
                 return None
         else:
-            L.ERR(f"No file found at {MS365_TOKEN_PATH}")
+            L.ERR(f"No file found at {Cal.MS365.auth.token}")
             return None
 
 
@@ -146,13 +147,13 @@ if MS365_TOGGLE is True:
         return response.status_code == 401
 
     async def get_new_token_with_refresh_token(refresh_token):
-        token_url = f"{MS365_AUTHORITY_URL}/oauth2/v2.0/token"
+        token_url = f"{Cal.MS365.auth.url}/oauth2/v2.0/token"
         data = {
-            "client_id": MS365_CLIENT_ID,
-            "client_secret": MS365_SECRET,
+            "client_id": Cal.MS365.client,
+            "client_secret": Cal.MS365.auth.secret,
             "refresh_token": refresh_token,
             "grant_type": "refresh_token",
-            "scope": " ".join(MS365_SCOPE),
+            "scope": " ".join(Cal.MS365.auth.scopes),
         }
         async with httpx.AsyncClient(timeout=timeout) as client:
             response = await client.post(token_url, data=data)
@@ -164,6 +165,36 @@ if MS365_TOGGLE is True:
             L.ERR("Failed to refresh access token")
             return None
 
+    async def get_ms365_events(start_date: datetime, end_date: datetime):
+        token = await load_token()
+        if token:
+            if await is_token_expired(token):
+                await refresh_token()
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Access token not found",
+            )
+        # this looks like it might need updating to use tz-aware datetimes converted to UTC...
+        graph_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=start/dateTime ge '{start_date}T00:00:00' and end/dateTime le '{end_date}T23:59:59'"
+        headers = {
+            "Authorization": f"Bearer {token['access_token']}",
+            "Prefer": 'outlook.timezone="Pacific Standard Time"',
+        }
+        async with httpx.AsyncClient() as client:
+            response = await client.get(graph_url, headers=headers)
+
+        if response.status_code != 200:
+            L.ERR("Failed to retrieve events from Microsoft 365")
+            raise HTTPException(
+                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                detail="Failed to retrieve events",
+            )
+
+        ms_events = response.json().get("value", [])
+        return ms_events
+
+
 
     async def refresh_token():
         token = await load_token()
@@ -223,12 +254,12 @@ async def get_events_endpoint(start_date: str, end_date: str):
 
 async def get_events(start_dt: datetime, end_dt: datetime) -> List:
     combined_events = []
-    if MS365_TOGGLE:
+    if Cal.MS365.toggle == "on":
         ms_events = await get_ms365_events(start_dt, end_dt)
         combined_events.extend(ms_events)  # Use extend instead of append
     
-    if ICAL_TOGGLE:
-        calendar_ids = ICALENDARS  
+    if Cal.ICAL.toggle == "on":
+        calendar_ids = Cal.ICAL.calendars  
         macos_events = get_macos_calendar_events(start_dt, end_dt, calendar_ids)
         combined_events.extend(macos_events)  # Use extend instead of append
     
@@ -236,108 +267,80 @@ async def get_events(start_dt: datetime, end_dt: datetime) -> List:
     return parsed_events
 
 
-def get_macos_calendar_events(start_date: datetime, end_date: datetime, calendar_ids: List[str] = None) -> List[Dict]:
-    event_store = EK.EKEventStore.alloc().init()
+if Cal.ICAL.toggle == "on":
+    def get_macos_calendar_events(start_date: datetime, end_date: datetime, calendar_ids: List[str] = None) -> List[Dict]:
+        event_store = EK.EKEventStore.alloc().init()
 
-    # Request access to EventKit
-    def request_access() -> bool:
-        access_granted = []
+        # Request access to EventKit
+        def request_access() -> bool:
+            access_granted = []
 
-        def completion_handler(granted, error):
-            if error is not None:
-                L.ERR(f"Error: {error}")
-            access_granted.append(granted)
-            # Notify the main thread that the completion handler has executed
+            def completion_handler(granted, error):
+                if error is not None:
+                    L.ERR(f"Error: {error}")
+                access_granted.append(granted)
+                # Notify the main thread that the completion handler has executed
+                with access_granted_condition:
+                    access_granted_condition.notify()
+
+            access_granted_condition = threading.Condition()
             with access_granted_condition:
-                access_granted_condition.notify()
+                event_store.requestAccessToEntityType_completion_(0, completion_handler)  # 0 corresponds to EKEntityTypeEvent
+                # Wait for the completion handler to be called
+                access_granted_condition.wait(timeout=10)
+                # Verify that the handler was called and access_granted is not empty
+                if access_granted:
+                    return access_granted[0]
+                else:
+                    L.ERR("Request access timed out or failed")
+                    return False
 
-        access_granted_condition = threading.Condition()
-        with access_granted_condition:
-            event_store.requestAccessToEntityType_completion_(0, completion_handler)  # 0 corresponds to EKEntityTypeEvent
-            # Wait for the completion handler to be called
-            access_granted_condition.wait(timeout=10)
-            # Verify that the handler was called and access_granted is not empty
-            if access_granted:
-                return access_granted[0]
-            else:
-                L.ERR("Request access timed out or failed")
-                return False
+        if not request_access():
+            L.ERR("Access to calendar data was not granted")
+            return []
 
-    if not request_access():
-        L.ERR("Access to calendar data was not granted")
-        return []
+        ns_start_date = datetime_to_nsdate(start_date)
+        ns_end_date = datetime_to_nsdate(end_date)
 
-    ns_start_date = datetime_to_nsdate(start_date)
-    ns_end_date = datetime_to_nsdate(end_date)
-
-    # Retrieve all calendars
-    all_calendars = event_store.calendarsForEntityType_(0)  # 0 corresponds to EKEntityTypeEvent
-    if calendar_ids:
-        selected_calendars = [cal for cal in all_calendars if cal.calendarIdentifier() in calendar_ids]
-    else:
-        selected_calendars = all_calendars
-
-    # Filtering events by selected calendars
-    predicate = event_store.predicateForEventsWithStartDate_endDate_calendars_(ns_start_date, ns_end_date, selected_calendars)
-    events = event_store.eventsMatchingPredicate_(predicate)
-
-    event_list = []
-    for event in events:
-        # Check if event.attendees() returns None
-        if event.attendees():
-            attendees = [{'name': att.name(), 'email': att.emailAddress()} for att in event.attendees() if att.emailAddress()]
+        # Retrieve all calendars
+        all_calendars = event_store.calendarsForEntityType_(0)  # 0 corresponds to EKEntityTypeEvent
+        if calendar_ids:
+            selected_calendars = [cal for cal in all_calendars if cal.calendarIdentifier() in calendar_ids]
         else:
-            attendees = []
+            selected_calendars = all_calendars
 
-        # Format the start and end dates properly
-        start_date_str = event.startDate().descriptionWithLocale_(None)
-        end_date_str = event.endDate().descriptionWithLocale_(None)
+        # Filtering events by selected calendars
+        predicate = event_store.predicateForEventsWithStartDate_endDate_calendars_(ns_start_date, ns_end_date, selected_calendars)
+        events = event_store.eventsMatchingPredicate_(predicate)
 
-        event_data = {
-            "subject": event.title(),
-            "id": event.eventIdentifier(),
-            "start": start_date_str,
-            "end": end_date_str,
-            "bodyPreview": event.notes() if event.notes() else '',
-            "attendees": attendees,
-            "location": event.location() if event.location() else '',
-            "onlineMeetingUrl": '',  # Defaulting to empty as macOS EventKit does not provide this
-            "showAs": 'busy',  # Default to 'busy'
-            "isAllDay": event.isAllDay()
-        }
+        event_list = []
+        for event in events:
+            # Check if event.attendees() returns None
+            if event.attendees():
+                attendees = [{'name': att.name(), 'email': att.emailAddress()} for att in event.attendees() if att.emailAddress()]
+            else:
+                attendees = []
 
-        event_list.append(event_data)
+            # Format the start and end dates properly
+            start_date_str = event.startDate().descriptionWithLocale_(None)
+            end_date_str = event.endDate().descriptionWithLocale_(None)
 
-    return event_list
+            event_data = {
+                "subject": event.title(),
+                "id": event.eventIdentifier(),
+                "start": start_date_str,
+                "end": end_date_str,
+                "bodyPreview": event.notes() if event.notes() else '',
+                "attendees": attendees,
+                "location": event.location() if event.location() else '',
+                "onlineMeetingUrl": '',  # Defaulting to empty as macOS EventKit does not provide this
+                "showAs": 'busy',  # Default to 'busy'
+                "isAllDay": event.isAllDay()
+            }
 
-async def get_ms365_events(start_date: datetime, end_date: datetime):
-    token = await load_token()
-    if token:
-        if await is_token_expired(token):
-            await refresh_token()
-    else:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Access token not found",
-        )
-    # this looks like it might need updating to use tz-aware datetimes converted to UTC...
-    graph_url = f"https://graph.microsoft.com/v1.0/me/events?$filter=start/dateTime ge '{start_date}T00:00:00' and end/dateTime le '{end_date}T23:59:59'"
-    headers = {
-        "Authorization": f"Bearer {token['access_token']}",
-        "Prefer": 'outlook.timezone="Pacific Standard Time"',
-    }
-    async with httpx.AsyncClient() as client:
-        response = await client.get(graph_url, headers=headers)
+            event_list.append(event_data)
 
-    if response.status_code != 200:
-        L.ERR("Failed to retrieve events from Microsoft 365")
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to retrieve events",
-        )
-
-    ms_events = response.json().get("value", [])
-    return ms_events
+        return event_list
 
 
 async def parse_calendar_for_day(range_start: datetime, range_end: datetime, events: List[Dict[str, Any]]):
diff --git a/sijapi/routers/cf.py b/sijapi/routers/cf.py
index 1067b44..a44a6c3 100644
--- a/sijapi/routers/cf.py
+++ b/sijapi/routers/cf.py
@@ -130,12 +130,10 @@ async def add_config(record: DNSRecordRequest):
         raise HTTPException(status_code=400, detail=f"Failed to create A record: {error_message} (Code: {error_code})")
 
     # Update Caddyfile
-    await update_caddyfile(full_domain, caddy_ip, port)
-    
+    await update_caddyfile(full_domain, caddy_ip, port)    
     return {"message": "Configuration added successfully"}
 
 
-
 @cf.get("/cf/list_zones")
 async def list_zones_endpoint():
     domains = await list_zones()
diff --git a/sijapi/routers/email.py b/sijapi/routers/email.py
index 6707e91..6752c97 100644
--- a/sijapi/routers/email.py
+++ b/sijapi/routers/email.py
@@ -20,7 +20,7 @@ import ssl
 import yaml
 from typing import List, Dict, Optional, Set
 from datetime import datetime as dt_datetime
-from sijapi import L, PODCAST_DIR, DEFAULT_VOICE, EMAIL_CONFIG, EMAIL_LOGS
+from sijapi import L, TTS, Email, Dir
 from sijapi.routers import img, loc, tts, llm
 from sijapi.utilities import clean_text, assemble_journal_path, extract_text, prefix_lines
 from sijapi.classes import EmailAccount, IMAPConfig, SMTPConfig, IncomingEmail, EmailContact, AutoResponder
@@ -28,13 +28,11 @@ from sijapi.classes import EmailAccount
 
 email = APIRouter(tags=["private"])
 
-
 def load_email_accounts(yaml_path: str) -> List[EmailAccount]:
     with open(yaml_path, 'r') as file:
         config = yaml.safe_load(file)
     return [EmailAccount(**account) for account in config['accounts']]
 
-
 def get_imap_connection(account: EmailAccount):
     return Imbox(account.imap.host,
         username=account.imap.username,
@@ -43,8 +41,6 @@ def get_imap_connection(account: EmailAccount):
         ssl=account.imap.encryption == 'SSL',
         starttls=account.imap.encryption == 'STARTTLS')
 
-
-
 def get_smtp_connection(autoresponder):
     # Create an SSL context that doesn't verify certificates
     context = ssl.create_default_context()
@@ -83,7 +79,6 @@ def get_smtp_connection(autoresponder):
             L.ERR(f"Unencrypted connection failed: {str(e)}")
             raise
 
-
 async def send_response(to_email: str, subject: str, body: str, profile: AutoResponder, image_attachment: Path = None) -> bool:
     server = None
     try:
@@ -149,7 +144,7 @@ async def extract_attachments(attachments) -> List[str]:
     return attachment_texts
 
 async def process_account_archival(account: EmailAccount):
-    summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
+    summarized_log = Dir.logs.email / account.name / "summarized.txt"
     os.makedirs(summarized_log.parent, exist_ok = True)
 
     while True:
@@ -196,7 +191,7 @@ async def summarize_single_email(this_email: IncomingEmail, podcast: bool = Fals
         attachment_texts = await extract_attachments(this_email.attachments)
         email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
     summary = await llm.summarize_text(email_content)
-    await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = podcast, output_path = tts_path)
+    await tts.local_tts(text_content = summary, speed = 1.1, voice = TTS.xtts.voice, podcast = podcast, output_path = tts_path)
     md_summary = f'```ad.summary\n'
     md_summary += f'title: {this_email.subject}\n'
     md_summary += f'{summary}\n'
@@ -266,7 +261,7 @@ def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount
 
 
 async def process_account_autoresponding(account: EmailAccount):
-    EMAIL_AUTORESPONSE_LOG = EMAIL_LOGS / account.name / "autoresponded.txt"
+    EMAIL_AUTORESPONSE_LOG = Dir.logs.email / account.name / "autoresponded.txt"
     os.makedirs(EMAIL_AUTORESPONSE_LOG.parent, exist_ok=True)
 
     while True:
diff --git a/sijapi/routers/img.py b/sijapi/routers/img.py
index 7748a44..3245dce 100644
--- a/sijapi/routers/img.py
+++ b/sijapi/routers/img.py
@@ -405,7 +405,7 @@ async def load_workflow(workflow_path: str, workflow:str):
         return json.load(file)
 
 
-async def update_prompt_and_get_key(workflow: dict, post: dict, positive: str):
+async def update_prompt_and_get_key(workflow: dict, post: dict, positive: str):
     '''
 Recurses through the workflow searching for and substituting the dynamic values for API_PrePrompt, API_StylePrompt, API_NegativePrompt, width, height, and seed (random integer).
 Even more important, it finds and returns the key to the filepath where the file is saved, which we need to decipher status when generation is complete.
diff --git a/sijapi/routers/llm.py b/sijapi/routers/llm.py
index 4a19e3b..5ad02f1 100644
--- a/sijapi/routers/llm.py
+++ b/sijapi/routers/llm.py
@@ -26,7 +26,7 @@ import tempfile
 import shutil
 import html2text
 import markdown
-from sijapi import L, LLM_SYS_MSG, DEFAULT_LLM, DEFAULT_VISION, REQUESTS_DIR, OBSIDIAN_CHROMADB_COLLECTION, OBSIDIAN_VAULT_DIR, DOC_DIR, OPENAI_API_KEY, DEFAULT_VOICE, SUMMARY_INSTRUCT, SUMMARY_CHUNK_SIZE, SUMMARY_TPW, SUMMARY_CHUNK_OVERLAP, SUMMARY_LENGTH_RATIO, SUMMARY_TOKEN_LIMIT, SUMMARY_MIN_LENGTH, SUMMARY_MODEL
+from sijapi import L, Dir, API, LLM, TTS
 from sijapi.utilities import convert_to_unix_time, sanitize_filename, ocr_pdf, clean_text, should_use_ocr, extract_text_from_pdf, extract_text_from_docx, read_text_file, str_to_bool, get_extension
 from sijapi.routers import tts
 from sijapi.routers.asr import transcribe_audio
@@ -401,7 +401,7 @@ def query_gpt4(llmPrompt: List = [], system_msg: str = "", user_msg: str = "", m
                 {"role": "system", "content": system_msg},
                 {"role": "user", "content": user_msg}
             ]
-    LLM = OpenAI(api_key=OPENAI_API_KEY) 
-    response = LLM.chat.completions.create(
+    llm_client = OpenAI(api_key=LLM.OPENAI_API_KEY)
+    response = llm_client.chat.completions.create(
         model="gpt-4",
         messages=messages,
diff --git a/sijapi/routers/news.py b/sijapi/routers/news.py
index 32097ca..ee3b290 100644
--- a/sijapi/routers/news.py
+++ b/sijapi/routers/news.py
@@ -26,8 +26,6 @@ from fastapi.responses import JSONResponse
 from pydantic import BaseModel
 
 from pathlib import Path
-
-from sijapi.classes import Configuration
 from sijapi import API, L, Dir, News, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO
 from sijapi.utilities import sanitize_filename, assemble_journal_path, assemble_archive_path
 from sijapi.routers import llm, tts, asr, loc

From 074aa2114af83a5cfaf7d4c91f993fc1cf4a4375 Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Tue, 9 Jul 2024 16:14:52 -0700
Subject: [PATCH 3/3] Auto-update: Tue Jul  9 16:14:52 PDT 2024

---
 sijapi/__init__.py              |  8 +++--
 sijapi/classes.py               | 53 ++++++++++++++++++++++++-------
 sijapi/config/dirs.yaml-example | 16 ----------
 sijapi/routers/cal.py           |  5 ++-
 sijapi/routers/email.py         |  4 +--
 sijapi/routers/llm.py           | 56 ++++++++++++++++-----------------
 sijapi/routers/tts.py           | 54 ++++++++++++++-----------------
 7 files changed, 105 insertions(+), 91 deletions(-)
 delete mode 100644 sijapi/config/dirs.yaml-example

diff --git a/sijapi/__init__.py b/sijapi/__init__.py
index 973c55f..46ba33c 100644
--- a/sijapi/__init__.py
+++ b/sijapi/__init__.py
@@ -6,11 +6,11 @@ from dotenv import load_dotenv
 from dateutil import tz
 from pathlib import Path
 from .logs import Logger
-from .classes import Database, Geocoder, APIConfig, Configuration, Dir
+from .classes import Database, Geocoder, APIConfig, Configuration, EmailConfiguration, Dir
 
 ### Initial initialization
 API = APIConfig.load('api', 'secrets')
-Dir = Dir.load('dirs')
+Dir = Dir()
 ENV_PATH = Dir.CONFIG / ".env"
 LOGS_DIR = Dir.LOGS
 L = Logger("Central", LOGS_DIR)
@@ -23,9 +23,11 @@ DB = Database.from_yaml('db.yaml')
 ASR = Configuration.load('asr')
 IMG = Configuration.load('img')
 Cal = Configuration.load('cal', 'secrets')
-Email = Configuration.load('email', 'secrets')
+print(f"Cal configuration: {Cal.__dict__}")
+Email = EmailConfiguration.load('email', 'secrets')
 LLM = Configuration.load('llm', 'secrets')
 News = Configuration.load('news', 'secrets')
+Obsidian = Configuration.load('obsidian')
 TTS = Configuration.load('tts', 'secrets')
 CourtListener = Configuration.load('courtlistener', 'secrets')
 Tailscale = Configuration.load('tailscale', 'secrets')
diff --git a/sijapi/classes.py b/sijapi/classes.py
index b1a9ad7..ba1acb8 100644
--- a/sijapi/classes.py
+++ b/sijapi/classes.py
@@ -207,7 +207,6 @@ class Configuration(BaseModel):
         try:
             with yaml_path.open('r') as file:
                 config_data = yaml.safe_load(file)
-            
             print(f"Loaded configuration data from {yaml_path}")
             
             if secrets_path:
@@ -220,7 +219,6 @@ class Configuration(BaseModel):
             instance._dir_config = dir_config or instance
 
             resolved_data = instance.resolve_placeholders(config_data)
-            
             return cls._create_nested_config(resolved_data)
         except Exception as e:
             print(f"Error loading configuration: {str(e)}")
@@ -229,6 +227,8 @@ class Configuration(BaseModel):
     @classmethod
     def _create_nested_config(cls, data):
         if isinstance(data, dict):
+            print(f"Creating nested config for: {cls.__name__}")
+            print(f"Data: {data}")
             return cls(**{k: cls._create_nested_config(v) for k, v in data.items()})
         elif isinstance(data, list):
             return [cls._create_nested_config(item) for item in data]
@@ -267,15 +267,7 @@ class Configuration(BaseModel):
         
         for match in matches:
             parts = match.split('.')
-            if len(parts) == 1:  # Internal reference
-                replacement = getattr(self._dir_config, parts[0], str(Path.home() / parts[0].lower()))
-            elif len(parts) == 2 and parts[0] == 'Dir':
-                replacement = getattr(self._dir_config, parts[1], str(Path.home() / parts[1].lower()))
-            elif len(parts) == 2 and parts[0] == 'ENV':
-                replacement = os.getenv(parts[1], '')
-            else:
-                replacement = value
-            
+            replacement = self._resolve_nested_placeholder(parts)
             value = value.replace('{{' + match + '}}', str(replacement))
         
         # Convert to Path if it looks like a file path
@@ -283,6 +275,17 @@ class Configuration(BaseModel):
             return Path(value).expanduser()
         return value
 
+    def _resolve_nested_placeholder(self, parts: List[str]) -> Any:
+        current = self._dir_config
+        for part in parts:
+            if part == 'ENV':
+                return os.getenv(parts[-1], '')
+            elif hasattr(current, part):
+                current = getattr(current, part)
+            else:
+                return str(Path.home() / part.lower())
+        return current
+
 
 class APIConfig(BaseModel):
     HOST: str
@@ -788,6 +791,31 @@ class EmailConfiguration(Configuration):
     autoresponders: List[AutoResponder]
     accounts: List[EmailAccount]
 
+    @classmethod
+    def _create_nested_config(cls, data):
+        if isinstance(data, dict):
+            if 'imaps' in data:
+                return cls(
+                    imaps=[IMAPConfig(**imap) for imap in data['imaps']],
+                    smtps=[SMTPConfig(**smtp) for smtp in data['smtps']],
+                    autoresponders=[AutoResponder(**ar) for ar in data['autoresponders']],
+                    accounts=[EmailAccount(**account) for account in data['accounts']],
+                    **{k: v for k, v in data.items() if k not in ['imaps', 'smtps', 'autoresponders', 'accounts']}
+                )
+            else:
+                return data  # Return the dict as-is for nested structures
+        elif isinstance(data, list):
+            return [cls._create_nested_config(item) for item in data]
+        else:
+            return data
+
+    @classmethod
+    def load(cls, yaml_path: Union[str, Path], secrets_path: Optional[Union[str, Path]] = None, dir_config: Optional['Configuration'] = None) -> 'EmailConfiguration':
+        config_data = super().load(yaml_path, secrets_path, dir_config)
+        return cls._create_nested_config(config_data)
+
+
+
     def get_imap(self, username: str) -> Optional[IMAPConfig]:
         return next((imap for imap in self.imaps if imap.username == username), None)
 
@@ -800,6 +828,9 @@ class EmailConfiguration(Configuration):
     def get_account(self, name: str) -> Optional[EmailAccount]:
         return next((account for account in self.accounts if account.name == name), None)
 
+    def get_email_accounts(self) -> List[EmailAccount]:
+        return self.accounts
+
 class EmailContact(BaseModel):
     email: str
     name: Optional[str] = None
diff --git a/sijapi/config/dirs.yaml-example b/sijapi/config/dirs.yaml-example
deleted file mode 100644
index 1c6667f..0000000
--- a/sijapi/config/dirs.yaml-example
+++ /dev/null
@@ -1,16 +0,0 @@
-HOME: ~
-BASE: '{{ HOME }}/workshop/sijapi'
-SIJAPI: '{{ BASE }}/sijapi'
-CONFIG: '{{ SIJAPI }}/config'
-CONFIG.email: '{{ CONFIG }}/email.yaml'
-CONFIG.img: '{{ CONFIG }}/img.yaml'
-CONFIG.news: '{{ CONFIG }}/news.yaml'
-SECRETS: '{{ CONFIG }}/secrets.yaml'
-DATA: '{{ SIJAPI }}/data'
-DATA.ALERTS: '{{ DATA }}/alerts'
-DATA.ASR: '{{ DATA }}/asr'
-DATA.BASE: '{{ DATA }}/db'
-DATA.IMG: '{{ DATA }}/img'
-DATA.TTS: '{{ DATA }}/tts'
-TTS.VOICES: '{{ TTS }}/voices'
-LOGS: '{{ SIJAPI }}/logs'
\ No newline at end of file
diff --git a/sijapi/routers/cal.py b/sijapi/routers/cal.py
index c04ab05..eadadc2 100644
--- a/sijapi/routers/cal.py
+++ b/sijapi/routers/cal.py
@@ -23,7 +23,10 @@ cal = APIRouter()
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
 timeout = httpx.Timeout(12)
 
-print(f"Configuration MS365: {Cal.MS365}")
+print(f"Cal object: {Cal}")
+print(f"Cal.__dict__: {Cal.__dict__}")
+print(f"Cal.MS365: {Cal.MS365}")
+
 if Cal.MS365.toggle == 'on':
     L.CRIT(f"Visit https://api.sij.ai/MS365/login to obtain your Microsoft 365 authentication token.")
 
diff --git a/sijapi/routers/email.py b/sijapi/routers/email.py
index 6752c97..93b741f 100644
--- a/sijapi/routers/email.py
+++ b/sijapi/routers/email.py
@@ -363,7 +363,7 @@ async def save_processed_uid(filename: Path, account_name: str, uid: str):
 
 
 async def process_all_accounts():
-    email_accounts = load_email_accounts(EMAIL_CONFIG)
+    email_accounts = Email.get_email_accounts()
     summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts]
     autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts]
     await asyncio.gather(*summarization_tasks, *autoresponding_tasks)
@@ -371,4 +371,4 @@ async def process_all_accounts():
 @email.on_event("startup")
 async def startup_event():
     await asyncio.sleep(5)
-    asyncio.create_task(process_all_accounts())
+    asyncio.create_task(process_all_accounts())
\ No newline at end of file
diff --git a/sijapi/routers/llm.py b/sijapi/routers/llm.py
index 5ad02f1..60432c8 100644
--- a/sijapi/routers/llm.py
+++ b/sijapi/routers/llm.py
@@ -26,7 +26,7 @@ import tempfile
 import shutil
 import html2text
 import markdown
-from sijapi import L, Dir, API, LLM, TTS
+from sijapi import L, Dir, API, LLM, TTS, Obsidian
 from sijapi.utilities import convert_to_unix_time, sanitize_filename, ocr_pdf, clean_text, should_use_ocr, extract_text_from_pdf, extract_text_from_docx, read_text_file, str_to_bool, get_extension
 from sijapi.routers import tts
 from sijapi.routers.asr import transcribe_audio
@@ -49,7 +49,7 @@ def read_markdown_files(folder: Path):
     return documents, file_paths
 
 # Read markdown files and generate embeddings
-documents, file_paths = read_markdown_files(DOC_DIR)
+documents, file_paths = read_markdown_files(Obsidian.docs)
 for i, doc in enumerate(documents):
     response = ollama.embeddings(model="mxbai-embed-large", prompt=doc)
     embedding = response["embedding"]
@@ -83,7 +83,7 @@ async def generate_response(prompt: str):
     return {"response": output['response']}
 
 
-async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LLM, max_tokens: int = 200):
+async def query_ollama(usr: str, sys: str = LLM.chat.sys, model: str = LLM.chat.model, max_tokens: int = LLM.chat.max_tokens):
     messages = [{"role": "system", "content": sys},
                 {"role": "user", "content": usr}]
     LLM = Ollama()
@@ -100,8 +100,8 @@ async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LL
     
 async def query_ollama_multishot(
     message_list: List[str],
-    sys: str = LLM_SYS_MSG,
-    model: str = DEFAULT_LLM,
+    sys: str = LLM.chat.sys,
+    model: str = LLM.chat.model,
-    max_tokens: int = 200
+    max_tokens: int = LLM.chat.max_tokens
 ):
     if len(message_list) % 2 == 0:
@@ -130,7 +130,7 @@ async def chat_completions(request: Request):
     body = await request.json()
 
     timestamp = dt_datetime.now().strftime("%Y%m%d_%H%M%S%f")
-    filename = REQUESTS_DIR / f"request_{timestamp}.json"
+    filename = Dir.logs.requests / f"request_{timestamp}.json"
 
     async with aiofiles.open(filename, mode='w') as file:
         await file.write(json.dumps(body, indent=4))
@@ -227,9 +227,9 @@ async def stream_messages_with_vision(message: dict, model: str, num_predict: in
         
 def get_appropriate_model(requested_model):
     if requested_model == "gpt-4-vision-preview":
-        return DEFAULT_VISION
+        return LLM.vision.model
     elif not is_model_available(requested_model):
-        return DEFAULT_LLM
+        return LLM.chat.model
     else:
         return requested_model
 
@@ -310,7 +310,7 @@ async def chat_completions_options(request: Request):
             ],
             "created": int(time.time()),
             "id": str(uuid.uuid4()),
-            "model": DEFAULT_LLM,
+            "model": LLM.chat.model,
             "object": "chat.completion.chunk",
         },
         status_code=200,
@@ -431,7 +431,7 @@ def llava(image_base64, prompt):
     return "" if "pass" in response["response"].lower() else response["response"] 
 
 def gpt4v(image_base64, prompt_sys: str, prompt_usr: str, max_tokens: int = 150):
-    VISION_LLM = OpenAI(api_key=OPENAI_API_KEY)
+    VISION_LLM = OpenAI(api_key=LLM.OPENAI_API_KEY)
     response_1 = VISION_LLM.chat.completions.create(
         model="gpt-4-vision-preview",
         messages=[
@@ -512,12 +512,12 @@ def gpt4v(image_base64, prompt_sys: str, prompt_usr: str, max_tokens: int = 150)
 
 
 @llm.get("/summarize")
-async def summarize_get(text: str = Form(None), instruction: str = Form(SUMMARY_INSTRUCT)):
+async def summarize_get(text: str = Form(None), instruction: str = Form(LLM.summary.instruct)):
     summarized_text = await summarize_text(text, instruction)
     return summarized_text
 
 @llm.post("/summarize")
-async def summarize_post(file: Optional[UploadFile] = File(None), text: Optional[str] = Form(None), instruction: str = Form(SUMMARY_INSTRUCT)):
+async def summarize_post(file: Optional[UploadFile] = File(None), text: Optional[str] = Form(None), instruction: str = Form(LLM.summary.instruct)):
     text_content = text if text else await extract_text(file)
     summarized_text = await summarize_text(text_content, instruction)
     return summarized_text
@@ -526,10 +526,10 @@ async def summarize_post(file: Optional[UploadFile] = File(None), text: Optional
 @llm.post("/speaksummary")
 async def summarize_tts_endpoint(
     bg_tasks: BackgroundTasks,
-    instruction: str = Form(SUMMARY_INSTRUCT),
+    instruction: str = Form(LLM.summary.instruct),
     file: Optional[UploadFile] = File(None),
     text: Optional[str] = Form(None),
-    voice: Optional[str] = Form(DEFAULT_VOICE),
+    voice: Optional[str] = Form(TTS.xtts.voice),
     speed: Optional[float] = Form(1.2),
     podcast: Union[bool, str] = Form(False)
 ):
@@ -572,8 +572,8 @@ async def summarize_tts_endpoint(
 
 async def summarize_tts(
     text: str, 
-    instruction: str = SUMMARY_INSTRUCT,
-    voice: Optional[str] = DEFAULT_VOICE,
+    instruction: str = LLM.summary.instruct,
+    voice: Optional[str] = TTS.xtts.voice,
     speed: float = 1.1,
     podcast: bool = False,
     LLM: Ollama = None
@@ -605,9 +605,9 @@ def split_text_into_chunks(text: str) -> List[str]:
     sentences = re.split(r'(?<=[.!?])\s+', text)
     words = text.split()
     total_words = len(words)
-    L.DEBUG(f"Total words: {total_words}. SUMMARY_CHUNK_SIZE: {SUMMARY_CHUNK_SIZE}. SUMMARY_TPW: {SUMMARY_TPW}.")
+    L.DEBUG(f"Total words: {total_words}. LLM.summary.chunk_size: {LLM.summary.chunk_size}. LLM.tpw: {LLM.tpw}.")
     
-    max_words_per_chunk = int(SUMMARY_CHUNK_SIZE / SUMMARY_TPW)
+    max_words_per_chunk = int(LLM.summary.chunk_size / LLM.tpw)
     L.DEBUG(f"Maximum words per chunk: {max_words_per_chunk}")
 
     chunks = []
@@ -633,8 +633,8 @@ def split_text_into_chunks(text: str) -> List[str]:
 
 
 def calculate_max_tokens(text: str) -> int:
-    tokens_count = max(1, int(len(text.split()) * SUMMARY_TPW))  # Ensure at least 1
-    return min(tokens_count // 4, SUMMARY_CHUNK_SIZE)
+    tokens_count = max(1, int(len(text.split()) * LLM.tpw))  # Ensure at least 1
+    return min(tokens_count // 4, LLM.summary.chunk_size)
 
 
 
@@ -694,7 +694,7 @@ async def extract_text(file: Union[UploadFile, bytes, bytearray, str, Path], bg_
         raise ValueError(f"Error extracting text: {str(e)}")
 
 
-async def summarize_text(text: str, instruction: str = SUMMARY_INSTRUCT, length_override: int = None, length_quotient: float = SUMMARY_LENGTH_RATIO, LLM: Ollama = None):
+async def summarize_text(text: str, instruction: str = LLM.summary.instruct, length_override: int = None, length_quotient: float = LLM.summary.length_ratio, LLM: Ollama = None):
     LLM = LLM if LLM else Ollama()
 
     chunked_text = split_text_into_chunks(text)
@@ -703,12 +703,12 @@ async def summarize_text(text: str, instruction: str = SUMMARY_INSTRUCT, length_
 
     total_words_count = sum(len(chunk.split()) for chunk in chunked_text)
     L.DEBUG(f"Total words count: {total_words_count}")
-    total_tokens_count = max(1, int(total_words_count * SUMMARY_TPW))
+    total_tokens_count = max(1, int(total_words_count * LLM.tpw))
     L.DEBUG(f"Total tokens count: {total_tokens_count}")
 
     total_summary_length = length_override if length_override else total_tokens_count // length_quotient
     L.DEBUG(f"Total summary length: {total_summary_length}")
-    corrected_total_summary_length = min(total_summary_length, SUMMARY_TOKEN_LIMIT)
+    corrected_total_summary_length = min(total_summary_length, LLM.summary.max_tokens)
     L.DEBUG(f"Corrected total summary length: {corrected_total_summary_length}")
 
     summaries = await asyncio.gather(*[
@@ -738,11 +738,11 @@ async def process_chunk(instruction: str, text: str, part: int, total_parts: int
     LLM = LLM if LLM else Ollama()
 
     words_count = len(text.split())
-    tokens_count = max(1, int(words_count * SUMMARY_TPW))
+    tokens_count = max(1, int(words_count * LLM.tpw))
 
-    summary_length_ratio = length_ratio if length_ratio else SUMMARY_LENGTH_RATIO
-    max_tokens = min(tokens_count // summary_length_ratio, SUMMARY_CHUNK_SIZE)
-    max_tokens = max(max_tokens, SUMMARY_MIN_LENGTH)
+    summary_length_ratio = length_ratio if length_ratio else LLM.summary.length_ratio
+    max_tokens = min(tokens_count // summary_length_ratio, LLM.summary.chunk_size)
+    max_tokens = max(max_tokens, LLM.summary.min_length)
     
     L.DEBUG(f"Processing part {part} of {total_parts}: Words: {words_count}, Estimated tokens: {tokens_count}, Max output tokens: {max_tokens}")
     
@@ -753,7 +753,7 @@ async def process_chunk(instruction: str, text: str, part: int, total_parts: int
     
     L.DEBUG(f"Starting LLM.generate for part {part} of {total_parts}")
     response = await LLM.generate(
-        model=SUMMARY_MODEL, 
+        model=LLM.summary.model, 
         prompt=prompt,
         stream=False,
         options={'num_predict': max_tokens, 'temperature': 0.5}
diff --git a/sijapi/routers/tts.py b/sijapi/routers/tts.py
index 2972607..e1141d4 100644
--- a/sijapi/routers/tts.py
+++ b/sijapi/routers/tts.py
@@ -12,7 +12,7 @@ import asyncio
 from pydantic import BaseModel
 from typing import Optional, Union, List
 from pydub import AudioSegment
-from TTS.api import TTS
+from TTS.api import TTS as XTTSv2
 from pathlib import Path
 from datetime import datetime as dt_datetime
 from time import time
@@ -25,7 +25,7 @@ import tempfile
 import random
 import re
 import os
-from sijapi import L, DEFAULT_VOICE, TTS_SEGMENTS_DIR, VOICE_DIR, PODCAST_DIR, TTS_OUTPUT_DIR, ELEVENLABS_API_KEY
+from sijapi import L, Dir, API, TTS
 from sijapi.utilities import sanitize_filename
 
 
@@ -39,14 +39,14 @@ MODEL_NAME = "tts_models/multilingual/multi-dataset/xtts_v2"
 
 @tts.get("/tts/local_voices", response_model=List[str])
 async def list_wav_files():
-    wav_files = [file.split('.')[0] for file in os.listdir(VOICE_DIR) if file.endswith(".wav")]
+    wav_files = [file.split('.')[0] for file in os.listdir(Dir.data.tts.voices) if file.endswith(".wav")]
     return wav_files
 
 @tts.get("/tts/elevenlabs_voices")
 async def list_11l_voices():
     formatted_list = ""
     url = "https://api.elevenlabs.io/v1/voices"
-    headers = {"xi-api-key": ELEVENLABS_API_KEY}
+    headers = {"xi-api-key": TTS.elevenlabs.api_key}
     async with httpx.AsyncClient() as client:
         try:
             response = await client.get(url, headers=headers)
@@ -71,10 +71,10 @@ async def select_voice(voice_name: str) -> str:
         # Case Insensitive comparison
         voice_name_lower = voice_name.lower()
         L.DEBUG(f"Looking for {voice_name_lower}")
-        for item in VOICE_DIR.iterdir():
+        for item in Dir.data.tts.voices.iterdir():
             L.DEBUG(f"Checking {item.name.lower()}")
             if item.name.lower() == f"{voice_name_lower}.wav":
-                L.DEBUG(f"select_voice received query to use voice: {voice_name}. Found {item} inside {VOICE_DIR}.")
+                L.DEBUG(f"select_voice received query to use voice: {voice_name}. Found {item} inside {Dir.data.tts.voices}.")
                 return str(item)
 
         L.ERR(f"Voice file not found")
@@ -131,7 +131,7 @@ async def generate_speech(
     title: str = None,
     output_dir = None
 ) -> str:
-    output_dir = Path(output_dir) if output_dir else TTS_OUTPUT_DIR
+    output_dir = Path(output_dir) if output_dir else Dir.data.tts.outputs
     if not output_dir.exists():
         output_dir.mkdir(parents=True)
 
@@ -149,7 +149,7 @@ async def generate_speech(
         #    raise HTTPException(status_code=400, detail="Invalid model specified")
 
         if podcast == True:
-            podcast_path = Path(PODCAST_DIR) / audio_file_path.name
+            podcast_path = TTS.podcast_dir / audio_file_path.name
             L.DEBUG(f"Podcast path: {podcast_path}")
             shutil.copy(str(audio_file_path), str(podcast_path))
             bg_tasks.add_task(os.remove, str(audio_file_path))
@@ -196,7 +196,7 @@ async def determine_voice_id(voice_name: str) -> str:
 
     L.DEBUG(f"Requested voice not among the hardcoded options.. checking with 11L next.")
     url = "https://api.elevenlabs.io/v1/voices"
-    headers = {"xi-api-key": ELEVENLABS_API_KEY}
+    headers = {"xi-api-key": TTS.elevenlabs.api_key}
     async with httpx.AsyncClient() as client:
         try:
             response = await client.get(url, headers=headers)
@@ -222,10 +222,10 @@ async def elevenlabs_tts(model: str, input_text: str, voice: str, title: str = N
         "text": input_text,
         "model_id": model
     }
-    headers = {"Content-Type": "application/json", "xi-api-key": ELEVENLABS_API_KEY}
+    headers = {"Content-Type": "application/json", "xi-api-key": TTS.elevenlabs.api_key}
     async with httpx.AsyncClient(timeout=httpx.Timeout(300.0)) as client:  # 5 minutes timeout
         response = await client.post(url, json=payload, headers=headers)
-        output_dir = output_dir if output_dir else TTS_OUTPUT_DIR
+        output_dir = output_dir if output_dir else Dir.data.tts.outputs
         title = title if title else dt_datetime.now().strftime("%Y%m%d%H%M%S")
         filename = f"{sanitize_filename(title)}.mp3"
         file_path = Path(output_dir) / filename
@@ -236,9 +236,6 @@ async def elevenlabs_tts(model: str, input_text: str, voice: str, title: str = N
         else:
             raise HTTPException(status_code=response.status_code, detail="Error from ElevenLabs API")
 
-
-
-
 async def get_text_content(text: Optional[str], file: Optional[UploadFile]) -> str:
     if file:
         return (await file.read()).decode("utf-8").strip()
@@ -247,20 +244,17 @@ async def get_text_content(text: Optional[str], file: Optional[UploadFile]) -> s
     else:
         raise HTTPException(status_code=400, detail="No text provided")
 
-
-
 async def get_voice_file_path(voice: str = None, voice_file: UploadFile = None) -> str:
     if voice:
         L.DEBUG(f"Looking for voice: {voice}")
-        selected_voice = await select_voice(voice)
+        selected_voice = await select_voice(voice)
         return selected_voice
     elif voice_file and isinstance(voice_file, UploadFile):
-        VOICE_DIR.mkdir(exist_ok=True)
-
+        Dir.data.tts.voices.mkdir(exist_ok=True)
         content = await voice_file.read()
         checksum = hashlib.md5(content).hexdigest()
 
-        existing_file = VOICE_DIR / voice_file.filename
+        existing_file = Dir.data.tts.voices / voice_file.filename
         if existing_file.is_file():
             with open(existing_file, 'rb') as f:
                 existing_checksum = hashlib.md5(f.read()).hexdigest()
@@ -272,7 +266,7 @@ async def get_voice_file_path(voice: str = None, voice_file: UploadFile = None)
         counter = 1
         new_file = existing_file
         while new_file.is_file():
-            new_file = VOICE_DIR / f"{base_name}{counter:02}.wav"
+            new_file = Dir.data.tts.voices / f"{base_name}{counter:02}.wav"
             counter += 1
 
         with open(new_file, 'wb') as f:
@@ -280,8 +274,8 @@ async def get_voice_file_path(voice: str = None, voice_file: UploadFile = None)
         return str(new_file)
     
     else:
-        L.DEBUG(f"{dt_datetime.now().strftime('%Y%m%d%H%M%S')}: No voice specified or file provided, using default voice: {DEFAULT_VOICE}")
-        selected_voice = await select_voice(DEFAULT_VOICE)
+        L.DEBUG(f"{dt_datetime.now().strftime('%Y%m%d%H%M%S')}: No voice specified or file provided, using default voice: {TTS.xtts.voice}")
+        selected_voice = await select_voice(TTS.xtts.voice)
         return selected_voice
 
 
@@ -302,7 +296,7 @@ async def local_tts(
         datetime_str = dt_datetime.now().strftime("%Y%m%d%H%M%S")
         title = sanitize_filename(title) if title else "Audio"
         filename = f"{datetime_str}_{title}.wav"
-        file_path = TTS_OUTPUT_DIR / filename
+        file_path = Dir.data.tts.outputs / filename
 
     # Ensure the parent directory exists
     file_path.parent.mkdir(parents=True, exist_ok=True)
@@ -310,14 +304,14 @@ async def local_tts(
     voice_file_path = await get_voice_file_path(voice, voice_file)
     
     # Initialize TTS model in a separate thread
-    XTTS = await asyncio.to_thread(TTS, model_name=MODEL_NAME)
+    XTTS = await asyncio.to_thread(XTTSv2, model_name=MODEL_NAME)
     await asyncio.to_thread(XTTS.to, DEVICE)
 
     segments = split_text(text_content)
     combined_audio = AudioSegment.silent(duration=0)
 
     for i, segment in enumerate(segments):
-        segment_file_path = TTS_SEGMENTS_DIR / f"segment_{i}.wav"
+        segment_file_path = Dir.data.tts.segments / f"segment_{i}.wav"
         L.DEBUG(f"Segment file path: {segment_file_path}")
         
         # Run TTS in a separate thread
@@ -340,7 +334,7 @@ async def local_tts(
 
     # Export the combined audio in a separate thread
     if podcast:
-        podcast_file_path = Path(PODCAST_DIR) / file_path.name
+        podcast_file_path = Path(TTS.podcast_dir) / file_path.name
         await asyncio.to_thread(combined_audio.export, podcast_file_path, format="wav")
     
     await asyncio.to_thread(combined_audio.export, file_path, format="wav")
@@ -368,7 +362,7 @@ async def stream_tts(text_content: str, speed: float, voice: str, voice_file) ->
 async def generate_tts(text: str, speed: float, voice_file_path: str) -> str:
     output_dir = tempfile.mktemp(suffix=".wav", dir=tempfile.gettempdir())
 
-    XTTS = TTS(model_name=MODEL_NAME).to(DEVICE)
+    XTTS = XTTSv2(model_name=MODEL_NAME).to(DEVICE)
     XTTS.tts_to_file(text=text, speed=speed, file_path=output_dir, speaker_wav=[voice_file_path], language="en")
 
     return output_dir
@@ -381,7 +375,7 @@ async def get_audio_stream(model: str, input_text: str, voice: str):
         "text": input_text,
         "model_id": "eleven_turbo_v2"
     }
-    headers = {"Content-Type": "application/json", "xi-api-key": ELEVENLABS_API_KEY}
+    headers = {"Content-Type": "application/json", "xi-api-key": TTS.elevenlabs.api_key}
     response = requests.post(url, json=payload, headers=headers)
 
     if response.status_code == 200:
@@ -434,7 +428,7 @@ def copy_to_podcast_dir(file_path):
         file_name = Path(file_path).name
         
         # Construct the destination path in the PODCAST_DIR
-        destination_path = Path(PODCAST_DIR) / file_name
+        destination_path = TTS.podcast_dir / file_name
         
         # Copy the file to the PODCAST_DIR
         shutil.copy(file_path, destination_path)