Auto-update: Sat Jun 29 11:58:22 PDT 2024

This commit is contained in:
sanj 2024-06-29 11:58:22 -07:00
parent 9b6e64540d
commit f6cbe5b3b7
7 changed files with 238 additions and 71 deletions

View file

@ -9,27 +9,22 @@ from dateutil import tz
from pathlib import Path from pathlib import Path
from pydantic import BaseModel from pydantic import BaseModel
from typing import List, Optional from typing import List, Optional
import traceback
import logging
from .logs import Logger from .logs import Logger
from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, Database, Geocoder from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, Database, Geocoder, APIConfig, Configuration
# from sijapi.config.config import load_config
# cfg = load_config()
### Initial initialization ### Initial initialization
BASE_DIR = Path(__file__).resolve().parent BASE_DIR = Path(__file__).resolve().parent
CONFIG_DIR = BASE_DIR / "config" CONFIG_DIR = BASE_DIR / "config"
ENV_PATH = CONFIG_DIR / ".env" ENV_PATH = CONFIG_DIR / ".env"
LOGS_DIR = BASE_DIR / "logs" LOGS_DIR = BASE_DIR / "logs"
# Create logger instance
L = Logger("Central", LOGS_DIR) L = Logger("Central", LOGS_DIR)
os.makedirs(LOGS_DIR, exist_ok=True) os.makedirs(LOGS_DIR, exist_ok=True)
load_dotenv(ENV_PATH) load_dotenv(ENV_PATH)
### API essentials ### API essentials
API_CONFIG_PATH = CONFIG_DIR / "api.yaml"
SECRETS_CONFIG_PATH = CONFIG_DIR / "secrets.yaml"
API = APIConfig.load_from_yaml(API_CONFIG_PATH, SECRETS_CONFIG_PATH)
DB = Database.from_env() DB = Database.from_env()
ROUTERS = os.getenv('ROUTERS', '').split(',') ROUTERS = os.getenv('ROUTERS', '').split(',')
PUBLIC_SERVICES = os.getenv('PUBLIC_SERVICES', '').split(',') PUBLIC_SERVICES = os.getenv('PUBLIC_SERVICES', '').split(',')

View file

@ -18,9 +18,8 @@ from dotenv import load_dotenv
from pathlib import Path from pathlib import Path
from datetime import datetime from datetime import datetime
import argparse import argparse
from . import L, LOGS_DIR, OBSIDIAN_VAULT_DIR from . import L, API, OBSIDIAN_VAULT_DIR
from .logs import Logger from .logs import Logger
from .utilities import list_and_correct_impermissible_files
parser = argparse.ArgumentParser(description='Personal API.') parser = argparse.ArgumentParser(description='Personal API.')
parser.add_argument('--debug', action='store_true', help='Set log level to L.INFO') parser.add_argument('--debug', action='store_true', help='Set log level to L.INFO')
@ -106,6 +105,7 @@ async def handle_exception_middleware(request: Request, call_next):
def load_router(router_name): def load_router(router_name):
router_file = ROUTER_DIR / f'{router_name}.py' router_file = ROUTER_DIR / f'{router_name}.py'
L.DEBUG(f"Attempting to load {router_name.capitalize()}...") L.DEBUG(f"Attempting to load {router_name.capitalize()}...")
@ -127,15 +127,15 @@ def main(argv):
else: else:
L.CRIT(f"sijapi launched") L.CRIT(f"sijapi launched")
L.CRIT(f"{args._get_args}") L.CRIT(f"{args._get_args}")
for router_name in ROUTERS: for module_name in API.MODULES.__fields__:
load_router(router_name) if getattr(API.MODULES, module_name):
load_router(module_name)
journal = OBSIDIAN_VAULT_DIR / "journal"
list_and_correct_impermissible_files(journal, rename=True)
config = Config() config = Config()
config.keep_alive_timeout = 1200 config.keep_alive_timeout = 1200
config.bind = [HOST] config.bind = [API.BIND]
asyncio.run(serve(api, config)) asyncio.run(serve(api, config))
if __name__ == "__main__": if __name__ == "__main__":
main(sys.argv[1:]) main(sys.argv[1:])

View file

@ -3,6 +3,8 @@ from typing import List, Optional, Any, Tuple, Dict, Union, Tuple
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
import asyncio import asyncio
import json import json
import os
import re
import yaml import yaml
import math import math
from timezonefinder import TimezoneFinder from timezonefinder import TimezoneFinder
@ -15,6 +17,160 @@ from concurrent.futures import ThreadPoolExecutor
import reverse_geocoder as rg import reverse_geocoder as rg
from timezonefinder import TimezoneFinder from timezonefinder import TimezoneFinder
from srtm import get_data from srtm import get_data
from pathlib import Path
import yaml
from typing import Union, List, TypeVar, Type
from pydantic import BaseModel, create_model
from pydantic import BaseModel, Field
from typing import List, Dict
import yaml
from pathlib import Path
import os
from dotenv import load_dotenv
T = TypeVar('T', bound='Configuration')
class ModulesConfig(BaseModel):
    """Feature flags controlling which API router modules are enabled.

    Each field mirrors a key under ``MODULES`` in api.yaml; values arrive as
    booleans after APIConfig.load_from_yaml normalizes 'on'/'off' strings.
    """

    # NOTE(review): the original declared Field(alias=<field name>) on every
    # field; an alias identical to the field name is a no-op in pydantic,
    # so the redundant Field() wrappers are dropped here.
    asr: bool
    calendar: bool
    email: bool
    health: bool
    hooks: bool
    llm: bool
    locate: bool
    note: bool
    sd: bool
    serve: bool
    time: bool
    tts: bool
    weather: bool
class APIConfig(BaseModel):
    """Top-level API configuration loaded from api.yaml plus secrets.yaml."""

    BIND: str                       # bind address handed to hypercorn's Config.bind
    PORT: int
    URL: str
    PUBLIC: List[str]               # endpoints reachable without an API key
    TRUSTED_SUBNETS: List[str]
    MODULES: ModulesConfig          # per-router on/off switches
    BaseTZ: Optional[str] = 'UTC'
    KEYS: List[str]                 # API keys; may be injected from secrets.yaml

    @classmethod
    def load_from_yaml(cls, config_path: Path, secrets_path: Path):
        """Build an APIConfig from the main config file plus an optional secrets file.

        ``config_path`` holds the main configuration; ``secrets_path`` may supply
        the real KEYS list when the main file uses a ``{{ SECRET.<name> }}``
        placeholder. A missing or malformed secrets file is tolerated (the
        placeholder is then left unresolved and validation decides the outcome).
        """
        # Load main configuration
        with open(config_path, 'r') as file:
            config_data = yaml.safe_load(file)
        print(f"Loaded main config: {config_data}")  # Debug print

        # Load secrets (best-effort: absence or bad YAML yields an empty mapping)
        try:
            with open(secrets_path, 'r') as file:
                secrets_data = yaml.safe_load(file)
            print(f"Loaded secrets: {secrets_data}")  # Debug print
        except FileNotFoundError:
            print(f"Secrets file not found: {secrets_path}")
            secrets_data = {}
        except yaml.YAMLError as e:
            print(f"Error parsing secrets YAML: {e}")
            secrets_data = {}

        # Handle a single "{{ SECRET.<key> }}" placeholder in KEYS.
        if isinstance(config_data.get('KEYS'), list) and len(config_data['KEYS']) == 1:
            placeholder = config_data['KEYS'][0]
            # BUGFIX: the original called .startswith() unconditionally and
            # crashed when the single entry was not a string (e.g. a literal
            # key given as a non-str scalar). Guard with isinstance first.
            if isinstance(placeholder, str) and placeholder.startswith('{{') and placeholder.endswith('}}'):
                key = placeholder[2:-2].strip()  # Remove {{ }} and whitespace
                parts = key.split('.')
                if len(parts) == 2 and parts[0] == 'SECRET':
                    secret_key = parts[1]
                    if secret_key in secrets_data:
                        config_data['KEYS'] = secrets_data[secret_key]
                        print(f"Replaced KEYS with secret: {config_data['KEYS']}")  # Debug print
                    else:
                        print(f"Secret key '{secret_key}' not found in secrets file")
                else:
                    print(f"Invalid secret placeholder format: {placeholder}")

        # Convert 'on'/'off' to boolean for MODULES if they are strings.
        # BUGFIX: use .get() so a config without a MODULES section fails in
        # pydantic validation with a clear error instead of a raw KeyError here.
        for key, value in config_data.get('MODULES', {}).items():
            if isinstance(value, str):
                config_data['MODULES'][key] = value.lower() == 'on'
            elif isinstance(value, bool):
                config_data['MODULES'][key] = value
            else:
                raise ValueError(f"Invalid value for module {key}: {value}. Must be 'on', 'off', True, or False.")

        return cls(**config_data)
class Configuration(BaseModel):
    """Generic YAML-backed config model with {{ ... }} placeholder resolution."""

    @classmethod
    def load_config(cls: Type[T], yaml_path: Union[str, Path]) -> Union[T, List[T]]:
        """Load YAML from ``yaml_path`` into dynamic model instance(s).

        A top-level YAML list yields a list of models; a mapping yields one
        model; anything else raises ValueError.
        """
        yaml_path = Path(yaml_path)
        with yaml_path.open('r') as file:
            config_data = yaml.safe_load(file)

        # Environment variables back the {{ ... }} placeholders below.
        load_dotenv()

        # Resolve placeholders
        config_data = cls.resolve_placeholders(config_data)

        if isinstance(config_data, list):
            return [cls.create_dynamic_model(**cfg) for cfg in config_data]
        elif isinstance(config_data, dict):
            return cls.create_dynamic_model(**config_data)
        else:
            raise ValueError(f"Unsupported YAML structure in {yaml_path}")

    @classmethod
    def resolve_placeholders(cls, data):
        """Recursively resolve placeholders in dicts, lists and strings."""
        if isinstance(data, dict):
            return {k: cls.resolve_placeholders(v) for k, v in data.items()}
        elif isinstance(data, list):
            return [cls.resolve_placeholders(v) for v in data]
        elif isinstance(data, str):
            return cls.resolve_string_placeholders(data)
        else:
            return data

    @classmethod
    def resolve_string_placeholders(cls, value):
        """Replace {{ DIR.x }} / {{ SECRET.x }} / {{ x }} tokens with env values.

        BUGFIX: the original captured the inner expression with a greedy
        ``[^}]+`` (which swallowed trailing whitespace) and then rebuilt the
        token as ``'{{' + match + '}}'`` for str.replace(); with spaces inside
        the braces (e.g. ``{{ DIR.HOME }}``) the rebuilt token never matched
        the source string, so substitution silently failed. Substituting the
        full regex match via re.sub fixes both problems.
        """
        pattern = r'\{\{\s*([^}]+?)\s*\}\}'

        def _substitute(match_obj):
            expr = match_obj.group(1)
            parts = expr.split('.')
            if len(parts) == 2:
                category, key = parts
                if category == 'DIR':
                    return str(Path(os.getenv(key, '')))
                elif category == 'SECRET':
                    return os.getenv(key, '')
            # Fallback: treat the whole expression as an env var name.
            return os.getenv(expr, '')

        return re.sub(pattern, _substitute, value)

    @classmethod
    def create_dynamic_model(cls, **data):
        """Build a model subclass whose fields mirror ``data``'s keys and types."""
        # Nested mappings become nested dynamic models first.
        for key, value in data.items():
            if isinstance(value, dict):
                data[key] = cls.create_dynamic_model(**value)

        DynamicModel = create_model(
            f'Dynamic{cls.__name__}',
            __base__=cls,
            **{k: (type(v), v) for k, v in data.items()}
        )
        return DynamicModel(**data)

    class Config:
        extra = "allow"
        arbitrary_types_allowed = True
class Location(BaseModel): class Location(BaseModel):
latitude: float latitude: float

View file

@ -0,0 +1,3 @@
GLOBAL_API_KEYS:
- sk-YOUR-FIRST-API-KEY
- sk-YOUR-SECOND-API-KEY

View file

@ -483,15 +483,15 @@ def update_prompt(workflow: dict, post: dict, positive: str, found_key=[None], p
for index, item in enumerate(value): for index, item in enumerate(value):
update_prompt(item, post, positive, found_key, current_path + [str(index)]) update_prompt(item, post, positive, found_key, current_path + [str(index)])
if value == "API_PPrompt": if value == "API_PrePrompt":
workflow[key] = post.get(value, "") + positive workflow[key] = post.get(value, "") + positive
L.DEBUG(f"Updated API_PPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_PrePrompt to: {workflow[key]}")
elif value == "API_SPrompt": elif value == "API_StylePrompt":
workflow[key] = post.get(value, "") workflow[key] = post.get(value, "")
L.DEBUG(f"Updated API_SPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_StylePrompt to: {workflow[key]}")
elif value == "API_NPrompt": elif value == "API_NegativePrompt":
workflow[key] = post.get(value, "") workflow[key] = post.get(value, "")
L.DEBUG(f"Updated API_NPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_NegativePrompt to: {workflow[key]}")
elif key == "seed" or key == "noise_seed": elif key == "seed" or key == "noise_seed":
workflow[key] = random.randint(1000000000000, 9999999999999) workflow[key] = random.randint(1000000000000, 9999999999999)
L.DEBUG(f"Updated seed to: {workflow[key]}") L.DEBUG(f"Updated seed to: {workflow[key]}")
@ -507,7 +507,7 @@ def update_prompt(workflow: dict, post: dict, positive: str, found_key=[None], p
return found_key[0] return found_key[0]
def update_prompt_custom(workflow: dict, API_PPrompt: str, API_SPrompt: str, API_NPrompt: str, found_key=[None], path=None): def update_prompt_custom(workflow: dict, API_PrePrompt: str, API_StylePrompt: str, API_NegativePrompt: str, found_key=[None], path=None):
if path is None: if path is None:
path = [] path = []
@ -519,21 +519,21 @@ def update_prompt_custom(workflow: dict, API_PPrompt: str, API_SPrompt: str, API
if isinstance(value, dict): if isinstance(value, dict):
if value.get('class_type') == 'SaveImage' and value.get('inputs', {}).get('filename_prefix') == 'API_': if value.get('class_type') == 'SaveImage' and value.get('inputs', {}).get('filename_prefix') == 'API_':
found_key[0] = key found_key[0] = key
update_prompt(value, API_PPrompt, API_SPrompt, API_NPrompt, found_key, current_path) update_prompt(value, API_PrePrompt, API_StylePrompt, API_NegativePrompt, found_key, current_path)
elif isinstance(value, list): elif isinstance(value, list):
# Recursive call with updated path for each item in a list # Recursive call with updated path for each item in a list
for index, item in enumerate(value): for index, item in enumerate(value):
update_prompt(item, API_PPrompt, API_SPrompt, API_NPrompt, found_key, current_path + [str(index)]) update_prompt(item, API_PrePrompt, API_StylePrompt, API_NegativePrompt, found_key, current_path + [str(index)])
if value == "API_PPrompt": if value == "API_PrePrompt":
workflow[key] = API_PPrompt workflow[key] = API_PrePrompt
L.DEBUG(f"Updated API_PPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_PrePrompt to: {workflow[key]}")
elif value == "API_SPrompt": elif value == "API_StylePrompt":
workflow[key] = API_SPrompt workflow[key] = API_StylePrompt
L.DEBUG(f"Updated API_SPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_StylePrompt to: {workflow[key]}")
elif value == "API_NPrompt": elif value == "API_NegativePrompt":
workflow[key] = API_NPrompt workflow[key] = API_NegativePrompt
L.DEBUG(f"Updated API_NPrompt to: {workflow[key]}") L.DEBUG(f"Updated API_NegativePrompt to: {workflow[key]}")
elif key == "seed" or key == "noise_seed": elif key == "seed" or key == "noise_seed":
workflow[key] = random.randint(1000000000000, 9999999999999) workflow[key] = random.randint(1000000000000, 9999999999999)
L.DEBUG(f"Updated seed to: {workflow[key]}") L.DEBUG(f"Updated seed to: {workflow[key]}")
@ -682,9 +682,9 @@ def handle_custom_image(custom_post: str):
else: else:
workflow_name = args.workflow if args.workflow else "selfie" workflow_name = args.workflow if args.workflow else "selfie"
post = { post = {
"API_PPrompt": "", "API_PrePrompt": "",
"API_SPrompt": "; (((masterpiece))); (beautiful lighting:1), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.", "API_StylePrompt": "; (((masterpiece))); (beautiful lighting:1), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
"API_NPrompt": "canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3", "API_NegativePrompt": "canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3",
"Vision_Prompt": "Write an upbeat Instagram description with emojis to accompany this selfie!", "Vision_Prompt": "Write an upbeat Instagram description with emojis to accompany this selfie!",
"frequency": 2, "frequency": 2,
"ghost_tags": [ "ghost_tags": [

View file

@ -32,12 +32,43 @@ from pathlib import Path
from fastapi import APIRouter, Query, HTTPException from fastapi import APIRouter, Query, HTTPException
from sijapi import L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, ARCHIVE_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO from sijapi import L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, ARCHIVE_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, GEO
from sijapi.routers import cal, loc, tts, llm, time, sd, weather, asr from sijapi.routers import cal, loc, tts, llm, time, sd, weather, asr
from sijapi.utilities import assemble_journal_path, assemble_archive_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, HOURLY_COLUMNS_MAPPING from sijapi.utilities import assemble_journal_path, assemble_archive_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING
from sijapi.classes import Location from sijapi.classes import Location
note = APIRouter() note = APIRouter()
def list_and_correct_impermissible_files(root_dir, rename: bool = False):
    """Collect files whose names fail check_file_name(); optionally rename them.

    Walks ``root_dir`` recursively. For each offending file a sanitized name is
    computed via sanitize_filename(), disambiguated with a numeric suffix if the
    target already exists, and — only when ``rename`` is True — applied on disk.
    Returns the list of offending paths (the original, pre-rename paths).
    """
    offenders = []
    for dirpath, _, filenames in os.walk(root_dir):
        parent = Path(dirpath)
        for original_name in filenames:
            if not check_file_name(original_name):
                continue
            original_path = parent / original_name
            offenders.append(original_path)
            L.DEBUG(f"Impermissible file found: {original_path}")

            # Sanitize, then disambiguate with _1, _2, ... while a collision remains.
            candidate = sanitize_filename(original_name)
            target = parent / candidate
            if target.exists():
                stem, ext = os.path.splitext(candidate)
                suffix = 1
                while target.exists():
                    candidate = f"{stem}_{suffix}{ext}"
                    target = parent / candidate
                    suffix += 1

            if rename:
                os.rename(original_path, target)
                L.DEBUG(f"Renamed: {original_path} -> {target}")
    return offenders
# NOTE(review): this runs at import time of the router module — it walks the
# entire journal tree and (rename=True) renames files as a side effect on every
# import. Confirm that import-time mutation of the vault is intended.
journal = OBSIDIAN_VAULT_DIR / "journal"
list_and_correct_impermissible_files(journal, rename=True)
### Daily Note Builder ### ### Daily Note Builder ###

View file

@ -6,6 +6,7 @@ import io
from io import BytesIO from io import BytesIO
import base64 import base64
import math import math
import paramiko
from dateutil import parser from dateutil import parser
from pathlib import Path from pathlib import Path
import filetype import filetype
@ -21,7 +22,6 @@ import pandas as pd
from scipy.spatial import cKDTree from scipy.spatial import cKDTree
from dateutil.parser import parse as dateutil_parse from dateutil.parser import parse as dateutil_parse
from docx import Document from docx import Document
import asyncpg
from sshtunnel import SSHTunnelForwarder from sshtunnel import SSHTunnelForwarder
from fastapi import Depends, HTTPException, Request, UploadFile from fastapi import Depends, HTTPException, Request, UploadFile
from fastapi.security.api_key import APIKeyHeader from fastapi.security.api_key import APIKeyHeader
@ -192,37 +192,6 @@ def check_file_name(file_name, max_length=255):
return needs_sanitization return needs_sanitization
def list_and_correct_impermissible_files(root_dir, rename: bool = False):
    """List and correct all files with impermissible names.

    Walks root_dir recursively; every file whose name fails check_file_name()
    is recorded and, when ``rename`` is True, renamed on disk to its sanitized
    form. Returns the list of offending paths (pre-rename).
    """
    impermissible_files = []
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            # check_file_name() returns truthy when the name needs sanitization.
            if check_file_name(filename):
                file_path = Path(dirpath) / filename
                impermissible_files.append(file_path)
                L.DEBUG(f"Impermissible file found: {file_path}")

                # Sanitize the file name
                new_filename = sanitize_filename(filename)
                new_file_path = Path(dirpath) / new_filename

                # Ensure the new file name does not already exist: append
                # _1, _2, ... before the extension until the path is free.
                if new_file_path.exists():
                    counter = 1
                    base_name, ext = os.path.splitext(new_filename)
                    while new_file_path.exists():
                        new_filename = f"{base_name}_{counter}{ext}"
                        new_file_path = Path(dirpath) / new_filename
                        counter += 1

                # Rename the file only when the caller opted in.
                if rename:
                    os.rename(file_path, new_file_path)
                    L.DEBUG(f"Renamed: {file_path} -> {new_file_path}")

    return impermissible_files
def bool_convert(value: str = Form(None)): def bool_convert(value: str = Form(None)):
return value.lower() in ["true", "1", "t", "y", "yes"] return value.lower() in ["true", "1", "t", "y", "yes"]
@ -473,3 +442,16 @@ def load_geonames_data(path: str):
return data return data
async def run_ssh_command(server, command):
    """Execute ``command`` on ``server`` over SSH; return (stdout_text, stderr_text).

    ``server`` is expected to expose ``server.ssh.host`` / ``.user`` /
    ``.password`` and an ``id`` used in error logs. Failures are logged via
    L.ERR and re-raised to the caller.

    NOTE(review): paramiko is synchronous — connect/exec block the event loop
    for the duration of the command; consider run_in_executor for long jobs.
    """
    ssh = paramiko.SSHClient()
    # Accept unknown host keys automatically (trusted-network assumption).
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(server.ssh.host, username=server.ssh.user, password=server.ssh.password)
        stdin, stdout, stderr = ssh.exec_command(command)
        output = stdout.read().decode()
        error = stderr.read().decode()
        return output, error
    except Exception as e:
        L.ERR(f"SSH command failed for server {server.id}: {str(e)}")
        raise
    finally:
        # BUGFIX: the original closed the client only on the success path,
        # leaking the connection whenever connect()/exec_command() raised.
        ssh.close()