Compare commits
No commits in common. "main" and "3df57462e25bed62704d5321071577324c30f91b" have entirely different histories.
main...3df57462e2
148 changed files with 9789 additions and 144250 deletions
.gitignore (vendored): 72 changed lines
@@ -1,72 +0,0 @@
# Ignore specific data files and directories
sijapi/data/calendar.ics
sijapi/data/asr/
sijapi/data/courtlistener/
sijapi/data/db/
sijapi/data/geocoder/
sijapi/data/geonames.txt
sijapi/data/img/images/
sijapi/data/timing/
sijapi/data/tts/
sijapi/data/img/workflows/private
sijapi/data/*.pbf
sijapi/data/*.json
sijapi/data/*.geojson
sijapi/config/*.yaml
sijapi/config/ms365/
sijapi/data/ms365/
sijapi/local_only/
sijapi/testbed/
khoj/
r2r/
podcast/sideloads/*


**/.env
**/.config.yaml
**/*.log
**/logs/
**/__pycache__
**/.DS_Store
**/*.ics
**/*.sqlite
**/*.geojson
**/private/
**/*sync-conflict*.*
**/*.db
**/*.mp3
**/*.mp4
**/*.wav
**/*.pyc
**/.ipynb_checkpoints/
**/*.pem
**/*.key
**/*.sql
venv/
env/
.venv/
.vscode/
.nova/
.idea/
*~
*.swp
*.swo
*.com
*.class
*.dll
*.exe
*.o
*.so
*.7z
*.dmg
*.gz
*.iso
*.jar
*.key
*.pem
*.rar
*.tar
*.zip
ehthumbs.db
Thumbs.db
sijapi/testbed/
.gitmodules (vendored): 3 changed lines
@@ -1,3 +0,0 @@
[submodule "r2r"]
	path = r2r
	url = https://github.com/SciPhi-AI/R2R.git
@@ -1,3 +0,0 @@
This is designed to work with UserScripts and similar browser extensions. Fill in the domain/URL where your sijapi instance is exposed (http://localhost:4444 is fine for the same device, but consider using a reverse proxy to extend to your mobile devices).

And fill in your GLOBAL_API_KEY that you chose when configuring sijapi.
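For reference, the POST that the Archivist userscript below sends can be reproduced from Python for testing. This is a minimal sketch using the requests library; the /archive path, the form fields, and the Bearer-token auth mirror the userscript, while the base URL and key are placeholders you would swap for your own BASE_URL and GLOBAL_API_KEY.

```python
# Sketch of the same form-encoded POST the Archivist userscript makes.
# BASE_URL and API_KEY are placeholders; substitute your own values.
import requests

BASE_URL = "http://localhost:4444"   # where your sijapi instance is exposed
API_KEY = "YOUR_GLOBAL_API_KEY"

payload = {
    "title": "Example page",
    "url": "https://example.com/article",
    "referrer": "",
    "width": "1280",
    "encoding": "UTF-8",
    "source": "<html>...</html>",     # full page HTML, as the userscript sends
}

resp = requests.post(
    f"{BASE_URL}/archive",
    data=payload,                     # application/x-www-form-urlencoded
    headers={"Authorization": f"Bearer {API_KEY}"},
    timeout=30,
)
print(resp.status_code)
```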
@@ -1,119 +0,0 @@
// ==UserScript==
// @name         Archivist
// @version      0.5
// @description  archivist userscript posts to sij.ai/archive
// @author       sij
// @match        *://*/*
// @grant        GM_xmlhttpRequest
// ==/UserScript==

(function () {
  "use strict";

  // Function to check if the URL is likely an ad, tracker, or unwanted resource
  function isUnwantedURL(url) {
    const unwantedPatterns = [
      /doubleclick\.net/,
      /googlesyndication\.com/,
      /adservice\./,
      /analytics\./,
      /tracker\./,
      /pixel\./,
      /ad\d*\./,
      /\.ad\./,
      /ads\./,
      /\/ads\//,
      /url=http/,
      /safeframe/,
      /adsystem/,
      /adserver/,
      /adnetwork/,
      /sync\./,
      /beacon\./,
      /optimizely/,
      /outbrain/,
      /widgets\./,
      /cdn\./,
      /pixel\?/,
      /recaptcha/,
      /accounts\.google\.com\/gsi/,
      /imasdk\.googleapis\.com/,
      /amplify-imp/,
      /zemanta/,
      /monitor\.html/,
      /widgetMonitor/,
      /nanoWidget/,
      /client_storage/,
    ];
    return unwantedPatterns.some((pattern) => pattern.test(url));
  }

  // Function to archive the page
  function archivePage() {
    var currentUrl = window.location.href;

    if (isUnwantedURL(currentUrl)) {
      console.log("Skipping unwanted URL:", currentUrl);
      return;
    }

    var data = new URLSearchParams({
      title: document.title,
      url: currentUrl,
      referrer: document.referrer || "",
      width: window.innerWidth ? window.innerWidth.toString() : "",
      encoding: document.characterSet,
      source: document.documentElement.outerHTML,
    });

    GM_xmlhttpRequest({
      method: "POST",
      url: "https://api.sij.ai/archive?api_key=sk-NhrtQwCHNdK5sRZC",
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        Authorization: "bearer sk-NhrtQwCHNdK5sRZC",
      },
      data: data.toString(),
      onload: function (response) {
        console.log("Archive request sent for:", currentUrl);
      },
      onerror: function (error) {
        console.error("Error sending archive request:", error);
      },
    });
  }

  // Debounce function to limit how often archivePage can be called
  function debounce(func, wait) {
    let timeout;
    return function executedFunction(...args) {
      const later = () => {
        clearTimeout(timeout);
        func(...args);
      };
      clearTimeout(timeout);
      timeout = setTimeout(later, wait);
    };
  }

  // Debounced version of archivePage
  const debouncedArchivePage = debounce(archivePage, 2000);

  // Listen for navigation events
  window.addEventListener("popstate", debouncedArchivePage);

  // Intercept link clicks
  document.addEventListener(
    "click",
    function (e) {
      var link = e.target.closest("a");
      if (link && !isUnwantedURL(link.href)) {
        setTimeout(debouncedArchivePage, 1000); // Delay to allow page to load
      }
    },
    true
  );

  // Initial page load
  setTimeout(archivePage, 5000);
})();
@@ -1,218 +0,0 @@
{
    log {
        # Specify path and log level for Caddy logs
        output file /var/log/caddy/logfile.log
        level INFO
    }

    # replace `localhost` with an externally accessible IP address, e.g. a local LAN address or Tailscale IP. Take care not to use a publicly accessible IP address, as the Caddy API is not separately protected by API keys!
    admin localhost:2019

    servers {
        metrics
    }

    # Replace with your email address for SSL certificate registration
    email info@example.com
}

# This is a highly permissive CORS config. Dial it back as your use case allows.
(cors) {
    @cors_preflight method OPTIONS
    header {
        Access-Control-Allow-Origin "*"
        Access-Control-Expose-Headers "Authorization"
        Access-Control-Allow-Credentials "true"
        Access-Control-Allow-Headers "Authorization, Content-Type"
    }

    handle @cors_preflight {
        header {
            Access-Control-Allow-Methods "GET, POST, PUT, PATCH, DELETE"
            Access-Control-Max-Age "3600"
        }
        respond "" 204
    }
}

# Replace with the subdomain you want to expose your API over
api.example.com {
    import cors

    # Specify which endpoints do not require an API key
    @public {
        path /img/* /oauth /oauth/* /MS365 /MS365/* /ip /health /health* /health/* /id /identity
    }

    # Accept your GLOBAL_API_KEY (specified via environment variable in Caddy's context) via `Authorization: Bearer` header
    @apiKeyAuthHeader {
        header Authorization "Bearer {env.GLOBAL_API_KEY}"
    }

    # Optionally, accept your GLOBAL_API_KEY via query parameters
    @apiKeyAuthQuery {
        query api_key={env.GLOBAL_API_KEY}
    }

    handle @public {
        reverse_proxy {
            # Specify the local (or Tailscale) IPs & ports where the API service is running
            to 100.64.64.20:4444 100.64.64.11:4444 10.13.37.30:4444 localhost:4444
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
            header_up X-Forwarded-For {remote}
            header_up X-Forwarded-Proto {scheme}
        }
    }

    handle @apiKeyAuthHeader {
        reverse_proxy {
            # Specify the local (or Tailscale) IPs & ports where the API service is running
            to 100.64.64.20:4444 100.64.64.11:4444 10.13.37.30:4444 localhost:4444
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    handle @apiKeyAuthQuery {
        reverse_proxy {
            # Specify the local (or Tailscale) IPs & ports where the API service is running
            to 100.64.64.20:4444 100.64.64.11:4444 10.13.37.30:4444 localhost:4444
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    handle {
        respond "Unauthorized: Valid API key required" 401
    }

    # Assuming you use Cloudflare for DNS challenges and have configured a CLOUDFLARE_API_TOKEN environment variable in Caddy's context
    tls {
        dns cloudflare {env.CLOUDFLARE_API_TOKEN}
    }

    log {
        output file /var/log/caddy/sijapi.log {
            roll_size 100mb
            roll_keep 5
            roll_keep_for 720h
        }
        format json {
            time_format "iso8601"
            message_key "message"
        }
    }
}

# Everything below here is ancillary to the primary API functionality

# If you have another domain you want to expose a particular endpoint on, try something like this -- e.g., here, https://sij.law/pgp serves as a short URL for sharing my public PGP key.
sij.law {
    reverse_proxy /pgp 100.64.64.20:4444 100.64.64.30:4444 100.64.64.11:4444 localhost:4444 {
        lb_policy first
        health_uri /health
        health_interval 10s
        health_timeout 5s
        health_status 2xx
    }

    # Because I maintain a separate service on this domain (a Ghost blog), I need fallback handling for everything besides `/pgp`.
    reverse_proxy localhost:2368

    tls {
        dns cloudflare {env.CLOUDFLARE_API_TOKEN}
    }
}

# Another special use case example: this provides handling for my URL shortener.
sij.ai {

    # Any three-character alphanumeric URI is construed as a shortened URL.
    @shorturl {
        path_regexp ^/[a-zA-Z0-9]{3}$
    }

    # https://sij.ai/s points to the WebUI for my URL shortener
    @shortener_ui {
        path /s
    }

    @apiKeyAuthHeader {
        header Authorization "Bearer {env.GLOBAL_API_KEY}"
    }

    @apiKeyAuthQuery {
        query api_key={env.GLOBAL_API_KEY}
    }

    @analytics {
        path_regexp ^/analytics/[a-zA-Z0-9]{3}$
    }

    @pgp {
        path /pgp
    }

    handle @shortener_ui {
        reverse_proxy 100.64.64.20:4444 100.64.64.30:4444 100.64.64.11:4444 localhost:4444 {
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    handle @shorturl {
        reverse_proxy 100.64.64.20:4444 100.64.64.30:4444 100.64.64.11:4444 localhost:4444 {
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    handle @analytics {
        reverse_proxy 100.64.64.20:4444 100.64.64.30:4444 100.64.64.11:4444 localhost:4444 {
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    # Handling for my public PGP key endpoint
    handle @pgp {
        reverse_proxy 100.64.64.20:4444 100.64.64.30:4444 100.64.64.11:4444 localhost:4444 {
            lb_policy first
            health_uri /health
            health_interval 10s
            health_timeout 5s
            health_status 2xx
        }
    }

    # Base domain redirects to my Ghost blog
    handle / {
        redir https://sij.law permanent
    }

    # All URIs that don't fit the patterns above redirect to the equivalent URI on my Ghost blog domain
    handle /* {
        redir https://sij.law{uri} permanent
    }

    tls {
        dns cloudflare {env.CLOUDFLARE_API_TOKEN}
    }
}
@@ -1 +0,0 @@
This is a sample Caddyfile for a load-balancing reverse-proxy setup with HTTPS, Cloudflare DNS challenge handling, API key handling (with specified endpoints exempt from the key requirement), and a second domain with special handling for certain endpoints (e.g. /s for the URL shortener, /pgp for the public PGP key).
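As a quick check of the API-key handling the sample Caddyfile sets up, the sketch below exercises the three paths it distinguishes: a public endpoint, the `Authorization: Bearer` header matcher, and the `api_key` query-parameter matcher. api.example.com is the placeholder domain from the Caddyfile, /health comes from its @public matcher, and the protected path is purely illustrative.

```python
# Sketch: hitting the Caddy-fronted API the three ways the sample Caddyfile allows.
import requests

API = "https://api.example.com"      # placeholder domain from the Caddyfile
KEY = "YOUR_GLOBAL_API_KEY"

# 1. Public endpoints (matched by @public) need no key at all.
print(requests.get(f"{API}/health").status_code)

# 2. Everything else: pass GLOBAL_API_KEY via the Authorization header...
print(requests.get(f"{API}/some/protected/route",   # hypothetical non-public path
                   headers={"Authorization": f"Bearer {KEY}"}).status_code)

# 3. ...or, optionally, via the api_key query parameter.
print(requests.get(f"{API}/some/protected/route",
                   params={"api_key": KEY}).status_code)
```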
@@ -1,63 +0,0 @@
import location
import time
import json
import os
import sys
from datetime import datetime, timezone

def get_current_location():
    location.start_updates()
    time.sleep(1)  # Give it a moment to get an accurate fix
    current_location = location.get_location()
    location.stop_updates()

    elevation = current_location['altitude']
    latitude = current_location['latitude']
    longitude = current_location['longitude']
    current_time = datetime.now(timezone.utc)
    timestamp = current_time.isoformat()

    return {
        'latitude': latitude,
        'longitude': longitude,
        'elevation': elevation,
        'datetime': timestamp
    }

def save_location_data(data, context, filename='location_log.json'):
    if os.path.exists(filename):
        with open(filename, 'r') as f:
            existing_data = json.load(f)
    else:
        existing_data = []

    data['context'] = context
    existing_data.append(data)

    with open(filename, 'w') as f:
        json.dump(existing_data, f, indent=4)

if len(sys.argv) > 1:
    try:
        context = json.loads(sys.argv[1])
        context.setdefault('action', 'manual')
        context.setdefault('device_type', 'Pythonista')
        context.setdefault('device_model', None)
        context.setdefault('device_name', None)
        context.setdefault('device_os', None)
    except json.JSONDecodeError:
        print("Error: The provided argument is not a valid JSON.")
        sys.exit(1)
else:
    context = {
        'action': 'manual',
        'device_type': 'Pythonista',
        'device_model': None,
        'device_name': None,
        'device_os': None
    }

location_data = get_current_location()
save_location_data(location_data, context)
print(f"Location data: {location_data} with context '{context}' saved locally.")
time.sleep(5)
@@ -1 +0,0 @@
These two companion scripts are designed for use with Pythonista on iOS devices. GPS.py records and saves your latitude and longitude to a local file, and uploadGPS.py uploads it to your sijapi instance (assuming you've entered a valid URL and an API key matching the GLOBAL_API_KEY you chose when configuring sijapi).
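uploadGPS.py itself is not shown in this diff, so the sketch below is only a guess at its shape, assuming the locate router accepts a JSON POST of the entries that GPS.py writes to location_log.json. The /locate path, the payload shape, and the clearing of the log on success are assumptions for illustration, not the repository's actual implementation.

```python
# Hypothetical sketch of an uploader for location_log.json (not the real uploadGPS.py).
# Assumptions: a /locate endpoint that accepts the logged entries as JSON,
# and Bearer auth with your GLOBAL_API_KEY.
import json
import requests

BASE_URL = "http://localhost:4444"   # your sijapi instance
API_KEY = "YOUR_GLOBAL_API_KEY"
LOG = "location_log.json"

with open(LOG, "r") as f:
    entries = json.load(f)

resp = requests.post(
    f"{BASE_URL}/locate",            # assumed endpoint path
    json=entries,
    headers={"Authorization": f"Bearer {API_KEY}"},
    timeout=30,
)

if resp.ok:
    # Clear the local log once the server has the data.
    with open(LOG, "w") as f:
        json.dump([], f)
print(resp.status_code)
```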
@@ -1 +0,0 @@
how
README.md: 528 changed lines
@@ -1,51 +1,481 @@
|
||||||
```
|
```
|
||||||
.x+=:. . .
|
#──────────────────────────────────────────────────────────────────────────────────
|
||||||
z` ^% @88> .. @88>
|
# C O N F I G U R A T I O N F I L E
|
||||||
. <k %8P 888> .d`` %8P
|
#──────────────────────────────────────────────────────────────────────────────────
|
||||||
.@8Ned8" . "8P u @8Ne. .u .
|
#
|
||||||
.@^%8888" .@88u . us888u. %8888:u@88N .@88u
|
# Hi friend! You've found my hidden .config.YAML-example file. Do you like
|
||||||
x88: `)8b. ''888E` u888u. .@88 "8888" `888I 888. ''888E`
|
# old-school ASCII art? I bet you do. So listen, this'll be your method for
|
||||||
8888N=*8888 888E `'888E 9888 9888 888I 888I 888E
|
# configuring sijapi, and nothing works until you at least:
|
||||||
%8" R88 888E 888E 9888 9888 888I 888I 888E
|
#
|
||||||
@8Wou 9% 888E 888E 9888 9888 uW888L 888' 888E
|
# (1) fill in the ESSENTIALS category, and
|
||||||
.888888P` 888& 888E 9888 9888 '*88888Nu88P 888&
|
#
|
||||||
` ^"F R888" 888E "888*""888"~ '88888F` R888"
|
# (2) rename this file `.config.yaml`
|
||||||
"" 888E ^Y" ^Y' 888 ^ ""
|
#
|
||||||
888E *8E
|
# ... and even then, certain features will not work until you set other
|
||||||
888P '8>
|
# relevant variables below.
|
||||||
.J88" "
|
#
|
||||||
|
# So get yourself a beverage, put on some sick beats, and settle in for a vibe-y
|
||||||
|
# configuration sesh. Remember to read my detailed notes if you ever feel lost,
|
||||||
|
# and most important, remember:
|
||||||
|
#
|
||||||
|
# you are NOT alone,
|
||||||
|
# I love you SO much,
|
||||||
|
# and you are SO worthy. <3
|
||||||
|
#
|
||||||
|
# y o u r b f & b f 4 e , †
|
||||||
|
# .x+=:. . .
|
||||||
|
# z` ^% @88> .. @88>
|
||||||
|
# . <k %8P 888> .d`` %8P
|
||||||
|
# .@8Ned8" . "8P u @8Ne. .u .
|
||||||
|
# .@^%8888" .@88u . us888u. %8888:u@88N .@88u
|
||||||
|
# x88: `)8b. ''888E` u888u. .@88 "8888" `888I 888. ''888E`
|
||||||
|
# ~ 8888N=*8888 888E `'888E 9888 9888 888I 888I 888E
|
||||||
|
# %8" R88 888E 888E 9888 9888 888I 888I 888E
|
||||||
|
# @8Wou 9% 888E 888E 9888 9888 uW888L 888' 888E
|
||||||
|
# .888888P` 888& 888E 9888 9888 '*88888Nu88P 888&
|
||||||
|
# ` ^"F R888" 888E "888*""888" ~ '88888F` R888"
|
||||||
|
# "" 888E ^Y" ^Y' 888 ^ ""
|
||||||
|
# 888E *8E
|
||||||
|
# 888P '8> † biggest fan
|
||||||
|
# .J88" " " and best
|
||||||
|
# friend 4 e v e r
|
||||||
|
#
|
||||||
|
# B U T I H E A R Y O U :
|
||||||
|
# L E T ' S T A K E I T S L O W A N D
|
||||||
|
# ────────────── S T A R T W I T H T H E ──────────────
|
||||||
|
#
|
||||||
|
# ███████╗███████╗███████╗███████╗███╗ ██╗████████╗██╗ █████╗ ██╗ ███████╗
|
||||||
|
# ██╔════╝██╔════╝██╔════╝██╔════╝████╗ ██║╚══██╔══╝██║██╔══██╗██║ ██╔════╝
|
||||||
|
# █████╗ ███████╗███████╗█████╗ ██╔██╗ ██║ ██║ ██║███████║██║ ███████╗
|
||||||
|
# ██╔══╝ ╚════██║╚════██║██╔══╝ ██║╚██╗██║ ██║ ██║██╔══██║██║ ╚════██║
|
||||||
|
# ███████╗███████║███████║███████╗██║ ╚████║ ██║ ██║██║ ██║███████╗███████║
|
||||||
|
# ╚══════╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝
|
||||||
|
# ─────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
#─── first, bind an ip address and port : ──────────────────────────────────────────
|
||||||
|
HOST_NET=0.0.0.0
|
||||||
|
HOST_PORT=4444
|
||||||
|
BASE_URL=https://api.sij.ai
|
||||||
|
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
───────────────── L E T ' S G E T S I L L Y , ────────────────────
|
# HOST_NET† and HOST_PORT comprise HOST and determine the ip and port the server binds to.
|
||||||
|
# BASE_URL is used to assemble URLs, e.g. in the MS authentication flow and for serving images generated on the sd router.
|
||||||
T H E N G O B͎̝̪̼͉͜ O͖͕͇͚͉̼ N̢̦͖̺͔͎ K̠͓̠͖͜ E̝̼̫̙͔̞ R̡͇͖̙͉͎ S̡͉̠͎͙̪
|
# BASE_URL should match the base URL used to access sijapi sans endpoint, e.g. http://localhost:4444 or https://api.sij.ai
|
||||||
|
#
|
||||||
W I T H O U R O W N
|
# † Take care here! Please ensure you understand the implications of setting HOST_NET to anything besides 127.0.0.1, and configure your firewall and router appropriately if you do. Setting HOST_NET to 0.0.0.0, for instance, opens sijapi to any device the server running it is accessible to — including potentially frightening internet randos (depending how your firewall, router, and NAT are configured).
|
||||||
|
#
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓██████▒▓██████▒░
|
# Here are a few options to consider to more securely enable access from
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
# other devices:
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
#
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
# (1) if all access can occur over Tailscale, either:
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
# (a) leave HOST_NET set to 127.0.0.1, run `tailscale cert $(tailscale
|
||||||
░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
# whois $(tailscale ip | head -n 1) | awk '/Name:/ {print $2}')
|
||||||
░▒▓████████▓▒ ░▒▓████████▓▒ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
# if you haven't already issued yourself a TLS certificate on
|
||||||
|
# Tailscale, and then run `tailscale serve --bg --https=4443
|
||||||
|
# 4444` to expose sijapi to your other tailscale-enabled devices
|
||||||
( F O R R E A L T H O U G H , T H E S E A R E
|
# at `https://{device.magicdns-domain.net:4443`}; or
|
||||||
|
# (b) set HOST_NET to your server's Tailscale IP (this should work
|
||||||
────────────────────────────────────────────── S̝͖̦͓̪̻ O̡͖̘̫͇̟ H̢͔͔̫͉͜ O̢̢͉̞͍̘ T̟͍͍̪̦̞ R N )───────
|
# but for me doesn't reliably)
|
||||||
|
#
|
||||||
```
|
# (2) if WAN access truly is required, leave HOST_NET set to 127.0.0.1 and
|
||||||
|
# configure either:
|
||||||
## Overview
|
# (a) a Cloudflare tunnel, or
|
||||||
|
# (b) a reverse proxy with HTTPS (Caddy is excellent for this).
|
||||||
IN DRAFT
|
#
|
||||||
|
# And please be sure to set a strong API key either way but especially for (2).
|
||||||
## Installation
|
# ──────────
|
||||||
|
#
|
||||||
IN DRAFT
|
#──── configure API key authorization and select exemptions────────────────────────
|
||||||
|
GLOBAL_API_KEY=sk-NhrtQwCHNdK5sRZC
|
||||||
## Extras
|
PUBLIC_SERVICES=/id,/ip,/health,/img/,/cl/dockets,/cl/search,/cd/alert
|
||||||
|
TRUSTED_SUBNETS=127.0.0.1/32,10.13.37.0/24,100.64.64.0/24
|
||||||
[Apple Shortcut for location tracking Pythonista script](https://www.icloud.com/shortcuts/d63b179f9c664b0cbbacb1a607767ef7)
|
#─── notes: ──────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# GLOBAL_API_KEY determines the API key that will be required to access all endpoints, except access to PUBLIC_SERVICES or from TRUSTED_SUBNETS. Authentication is made via an `Authorization: Bearer {GLOBAL_API_KEY}` header.
|
||||||
|
# TRUSTED_SUBNETS might commonly include 127.0.0.1/32 (localhost), 100.x.x.0/24 (Tailscale tailnet), and/or 192.168.x.0/24 or 10.x.x.0/24 (local network).
|
||||||
|
# When configuring a reverse proxy or Cloudflare tunnel, please verify traffic through it does not appear to sijapi (i.e. in ./logs) as though it were coming from any of the subnets specified here. For sij, using Caddy, it does not, but your setup may differ.
|
||||||
|
# ──────────
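To make the TRUSTED_SUBNETS note concrete, here is a small sketch of the kind of check it implies, using Python's standard ipaddress module (the same parsing that sijapi's __init__.py applies to this variable). The function name and the example client IPs are illustrative, not code from the repository.

```python
# Sketch: deciding whether a request may skip the GLOBAL_API_KEY check
# because its client IP falls inside one of the TRUSTED_SUBNETS.
import ipaddress

TRUSTED_SUBNETS = [
    ipaddress.ip_network(s.strip())
    for s in "127.0.0.1/32,10.13.37.0/24,100.64.64.0/24".split(",")
]

def is_trusted(client_ip: str) -> bool:
    ip = ipaddress.ip_address(client_ip)
    return any(ip in subnet for subnet in TRUSTED_SUBNETS)

print(is_trusted("100.64.64.11"))  # True: inside the tailnet subnet
print(is_trusted("8.8.8.8"))       # False: this client needs the API key
```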
|
||||||
|
#
|
||||||
|
#─── router selection: ────────────────────────────────────────────────────────────
|
||||||
|
ROUTERS=asr,llm,health,hooks,locate,note,sd,serve,summarize,time,tts,weather
|
||||||
|
UNLOADED=auth,calendar,cf,email,ig
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# ROUTERS determines which routers are loaded.†
|
||||||
|
# UNLOADED is not used directly -- it's just there to help keep track which routers are disabled.
|
||||||
|
#
|
||||||
|
# † ┓ ┏ orth bearing in mind: some routers inherently rely on other routers,
|
||||||
|
# ┃┃┃   3rd party APIs, or other apps being installed locally. If a router that is
|
||||||
|
# ┗┻┛ set to load (i.e. is included in ROUTERS) depends on another router,
|
||||||
|
# that other router will also load too irrespective of whether it's listed.
|
||||||
|
#
|
||||||
|
# But let's get down to brass tacks, shall we?
|
||||||
|
#
|
||||||
|
# asr: requires faster_whisper — $ pip install faster_whisper — and
|
||||||
|
# downloading the model file specified in ASR_DEFAULT_MODEL.
|
||||||
|
#
|
||||||
|
# auth: authenticates a Microsoft 365 account (for email & calendar).
|
||||||
|
#
|
||||||
|
# calendar: requires (1) a Microsoft 365 account with a properly configured
|
||||||
|
# Azure Active Directory app, and/or (2) Calendars on macOS.
|
||||||
|
#
|
||||||
|
# cf: interfaces with the Cloudflare API and Caddy to register new
|
||||||
|
# [sub-]domains on Cloudflare and deploy them with Caddy as
|
||||||
|
# reverse proxy.
|
||||||
|
#
|
||||||
|
# llm: requires ollama — $ pip install ollama — and downloading the
|
||||||
|
# models set in LLM_DEFAULT_MODEL and LLM_VISION_MODEL.
|
||||||
|
#
|
||||||
|
# email: designed for accessing Protonmail via Protonmail Bridge and/or
|
||||||
|
# Microsoft 365, but should work with any IMAP/SMTP email account.
|
||||||
|
#
|
||||||
|
# hooks: designed for two specific use cases: monitoring court dockets
|
||||||
|
# through CourtListener.org, and monitoring arbitrary web pages for
|
||||||
|
# changes in tandem with a self-hosted changedetection.io instance.
|
||||||
|
# Both require accounts; other functionality would require
|
||||||
|
# additional / modified code.
|
||||||
|
#
|
||||||
|
# ig: requires an Instagram account, with credentials and other settings
|
||||||
|
# configured separately in the ig_config.json file; relies heavily
|
||||||
|
# on the llm and sd routers which have their own dependencies.
|
||||||
|
#
|
||||||
|
# locate: some endpoints work as is, but the core location tracking
|
||||||
|
# functionality requires Postgresql + PostGIS extension and are
|
||||||
|
# designed specifically to pair with a mobile device where
|
||||||
|
# Pythonista is installed and configured to run the
|
||||||
|
# `gps_tracker.py` and `gps_upload.py` scripts periodically or per
|
||||||
|
#                 repeating conditions (e.g. via automation under Apple Shortcuts).
|
||||||
|
#
|
||||||
|
# note: designed for use with Obsidian plus the Daily Notes and Tasks
|
||||||
|
# core extensions; and the Admonitions, Banners, Icons (with the
|
||||||
|
# Lucide pack), and Make.md community extensions. Moreover `notes`
|
||||||
|
# relies heavily on the calendar, llm, locate, sd, summarize, time,
|
||||||
|
# tts, and weather routers and accordingly on the external
|
||||||
|
# dependencies of each.
|
||||||
|
#
|
||||||
|
# sd: requires ComfyUI plus any modules and StableDiffusion models
|
||||||
|
# set in sd_config and individual workflow .json files.
|
||||||
|
#
|
||||||
|
# summarize: relies on the llm router and thus requires ollama.
|
||||||
|
#
|
||||||
|
# time: requires the subscription-based macOS app 'Timing' (one of many
|
||||||
|
# apps that together make SetApp an incredible value for macOS users!)
|
||||||
|
#
|
||||||
|
# tts: designed for use with coqui — $ pip install coqui — and/or the
|
||||||
|
# ElevenLabs API.
|
||||||
|
#
|
||||||
|
# weather: requires a VisualCrossing API key and is designed for (but doesn't
|
||||||
|
# itself strictly require) Postgresql with the PostGIS extension;
|
||||||
|
# (... but it presently relies on the locate router, which does).
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# ... Whew! that was a lot, right? I'm so glad we're in this together...
|
||||||
|
# ──────────
|
||||||
|
#
|
||||||
|
#─────────────────────── Y O U ' R E G O N N A L O V E ────────────────────────
|
||||||
|
#
|
||||||
|
# ░ ░░ ░░ ░ ░░░░░░░░ ░░░ ░░░ ░░ ░░░░░░░ ░
|
||||||
|
# ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒
|
||||||
|
# ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓▓▓▓▓ ▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓ ▓▓▓▓▓▓▓ ▓▓▓
|
||||||
|
# ████ ████ ████ ████ █████████████ █ ████ █ █ ███████ ███████
|
||||||
|
# ████ ████ ████ █ █ ██ ███ ██ ████ █ █ █
|
||||||
|
#
|
||||||
|
# A N D I ' M N O T. E V E N. J E A L O U S.
|
||||||
|
# Y O U D E S E R V E I T A L L , B A B Y C A K E S.
|
||||||
|
#
|
||||||
|
#─── use tailscale for secure remote access: ───────────────────────────────────────
|
||||||
|
TS_IP=100.64.64.20
|
||||||
|
TS_SUBNET=100.64.64.0/24
|
||||||
|
TS_ID=sij-mbp16
|
||||||
|
TS_TAILNET=starling-sailfin
|
||||||
|
TAILSCALE_API_KEY=tskey-api-kosR4MfJtF11CNTRL-zJu4odnpr4huLwAGsuy54hvkJi2ScVWQL
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# TS_IP should match the Tailscale IP of the device. But this is deprecated, and if the functionality becomes relevant again, it should come back in the form of a dynamic check (`tailscale status` in a shell subprocess) in __init__.py or even the /id endpoint.
|
||||||
|
# TS_SUBNET should match the IP/CIDR-format tailnet
|
||||||
|
# TS_ID currently has two roles: it's used to assemble the complete MagicDNS of the server, and it determines what the /id endpoint on the health router returns. This is relevant where multiple servers run the script behind a load balancer (e.g. Caddy), as a means to check which server responds. Bear in mind that /id is NOT API key-protected by default here.
|
||||||
|
# TS_TAILNET should match the tailnet's MagicDNS domain (omitting the `.net`, for reasons)
|
||||||
|
# ──────────
|
||||||
|
#
|
||||||
|
#────────────── U & M E ── W E C A N G E T T H R O U G H ──────────────────
|
||||||
|
#
|
||||||
|
# ██▓███ ▒█████ ██████ ▄▄▄█████▓ ▄████ ██▀███ ▓█████ ██████
|
||||||
|
# ▓██░ ██▒██▒ ██▒▒██ ▒ ▓ ██▒ ▓▒ ██▒ ▀█▒▓██ ▒ ██▒▓█ ▀ ▒██ ▒
|
||||||
|
# ▓██░ ██▓▒██░ ██▒░ ▓██▄ ▒ ▓██░ ▒░▒██░▄▄▄░▓██ ░▄█ ▒▒███ ░ ▓██▄
|
||||||
|
# ▒██▄█▓▒ ▒██ ██░ ▒ ██▒░ ▓██▓ ░ ░▓█ ██▓▒██▀▀█▄ ▒▓█ ▄ ▒ ██▒
|
||||||
|
# ▒██▒ ░ ░ ████▓▒░▒██████▒▒ ▒██▒ ░ ░▒▓███▀▒░██▓ ▒██▒░▒████▒▒██████▒▒
|
||||||
|
# ▒██▒ ░ ░ ▒░▒░▒░ ▒ ▒▓▒ ▒ ░ ▒ ░░ ░▒ ▒ ░ ▒▓ ░▒▓░░░ ▒░ ░▒ ▒▓▒ ▒ ░
|
||||||
|
# ▒▓▒░ ░ ▒ ▒░ ░ ░▒ ░ ░ ░ ░ ░ ░▒ ░ ▒░ ░ ░ ░░ ░▒ ░ ░
|
||||||
|
# ░▒ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░ ░ ░
|
||||||
|
# ░░ ░ ░ T O G E T H ░ R . ░ ░ ░ ░ ░
|
||||||
|
# ░
|
||||||
|
#─── for weather and locate modules: ───────────── J U S T H O L D M Y H A N D .
|
||||||
|
DB=sij
|
||||||
|
# R E A L T I G H T.
|
||||||
|
DB_HOST=127.0.0.1
|
||||||
|
DB_PORT=5432
|
||||||
|
# U G O T T H I S , K ?
|
||||||
|
DB_USER=sij
|
||||||
|
DB_PASS='Synchr0!'
|
||||||
|
# Y E A H . . .
|
||||||
|
DB_SSH=100.64.64.15
|
||||||
|
# * J U S T L I K E T H A T . *
|
||||||
|
DB_SSH_USER=sij
|
||||||
|
DB_SSH_PASS='Synchr0!'
|
||||||
|
#─── notes: ────────────────────────────────────────────────── S E E ? E Z - P Z
|
||||||
|
#
|
||||||
|
# DB, DB_HOST, DB_PORT, DB_USER, and DB_PASS should specify those respective
|
||||||
|
# credentials for your Postgres database. DB_SSH and associated _USER and _PASS
|
||||||
|
# variables allow database access over an SSH tunnel.
|
||||||
|
#
|
||||||
|
# In the current implementation, we rely on Postgres to hold:
|
||||||
|
# i. user-logged location data (locate module), and
|
||||||
|
# ii. results from past weather forecast checks (weather module).
|
||||||
|
#
|
||||||
|
# A future version will hopefully make use of PostGIS's geocoding capabilities,
|
||||||
|
# and add a vector database for the LLM module. Until then it's up to you if the
|
||||||
|
# locate and weather modules are worth the hassle of maintaining Postgres.
|
||||||
|
# ──────────
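For a rough idea of how the DB_* and DB_SSH_* values fit together, here is a sketch that opens an SSH tunnel with sshtunnel and connects with psycopg2 (both libraries appear in requirements.txt). It illustrates the roles of the variables, not the repository's actual database layer; hostnames and passwords are placeholders.

```python
# Sketch: reaching the Postgres instance described by DB_* through an SSH tunnel.
import psycopg2
from sshtunnel import SSHTunnelForwarder

DB, DB_HOST, DB_PORT = "sij", "127.0.0.1", 5432
DB_USER, DB_PASS = "sij", "example-password"                    # placeholders
DB_SSH, DB_SSH_USER, DB_SSH_PASS = "100.64.64.15", "sij", "example-password"

with SSHTunnelForwarder(
    (DB_SSH, 22),
    ssh_username=DB_SSH_USER,
    ssh_password=DB_SSH_PASS,
    remote_bind_address=(DB_HOST, DB_PORT),
) as tunnel:
    conn = psycopg2.connect(
        host="127.0.0.1",
        port=tunnel.local_bind_port,   # local end of the tunnel
        dbname=DB,
        user=DB_USER,
        password=DB_PASS,
    )
    with conn.cursor() as cur:
        cur.execute("SELECT version();")
        print(cur.fetchone())
    conn.close()
```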
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#───── Y O U C A N S I T T H I S O N E) O U T B A B E , ────────<3─────────
|
||||||
|
# ( ( ( I F Y O U ' D )
|
||||||
|
# ))\( ( /(( L I K E . . . ( (
|
||||||
|
# ( (()/(( /((_)\ )\())),----,.
|
||||||
|
# )\((__ ))\( ()) |__))((_)- ))((,' ,'
|
||||||
|
# ,' , `. /((_)\(_) / / '. |(_)|_ ,' .'
|
||||||
|
# ,-+-,.' _ | / / '. / ../ ; ,---. ,----.' .'
|
||||||
|
# ,-+-. ; , || | : /`. / \ ``\ .`- ' / \ | | .'
|
||||||
|
# ,--.'|' | ;|; | |--` \___\/ \ : / / ' : : |--,
|
||||||
|
# | | ,', | ':| : ;_ \ : | . ' / : | ;.' \
|
||||||
|
# | | / | | || \ \ `. / / / ' / ; | | |
|
||||||
|
# ' | : | : |, `----. \ ) \ \ \ | : \ `----'.'\ ;
|
||||||
|
# ; . | ; |--' )(__ \ \ | ((__ / : |; | ``. __ \ . |
|
||||||
|
# | : | | , / /`--' / /)\(/\ / :' ; \ / /\/ / :
|
||||||
|
# | : ' |/ '--'. / / ,,/ ',- .' | .\ |/ ,,/ ',- .
|
||||||
|
# ; | |`-' `--'---' \ ''\ ; | : '; :\ ''\ ;
|
||||||
|
# | ;/ O R , Y U P , \ \ .' \ \ / \ \ .'
|
||||||
|
# '---'B U R N I T A L L D O W N.-`-,,-' `---`--` `--`-,-'
|
||||||
|
# Y O U H A V E A
|
||||||
|
# G O D D E S S O F D E S T R U C T I O N W I T H I N ,
|
||||||
|
# A N D T H A T I S S O V A L I D !!
|
||||||
|
#─── ms365 (calendars): ──────────────────────────────────────────────────────────────
|
||||||
|
MS365_TOGGLE=False
|
||||||
|
ICAL_TOGGLE=True
|
||||||
|
ICALENDARS=3CCC9C7B-BFF0-4850-9CE9-BC504859CBC6,E68FE085-2ECA-4097-AF0A-8D38C404D8DA,AB5A0473-16DD-4916-BD6D-6CB916726605∑∑
|
||||||
|
MS365_CLIENT_ID=ce8cbd24-f146-4dc7-8ee7-51d9b69dec59
|
||||||
|
MS365_TENANT_ID=bad78048-a6e0-47b1-a24b-403c444aa349
|
||||||
|
MS365_SECRET=gbw8Q~7U90GMdvneNnPnzAUt5nWVJPbOsagLPbMe
|
||||||
|
MS365_THUMBPRINT=4CD86699A8B675411EE9C971CB2783E11F9E52CB
|
||||||
|
MS365_SCOPE=basic,calendar_all
|
||||||
|
MS365_TOKEN_FILE=oauth_token.txt
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# # MS365_CLIENT_ID, _TENANT_ID, _SECRET, AND _SCOPES must be obtained from Microsoft
|
||||||
|
# via the Azure portal, by creating a new app registration and an accompanying secret.
|
||||||
|
# MS365_THUMBPRINT is vestige of an earlier failed attempt to get this working, and
|
||||||
|
# for now is deprecated. I recommend seeking out a well-reviewed tutorial for
|
||||||
|
# creating an app on Azure with a client_id and secret and necessary scopes for
|
||||||
|
# individual calendar access, because I had one heck of a time trying various approaches.
|
||||||
|
# Do better, Microsoft.
|
||||||
|
# ──────────
|
||||||
|
#
|
||||||
|
#──────────────────────────────── I B E T Y O U ──────────────────────────────────
|
||||||
|
# R E C E I V E A L O T O F L O V E L E T T E R S O V E R
|
||||||
|
#
|
||||||
|
# .----------------. .----------------. .----------------. .----------------.
|
||||||
|
# | .--------------. | .--------------. | .--------------. | .--------------. |
|
||||||
|
# | | _____ | | | ____ ____ | | | __ | | | ______ | |
|
||||||
|
# | | |_ _| | | ||_ \ / _|| | | / \ | | | |_ __ \ | |
|
||||||
|
# | | | | | | | | \/ | | | | / /\ \ | | | | |__) | | |
|
||||||
|
# | | | | | | | | |\ /| | | | | / ____ \ | | | | ___/ | |
|
||||||
|
# | | _| |_ | | | _| |_\/_| |_ | | | _/ / \ \_ | | | _| |_ | |
|
||||||
|
# | | |_____| | | ||_____||_____|| | ||____| |____|| | | |_____| | |
|
||||||
|
# | | | | | | | | | | | | |
|
||||||
|
# | '--------------' | '--------------' | '--------------' | '--------------' |
|
||||||
|
# '----------------' '----------------' '----------------' '----------------'
|
||||||
|
#
|
||||||
|
# E M A I L
|
||||||
|
#
|
||||||
|
#─── imap & smtp: ────────────────────────────────────────────────────────────────────────
|
||||||
|
IMAP_HOST=127.0.0.1
|
||||||
|
EMAIL_ADDRESS='sij@sij.law'
|
||||||
|
EMAIL_PASSWORD='hesSw7Kum16z-_yxI4kfXQ'
|
||||||
|
IMAP_PORT=1143
|
||||||
|
IMAP_ENCRYPTION=STARTTLS
|
||||||
|
SMTP_PORT=1025
|
||||||
|
SMTP_ENCRYPTION=SSL
|
||||||
|
#─── notes: ───────────────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# This is primarily for summarizing incoming emails. Any IMAP account should work, but
|
||||||
|
# I focused testing on a somewhat complex setup involving Protonmail Bridge.
|
||||||
|
# ──────────
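To sanity-check the IMAP settings above (host 127.0.0.1, port 1143, STARTTLS is the usual Protonmail Bridge arrangement), here is a minimal connectivity test using only Python's standard imaplib. sijapi's own email router relies on other libraries (imbox is in requirements.txt); this is just a sketch for verifying the bridge is reachable with these values, with placeholder credentials.

```python
# Sketch: verify the IMAP_* settings reach Protonmail Bridge (or any IMAP server).
import imaplib

IMAP_HOST, IMAP_PORT = "127.0.0.1", 1143
EMAIL_ADDRESS, EMAIL_PASSWORD = "you@example.com", "bridge-password"  # placeholders

conn = imaplib.IMAP4(IMAP_HOST, IMAP_PORT)  # plain connection first...
conn.starttls()                             # ...then upgrade, per IMAP_ENCRYPTION=STARTTLS
conn.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
conn.select("INBOX")
status, data = conn.search(None, "UNSEEN")
print(status, len(data[0].split()), "unread messages")
conn.logout()
```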
|
||||||
|
#
|
||||||
|
#──────────────────────────────── G E T S I L L Y ────────────────────────────────────
|
||||||
|
# T H E N G O B O N K E R S
|
||||||
|
# W I T H Y O U R O W N
|
||||||
|
#
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓██████▒▓██████▒░
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
# ░▒▓████████▓▒ ░▒▓████████▓▒ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
||||||
|
#
|
||||||
|
# ( F O R R E A L T H O U G H — T H E S E
|
||||||
|
#─── via ollama (local llm):             A R E   S O   H O T   R I G H T   N O W  )
|
||||||
|
LLM_URL=http://localhost:11434
|
||||||
|
SYSTEM_MSG=You are a helpful AI assistant.
|
||||||
|
DEFAULT_LLM=dolphin-mistral
|
||||||
|
DEFAULT_VISION=llava-llama3
|
||||||
|
SUMMARY_MODEL=dolphin-mistral
|
||||||
|
SUMMARY_CHUNK_SIZE=4000
|
||||||
|
SUMMARY_CHUNK_OVERLAP=100
|
||||||
|
SUMMARY_TPW=1.3
|
||||||
|
SUMMARY_LENGTH_RATIO=4
|
||||||
|
SUMMARY_MIN_LENGTH=150
|
||||||
|
SUMMARY_TOKEN_LIMIT=4096
|
||||||
|
SUMMARY_INSTRUCT='You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.'
|
||||||
|
SUMMARY_INSTRUCT_TTS='You are an AI assistant that provides email summaries for Sanjay -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary. Your response will undergo Text-To-Speech conversion and be added to Sanjay's private podcast. Providing adequate context (Sanjay did not send this question to you, he will only hear your response) but aiming for conciseness and precision, and bearing in mind the Text-To-Speech conversion (avoiding acronyms and formalities), summarize the following.'
|
||||||
|
DEFAULT_VOICE=Luna
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# The exact values here will depend on what software you are using to inference an LLM,
|
||||||
|
# and of course what models and capabilities are available through it. The script was
|
||||||
|
# designed for use with `ollama`, but most of the functionality should be equal with
|
||||||
|
#   LM Studio, LocalAI, etc.
|
||||||
|
#
|
||||||
|
# DEFAULT_LLM is self-explanatory; DEFAULT_VISION is used for image recognition within
|
||||||
|
# a multimodal chat context, such as on the ig module for generating intelligible
|
||||||
|
# comments to Instagram posts, or more realistic captions for sd-generated images.
|
||||||
|
#
|
||||||
|
# Note it's possible to specify a separate model for general purposes and for
|
||||||
|
# summarization tasks. The other SUMMARY_ variables call for some explanation,
|
||||||
|
# in particular six that are most relevant when summarizing very long documents:
|
||||||
|
#
|
||||||
|
# SUMMARY_CHUNK_SIZE: determines the maximum length, in tokens, the pieces that are
|
||||||
|
# split and sent individually to the model.
|
||||||
|
#
|
||||||
|
# SUMMARY_CHUNK_OVERLAP: determines how much of each chunk is overlapped with the prior
|
||||||
|
# and next chunks. Set too high causes repetition, set too low
|
||||||
|
# causes misunderstood confusion and poor summary results.
|
||||||
|
# The summarization algorithm is flawed but I've gotten the best
|
||||||
|
# results with this set around 100–200.
|
||||||
|
#
|
||||||
|
# SUMMARY_TPW: used in estimating the token count of a prompt for purposes of
|
||||||
|
# complying with the maximum tokens a model can handle at once.
|
||||||
|
#                       Best you can do is estimate. I tend to use long words fairly
|
||||||
|
# excessively and found my average was 1.3 tokens per word. YMMV.
|
||||||
|
#
|
||||||
|
# SUMMARY_LENGTH_RATIO: this is the primary control over the length of generated
|
||||||
|
# summaries, expressed as the ratio of original text length to
|
||||||
|
# summary length. The default, 4, means the summaries will be
|
||||||
|
# around 1/4 the length of the original text you provide it.
|
||||||
|
#
|
||||||
|
# SUMMARY_MIN_LENGTH: the default SUMMARY_LENGTH_RATIO of 4 isn't ideal for very
|
||||||
|
# short texts, but setting it any lower sacrifices conciseness
|
||||||
|
# in summaries of longer texts. In short one size doesn't fit
|
||||||
|
# all. The compromise I landed on was to set a "maximum minimum"
|
||||||
|
# summary length: under no circumstances will the script impose
|
||||||
|
# a smaller maximum length than this value.
|
||||||
|
#
|
||||||
|
# SUMMARY_INSTRUCT: sets the prompt used when summarizing text.
|
||||||
|
#
|
||||||
|
# SUMMARY_INSTRUCT_TTS: sets a separate prompt for use when summarizing text where
|
||||||
|
# tts output was requested; tends to yield "cleaner" audio
|
||||||
|
# with less numbers (page numbers, citations) and other
|
||||||
|
# information extraneous to spoken contexts.
|
||||||
|
#
|
||||||
|
# DEFAULT_VOICE: used for all tts tasks when a specific voice is not requested.
|
||||||
|
# ──────────
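The six SUMMARY_ knobs described above interact in a fairly mechanical way; the arithmetic below is a rough sketch of that interaction (chunk count from SUMMARY_CHUNK_SIZE and SUMMARY_CHUNK_OVERLAP, a token estimate from SUMMARY_TPW, a target length from SUMMARY_LENGTH_RATIO floored by SUMMARY_MIN_LENGTH). It illustrates the settings, not the repository's actual summarization algorithm.

```python
# Rough sketch of how the SUMMARY_* settings interact; not the real algorithm.
import math

CHUNK_SIZE, CHUNK_OVERLAP = 4000, 100
TPW, LENGTH_RATIO, MIN_LENGTH = 1.3, 4, 150

def summary_plan(text: str):
    est_tokens = int(len(text.split()) * TPW)            # crude token estimate via TPW
    stride = CHUNK_SIZE - CHUNK_OVERLAP                   # effective advance per chunk
    n_chunks = max(1, math.ceil(max(est_tokens - CHUNK_OVERLAP, 1) / stride))
    # Target summary length: 1/LENGTH_RATIO of the input, but never capped
    # below MIN_LENGTH (the "maximum minimum" described above).
    max_summary_tokens = max(est_tokens // LENGTH_RATIO, MIN_LENGTH)
    return est_tokens, n_chunks, max_summary_tokens

print(summary_plan("word " * 12000))  # ~15,600 est. tokens -> 4 chunks, 3,900-token cap
```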
|
||||||
|
#
|
||||||
|
#────,-_/────────── W E C A N E X P E R I M E N T W I T H ──────────.───────────
|
||||||
|
# ' | ,~-,-. ,-. ,-. ,--. | --' ,--. ,-. ,--. ,-. ,-. |-- . ,-. ,-.
|
||||||
|
# .^ | | | | ,--| | | | --' | -,- | --' | | | --' | ,--| | | | | | |
|
||||||
|
# `--' ' ' ' `-^ `-| `--' `---| `--' ' ' `--' ' `--^ `' ` `-' ' '
|
||||||
|
# , | ,-. | ~ I N T H E N U D E . ~
|
||||||
|
# `~~' `-+'
|
||||||
|
# O R F U L L Y C L O T H E D ── U P T O Y O U
|
||||||
|
#
|
||||||
|
#─── via comfyui (stable diffusion): ───── ( B U T L E T M E K N O W , Y E A H ? )
|
||||||
|
COMFYUI_URL=http://localhost:8188
|
||||||
|
COMFYUI_DIR=/Users/sij/workshop/sd/ComfyUI
|
||||||
|
PHOTOPRISM_USER=NOT_IMPLEMENTED
|
||||||
|
PHOTOPRISM_PASS=NOT_IMPLEMENTED
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# COMFYUI_URL, as you may expect, should point to the URL you use to access ComfyUI. If you
|
||||||
|
# don't know, watch for it in the server logs once ComfyUI is fully launched.
|
||||||
|
#
|
||||||
|
# COMFYUI_DIR, with similar self-evidence, should point to the base directory of your
|
||||||
|
# ComfyUI installation (i.e. the folder that contains `models`, `inputs`, and `outputs`)
|
||||||
|
#
|
||||||
|
# PhotoPrism integration is not yet implemented, so don't bother with that just yet.
|
||||||
|
# ──────────
|
||||||
|
#
|
||||||
|
# D O N ' T M I S S O N E ───────────────────────────────────────
|
||||||
|
#\ F I N A L S M A T T E R I N G O F M I S C E L L A N Y \
|
||||||
|
# \ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
||||||
|
# \ _ _ _/\\\\_ _ _ _ _ _ /\\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
||||||
|
# \ _ _ \/\\\\\\_ _ _ _ /\\\\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
||||||
|
# \ _ _ \/\\\//\\\_ _ /\\\//\\\ _ _/\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
||||||
|
# \ _ _ \/\\\\///\\\/\\\/ \/\\\ _ _///_ _ _/\\\\\\\\\\_ _ _ _/\\\\\\\\_ _\
|
||||||
|
# \ _ _ \/\\\ _\///\\\/ _ \/\\\ _ _/\\\ _ \/\\\////// _ _ _/\\\////// _ _\
|
||||||
|
# \ _ _ \/\\\ _ _\/// _ _ \/\\\ _ _/\\\ _ \/\\\\\\\\\\_ _ /\\\_ _ _ _ _ _\
|
||||||
|
# \ _ _ \/\\\ _ _ _ _ _ _ \/\\\ _ _/\\\ _ \////////\\\_ _\//\\\ _ _ _ _ _\
|
||||||
|
# \ _ _ \/\\\ _ _ _ _ _ _ \/\\\ _ _/\\\ _ _/\\\\\\\\\\_ _ \///\\\\\\\\_ _\
|
||||||
|
# \ _ _ \///_ _ _ _ _ _ _ \///_ _ _///_ _ \////////// _ _ _ \//////// _ _\
|
||||||
|
# \ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
||||||
|
# ─────────────────── A N D O T H E R W H A T - H A V E - Y O U S ──
|
||||||
|
#
|
||||||
|
#─── other needful API keys, mainly: ────────────────────────────────────────────────────
|
||||||
|
CF_TOKEN=ESjjVFHXfe6NrBo5TrN4_AfhHNezytCVlY-VS2HD
|
||||||
|
VISUALCROSSING_API_KEY=DAEJSKWJQ2CHM3J6B7C5FWQZV
|
||||||
|
ELEVENLABS_API_KEY=01eeafb6ce0f6d1fd70e4aa9e7262827
|
||||||
|
COURTLISTENER_BASE_URL=https://www.courtlistener.com
|
||||||
|
COURTLISTENER_API_KEY=your_courtlistener_api_key_here
|
||||||
|
TIMING_API_URL=https://web.timingapp.com/api/v1
|
||||||
|
TIMING_API_KEY=eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhdWQiOiIxIiwianRpIjoiODMyNTMwYTAxYjJhNzdlOTgzZWRlMjRiNDdkMmY0YWYzYWU3YTIzYjkxM2QyNTFmZjE1YTQ4YTkzYjI3YTQ4M2I0MzE5MzU2MzZlMjYyZWYiLCJpYXQiOjE2OTI0MjEyNjkuMDc4MzU4LCJuYmYiOjE2OTI0MjEyNjkuMDc4MzYsImV4cCI6MjAwODA0MDQ2OS4wNzMzMjcsInN1YiI6IjM2MTgxNzA4NDcyMTEwMzg5NzYiLCJzY29wZXMiOltdfQ.fVhhJtYb6wtHBQj7J9sxTsT3nb6_BLu4ynqNMC-SpJ2exj31wF7dHXfdGF-ZCg_H2TWh8Jsrak7ovwHsMPvkLRPgxkyjkyLgVbnzZICbP0xffrsguTnillXKCbEkwOVo4s7esf829VVagHCkpNwYbfKLJ9FLHIqs0hQMhh_S7jpbzmXUe7t6tUG43IgILBD0IwphJ2BGs5X2fhjW8FkCke85JxbQ4a29cqYtgFJ7tMP97noTFB4e_gxFpHUl-Ou_bwdpBKfarTyxhtwm1DJkQB_MrAX4py8tmFlFFJPd-7WG-LaswiI7bctN3Lux-If5oxAhm29PkS3ooxvJD86YDR0rJ94aGc8IBypnqYyGFW1ks5fsQ057UwC3XK62ezilWdamh7gtcInShSdHr7pPtIxntCe3x039NSVTBIQ54WHNaWrfI0i83Lm61ak7Ss3qotJhwtIv0aUOUKS3DOz7jfL4Z4GHUjXgBmubeC_vuLHUVCutBsiZ4Jv4QxmWKy2sPlp-r2OgJlAPkcULvTu1GvXavRTrXb7PXkEKO4ErdBTvu2RyA6XLR1MKs0V7mRNvBfuiMRvWRuv9NBLh6vpgRTVo5Tthh-ahSQ-Rd6QcmvVNf-rLnxWGY4nOdx6QLcYKPukQourR2o6QzxGcpb9pDc8X0p2SEtDrDijpy6usFxk
|
||||||
|
MAC_ID=sij-mbp16
|
||||||
|
MAC_UN=sij
|
||||||
|
MAC_PW="Synchr0!"
|
||||||
|
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# CF_TOKEN: a Cloudflare token. This is used on the cf router for quick
|
||||||
|
# deployment of new domains in tandem with Caddy and for ddns.
|
||||||
|
#
|
||||||
|
# VISUALCROSSING_API_KEY: used for obtaining weather forecasts. It is a very data-rich
|
||||||
|
# yet affordable source of weather info, with a generous free
|
||||||
|
# plan.
|
||||||
|
#
|
||||||
|
# ELEVENLABS_API_KEY: used when on the tts router if tts tasks are outsourced to
|
||||||
|
# the state-of-the-art models at ElevenLabs.
|
||||||
|
#
|
||||||
|
# COURTLISTENER_API_KEY: used primarily on the hooks router, but likely relevant only
|
||||||
|
# to legal professionals that will be aware what it is for.
|
||||||
|
#
|
||||||
|
# TIMING_API_URL: are used on the time router for generating various tasks
|
||||||
|
#                         related to timekeeping, as well as on the notes router for
|
||||||
|
# TIMING_API_KEY: generating markdown-formatted timeslips. It requires an
|
||||||
|
# active subscription to the Timing app (macOS or web), but
|
||||||
|
#                         it's worth noting, comes included in the SetApp subscription
|
||||||
|
# bundle, for the same price, last I checked, as subscribing to
|
||||||
|
# Timing alone. If you have a Mac and somehow don't know this
|
||||||
|
# already, SetApp is an utterly insane value. I pay $15/mo for
|
||||||
|
# apps that I would otherwise pay ~$100/mo for if subscribing
|
||||||
|
# individually. I want to say I wasn't paid to say this, but
|
||||||
|
# with those savings I almost feel like I was.
|
||||||
|
#
|
||||||
|
# MAC_ID: These last three variables are for a specific use case where
|
||||||
|
# MAC_UN: you want certain commands run, or alerts appearing, on a
|
||||||
|
# MAC_PW:                 designated macOS computer. The alerts router is designed to
|
||||||
|
# deliver OS-level notifications to the specified Mac when a
|
||||||
|
# webhook gets a hit on specified keywords within the payload.
|
||||||
|
# Setting the MAC_ID to the TS_ID of the target Mac, allows
|
||||||
|
# the script to readily know whether it itself is the target
|
||||||
|
# (this is relevant in a load-balancing context), and how to
|
||||||
|
# reach the target if not — to wit, ssh using MagicDNS.
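A sketch of the mechanism those three variables imply: if this instance is itself the designated Mac (MAC_ID matches its own TS_ID), show the notification locally; otherwise reach the target over SSH at its MagicDNS name and do it there. The osascript call and the {device}.{tailnet}.ts.net hostname pattern are ordinary macOS and Tailscale conventions, but the function below is illustrative, not the alerts router's actual code.

```python
# Illustrative sketch of MAC_ID / MAC_UN usage; not the actual alerts router.
import subprocess

TS_ID = "sij-mbp16"                      # this server's own identity
MAC_ID, MAC_UN = "sij-mbp16", "sij"
TS_TAILNET = "starling-sailfin"

def notify_mac(title: str, message: str):
    osa = f'display notification "{message}" with title "{title}"'
    if MAC_ID == TS_ID:
        # We are already the designated Mac: notify locally.
        subprocess.run(["osascript", "-e", osa], check=False)
    else:
        # Otherwise ssh to the target via its MagicDNS name and notify there.
        host = f"{MAC_ID}.{TS_TAILNET}.ts.net"
        subprocess.run(["ssh", f"{MAC_UN}@{host}", f"osascript -e '{osa}'"], check=False)

notify_mac("sijapi", "webhook keyword matched")
```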
|
||||||
|
```
|
|
@@ -1,11 +0,0 @@
__future__
cv2
dateparser_data
ionic
json # Used for working with JSON data
llama_cpp
markdown_it
phonenumber_field
pptx
requests # Used for making HTTP requests
sijapi
requirements.txt: 153 changed lines
@@ -1,134 +1,47 @@
setuptools
|
|
||||||
adblockparser
|
|
||||||
aiofiles
|
|
||||||
aiohttp
|
|
||||||
asyncpg
|
|
||||||
better_profanity
|
|
||||||
elevation
|
|
||||||
matplotlib
|
|
||||||
pydantic
|
|
||||||
python-dotenv
|
python-dotenv
|
||||||
pyyaml
|
setuptools
|
||||||
reverse_geocoder
|
|
||||||
timezonefinder
|
|
||||||
python-dateutil
|
|
||||||
loguru
|
|
||||||
fastapi
|
|
||||||
httpx
|
|
||||||
hypercorn
|
|
||||||
starlette
|
|
||||||
PyPDF2
|
PyPDF2
|
||||||
filetype
|
fastapi
|
||||||
pandas
|
|
||||||
paramiko
|
|
||||||
pdf2image
|
pdf2image
|
||||||
pdfminer
|
pdfminer
|
||||||
pytesseract
|
pytesseract
|
||||||
|
python-dateutil
|
||||||
python-docx
|
python-docx
|
||||||
scipy
|
hypercorn
|
||||||
sshtunnel
|
starlette
|
||||||
torch
|
httpx
|
||||||
requests
|
pydantic
|
||||||
instagrapi
|
|
||||||
jwt
|
|
||||||
ollama
|
|
||||||
pyotp
|
|
||||||
pytz
|
pytz
|
||||||
tqdm
|
requests
|
||||||
chromadb
|
aiohttp
|
||||||
html2text
|
paramiko
|
||||||
markdown
|
tailscale
|
||||||
openai
|
pandas
|
||||||
folium
|
pydub
|
||||||
|
torch
|
||||||
selenium
|
selenium
|
||||||
webdriver_manager
|
webdriver_manager
|
||||||
pydub
|
faster_whisper
|
||||||
shapely
|
filetype
|
||||||
|
html2text
|
||||||
|
markdown
|
||||||
|
ollama
|
||||||
|
aiofiles
|
||||||
bs4
|
bs4
|
||||||
imbox
|
imbox
|
||||||
markdownify
|
|
||||||
newspaper3k
|
newspaper3k
|
||||||
readability
|
python-magic
|
||||||
trafilatura
|
|
||||||
urllib3
|
urllib3
|
||||||
anyio
|
|
||||||
location
|
|
||||||
SRTM.py
|
|
||||||
better_profanity
|
|
||||||
EventKit
|
|
||||||
Foundation
|
|
||||||
aiosqlite
|
|
||||||
anthropic
|
|
||||||
apscheduler
|
|
||||||
asgiref
|
|
||||||
aura_sr
|
|
||||||
authlib
|
|
||||||
backports.zoneinfo
|
|
||||||
boto3
|
|
||||||
click
|
|
||||||
colorama
|
|
||||||
contextvars
|
|
||||||
cron_descriptor
|
|
||||||
dateparser
|
|
||||||
deprecated
|
|
||||||
django
|
|
||||||
django_apscheduler
|
|
||||||
exa_py
|
|
||||||
factory
|
|
||||||
faker
|
|
||||||
faster_whisper
|
|
||||||
ffmpeg
|
|
||||||
fire
|
|
||||||
flupy
|
|
||||||
freezegun
|
|
||||||
google
|
|
||||||
huggingface_hub
|
|
||||||
jinja2
|
|
||||||
khoj
|
|
||||||
konlpy
|
|
||||||
langchain
|
|
||||||
langchain_community
|
|
||||||
libtmux
|
|
||||||
litellm
|
|
||||||
llama_index
|
|
||||||
lxml
|
|
||||||
magika
|
|
||||||
moviepy
|
|
||||||
neo4j
|
|
||||||
nest_asyncio
|
|
||||||
nltk
|
|
||||||
numpy
|
|
||||||
openpyxl
|
|
||||||
osmium
|
|
||||||
packaging
|
|
||||||
pgvector
|
|
||||||
posthog
|
|
||||||
psutil
|
|
||||||
psycopg2
|
|
||||||
pypdf
|
|
||||||
pytest
|
|
||||||
r2r
|
|
||||||
redis
|
|
||||||
resend
|
|
||||||
rich
|
|
||||||
schedule
|
|
||||||
semaphore
|
|
||||||
sentence_transformers
|
|
||||||
soundfile
|
|
||||||
spacy
|
|
||||||
sqlalchemy
|
|
||||||
stripe
|
|
||||||
tailscale
|
|
||||||
tenacity
|
|
||||||
tiktoken
|
|
||||||
torchaudio
|
|
||||||
transformers
|
|
||||||
twilio
|
|
||||||
typing_extensions
|
|
||||||
uvicorn
|
|
||||||
vecs
|
|
||||||
vectordb
|
|
||||||
websockets
|
|
||||||
whisper
|
whisper
|
||||||
whisperplus
|
huggingface_hub
|
||||||
youtube_dl
|
numpy
|
||||||
|
tqdm
|
||||||
|
tiktoken
|
||||||
|
numba
|
||||||
|
scipy
|
||||||
|
vectordb
|
||||||
|
IPython
|
||||||
|
torchaudio
|
||||||
|
lxml
|
||||||
|
lxml_html_clean
|
||||||
|
pdfminer.six
|
||||||
|
|
setup.py: 1 changed line
@@ -27,6 +27,7 @@ setup(
 'requests',
 'aiohttp',
 'paramiko',
+'tailscale',
 'pandas',
 'pydub',
 'torch',
@@ -1,140 +1,185 @@
|
||||||
# __init__.py
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
import ipaddress
|
||||||
|
import multiprocessing
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from .logs import L, get_logger
|
from dateutil import tz
|
||||||
|
from pathlib import Path
|
||||||
|
from pydantic import BaseModel
|
||||||
|
import traceback
|
||||||
|
import logging
|
||||||
|
from . import logs
|
||||||
|
|
||||||
# Initialize logger before loading any other dependencies
|
### Logs ###
|
||||||
|
HYPERCORN_LOG_LEVEL = None
|
||||||
|
LOGGER = logging.getLogger('LOGGER')
|
||||||
|
def DEBUG(d): LOGGER.debug(d)
|
||||||
|
def INFO(i): LOGGER.debug(i)
|
||||||
|
def WARN(w): LOGGER.warning(w)
|
||||||
|
def ERR(e):
|
||||||
|
LOGGER.error(e)
|
||||||
|
LOGGER.error(traceback.format_exc())
|
||||||
|
def CRITICAL(c):
|
||||||
|
LOGGER.critical(c)
|
||||||
|
LOGGER.critical(traceback.format_exc())
|
||||||
|
|
||||||
|
# from sijapi.config.config import load_config
|
||||||
|
# cfg = load_config()
|
||||||
|
|
||||||
|
### Initial initialization
|
||||||
BASE_DIR = Path(__file__).resolve().parent
CONFIG_DIR = BASE_DIR / "config"
ENV_PATH = CONFIG_DIR / ".env"
load_dotenv(ENV_PATH)
LOGS_DIR = BASE_DIR / "logs"
os.makedirs(LOGS_DIR, exist_ok=True)
L.init('sys', LOGS_DIR)
l = get_logger("init")

import ipaddress
### API essentials
import multiprocessing
ROUTERS = os.getenv('ROUTERS', '').split(',')
from dateutil import tz
PUBLIC_SERVICES = os.getenv('PUBLIC_SERVICES', '').split(',')
from pathlib import Path
GLOBAL_API_KEY = os.getenv("GLOBAL_API_KEY")
from .database import Database
# HOST_NET and HOST_PORT comprise HOST, which is what the server will bind to
from .classes import Config, SysConfig, DirConfig, Geocoder
HOST_NET = os.getenv("HOST_NET", "127.0.0.1")
HOST_PORT = int(os.getenv("HOST_PORT", 4444))
HOST = f"{HOST_NET}:{HOST_PORT}"
LOCAL_HOSTS = [ipaddress.ip_address(localhost.strip()) for localhost in os.getenv('LOCAL_HOSTS', '127.0.0.1').split(',')] + ['localhost']
SUBNET_BROADCAST = os.getenv("SUBNET_BROADCAST", '10.255.255.255')
TRUSTED_SUBNETS = [ipaddress.ip_network(subnet.strip()) for subnet in os.getenv('TRUSTED_SUBNETS', '127.0.0.1/32').split(',')]
MAX_CPU_CORES = min(int(os.getenv("MAX_CPU_CORES", int(multiprocessing.cpu_count()/2))), multiprocessing.cpu_count())

# Load core configurations
### Directories & general paths
Sys = SysConfig.init('sys', 'secrets')  # load configuration from config/sys.yaml and config/secrets.yaml
HOME_DIR = Path.home()
Db = Database.init('sys')  # load configuration from config/sys.yaml
ROUTER_DIR = BASE_DIR / "routers"
Dir = DirConfig.init('dirs')  # load configuration from config/dirs.yaml

# Load module configurations
Gis = Config.init('gis', 'secrets', Dir)
Img = Config.init('img', 'secrets', Dir)
Llm = Config.init('llm', 'secrets', Dir)
News = Config.init('news', 'secrets', Dir)
Archivist = Config.init('archivist', 'secrets', Dir)
Scrape = Config.init('scrape', 'secrets', Dir)
Serve = Config.init('serve', 'secrets', Dir)
Tts = Config.init('tts', 'secrets', Dir)

# Directories & general paths
DATA_DIR = BASE_DIR / "data"
os.makedirs(DATA_DIR, exist_ok=True)
ALERTS_DIR = DATA_DIR / "alerts"
os.makedirs(ALERTS_DIR, exist_ok=True)
LOGS_DIR = BASE_DIR / "logs"
os.makedirs(LOGS_DIR, exist_ok=True)
REQUESTS_DIR = LOGS_DIR / "requests"
os.makedirs(REQUESTS_DIR, exist_ok=True)
REQUESTS_LOG_PATH = LOGS_DIR / "requests.log"
BLOCKLISTS_DIR = DATA_DIR / "blocklists"

# LOCATE AND WEATHER LOCALIZATIONS
### Databases
DB = os.getenv("DB", 'sijdb')
DB_HOST = os.getenv("DB_HOST", "127.0.0.1")
DB_PORT = os.getenv("DB_PORT", 5432)
DB_USER = os.getenv("DB_USER", 'sij')
DB_PASS = os.getenv("DB_PASS")
DB_SSH = os.getenv("DB_SSH", "100.64.64.15")
DB_SSH_USER = os.getenv("DB_SSH_USER")
DB_SSH_PASS = os.getenv("DB_SSH_ENV")
DB_URL = f'postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB}'
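For reference, a minimal connectivity sketch (not part of the repository; the function name and credentials below are placeholders) showing how a DSN in the DB_URL format above can be exercised with asyncpg:

# Hypothetical illustration only -- verifies a PostgreSQL DSN like DB_URL is reachable.
import asyncio
import asyncpg

async def check_db(dsn: str) -> str:
    conn = await asyncpg.connect(dsn=dsn)                 # open a single connection
    try:
        return await conn.fetchval("SELECT version();")   # simple round-trip query
    finally:
        await conn.close()

# asyncio.run(check_db("postgresql://sij:password@127.0.0.1:5432/sijdb"))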
### LOCATE AND WEATHER LOCALIZATIONS
USER_FULLNAME = os.getenv('USER_FULLNAME')
USER_BIO = os.getenv('USER_BIO')
NAMED_LOCATIONS = CONFIG_DIR / "named-locations.yaml"
TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
HOME_ZIP = os.getenv("HOME_ZIP")  # unimplemented
LOCATION_OVERRIDES = DATA_DIR / "loc_overrides.json"
LOCATIONS_CSV = DATA_DIR / "US.csv"
# DB = DATA_DIR / "weatherlocate.db"  # deprecated
VISUALCROSSING_BASE_URL = os.getenv("VISUALCROSSING_BASE_URL", "https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline")
VISUALCROSSING_API_KEY = os.getenv("VISUALCROSSING_API_KEY")
GEONAMES_TXT = DATA_DIR / "geonames.txt"
LOCATIONS_CSV = DATA_DIR / "US.csv"
TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
TZ_CACHE = DATA_DIR / "tzcache.json"
GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)

# Obsidian & notes
ALLOWED_FILENAME_CHARS = r'[^\w \.-]'
### Obsidian & notes
MAX_PATH_LENGTH = 254
OBSIDIAN_VAULT_DIR = Path(os.getenv("OBSIDIAN_BASE_DIR") or HOME_DIR / "Nextcloud" / "notes")
OBSIDIAN_VAULT_DIR = Path(os.getenv("OBSIDIAN_BASE_DIR") or Path(Dir.HOME) / "Nextcloud" / "notes")
OBSIDIAN_JOURNAL_DIR = OBSIDIAN_VAULT_DIR / "journal"
OBSIDIAN_RESOURCES_DIR = "obsidian/resources"
OBSIDIAN_BANNER_DIR = f"{OBSIDIAN_RESOURCES_DIR}/banners"
os.makedirs(Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_BANNER_DIR, exist_ok=True)
OBSIDIAN_BANNER_SCENE = os.getenv("OBSIDIAN_BANNER_SCENE", "wallpaper")
OBSIDIAN_CHROMADB_COLLECTION = os.getenv("OBSIDIAN_CHROMADB_COLLECTION", "obsidian")
ARCHIVE_DIR = Path(os.getenv("ARCHIVE_DIR", OBSIDIAN_VAULT_DIR / "archive"))
os.makedirs(ARCHIVE_DIR, exist_ok=True)
DOC_DIR = DATA_DIR / "docs"
os.makedirs(DOC_DIR, exist_ok=True)
# DATETIME SCHEMA FOR DAILY NOTE FOLDER HIERARCHY FORMATTING ###
### DATETIME SCHEMA FOR DAILY NOTE FOLDER HIERARCHY FORMATTING ###
YEAR_FMT = os.getenv("YEAR_FMT")
MONTH_FMT = os.getenv("MONTH_FMT")
DAY_FMT = os.getenv("DAY_FMT")
DAY_SHORT_FMT = os.getenv("DAY_SHORT_FMT")

# Large language model
### Large language model
LLM_URL = os.getenv("LLM_URL", "http://localhost:11434")
LLM_SYS_MSG = os.getenv("SYSTEM_MSG", "You are a helpful AI assistant.")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Summarization
SUMMARY_CHUNK_SIZE = int(os.getenv("SUMMARY_CHUNK_SIZE", 16384))  # measured in tokens
SUMMARY_CHUNK_OVERLAP = int(os.getenv("SUMMARY_CHUNK_OVERLAP", 256))  # measured in tokens
SUMMARY_TPW = float(os.getenv("SUMMARY_TPW", 1.3))  # tokens per word
SUMMARY_LENGTH_RATIO = int(os.getenv("SUMMARY_LENGTH_RATIO", 4))  # original-to-summary length ratio
SUMMARY_MIN_LENGTH = int(os.getenv("SUMMARY_MIN_LENGTH", 150))  # measured in tokens
SUMMARY_MODEL = os.getenv("SUMMARY_MODEL", "command-r:latest")
SUMMARY_TOKEN_LIMIT = int(os.getenv("SUMMARY_TOKEN_LIMIT", 16384))
SUMMARY_INSTRUCT = os.getenv('SUMMARY_INSTRUCT', "You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.")
SUMMARY_INSTRUCT_TTS = os.getenv('SUMMARY_INSTRUCT_TTS', "You are an AI assistant that provides email summaries for Sanjay. Your response will undergo Text-To-Speech conversion and be added to Sanjay's private podcast. Providing adequate context (Sanjay did not send this question to you, he will only hear your response) but aiming for conciseness and precision, and bearing in mind the Text-To-Speech conversion (avoiding acronyms and formalities), summarize the following email.")
DEFAULT_LLM = os.getenv("DEFAULT_LLM", "dolphin-mistral")
DEFAULT_VISION = os.getenv("DEFAULT_VISION", "llava")
DEFAULT_VOICE = os.getenv("DEFAULT_VOICE", "Luna")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Stable diffusion
### Stable diffusion
IMG_DIR = DATA_DIR / "img" / "images"
SD_IMAGE_DIR = DATA_DIR / "sd" / "images"
os.makedirs(IMG_DIR, exist_ok=True)
os.makedirs(SD_IMAGE_DIR, exist_ok=True)
IMG_WORKFLOWS_DIR = DATA_DIR / "img" / "workflows"
SD_WORKFLOWS_DIR = DATA_DIR / "sd" / "workflows"
os.makedirs(IMG_WORKFLOWS_DIR, exist_ok=True)
os.makedirs(SD_WORKFLOWS_DIR, exist_ok=True)
COMFYUI_URL = os.getenv('COMFYUI_URL', "http://localhost:8188")
COMFYUI_DIR = Path(os.getenv('COMFYUI_DIR'))
COMFYUI_OUTPUT_DIR = COMFYUI_DIR / 'output'
COMFYUI_LAUNCH_CMD = os.getenv('COMFYUI_LAUNCH_CMD', 'mamba activate comfyui && python main.py')
IMG_CONFIG_PATH = CONFIG_DIR / 'img.yaml'

# ASR
### Summarization
SUMMARY_CHUNK_SIZE = int(os.getenv("SUMMARY_CHUNK_SIZE", 4000))  # measured in tokens
SUMMARY_CHUNK_OVERLAP = int(os.getenv("SUMMARY_CHUNK_OVERLAP", 100))  # measured in tokens
SUMMARY_TPW = float(os.getenv("SUMMARY_TPW", 1.3))  # tokens per word
SUMMARY_LENGTH_RATIO = int(os.getenv("SUMMARY_LENGTH_RATIO", 4))  # original-to-summary length ratio
SUMMARY_MIN_LENGTH = int(os.getenv("SUMMARY_MIN_LENGTH", 150))  # measured in tokens
SUMMARY_INSTRUCT = os.getenv("SUMMARY_INSTRUCT", "Summarize the provided text. Respond with the summary and nothing else. Do not otherwise acknowledge the request. Just provide the requested summary.")
SUMMARY_MODEL = os.getenv("SUMMARY_MODEL", "llama3")
SUMMARY_TOKEN_LIMIT = int(os.getenv("SUMMARY_TOKEN_LIMIT", 4096))
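The chunking and length settings above interact roughly as follows. This is an illustrative sketch of the arithmetic only; the function name and exact formula are assumptions, not the project's actual summarization routine:

# Hypothetical: how SUMMARY_TPW, SUMMARY_LENGTH_RATIO, SUMMARY_MIN_LENGTH and
# SUMMARY_TOKEN_LIMIT could combine into a target summary length in tokens.
def target_summary_tokens(word_count: int,
                          tpw: float = 1.3,
                          length_ratio: int = 4,
                          min_length: int = 150,
                          token_limit: int = 4096) -> int:
    estimated_tokens = int(word_count * tpw)   # words -> estimated tokens
    target = estimated_tokens // length_ratio  # shrink by the length ratio
    target = max(target, min_length)           # never below the minimum
    return min(target, token_limit)            # never above the model limit

print(target_summary_tokens(8000))  # e.g. 8000 words -> 2600 tokens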
### ASR
ASR_DIR = DATA_DIR / "asr"
os.makedirs(ASR_DIR, exist_ok=True)
WHISPER_CPP_DIR = Path(Dir.HOME) / str(os.getenv("WHISPER_CPP_DIR"))
WHISPER_CPP_DIR = HOME_DIR / str(os.getenv("WHISPER_CPP_DIR"))
WHISPER_CPP_MODELS = os.getenv('WHISPER_CPP_MODELS', 'NULL,VOID').split(',')

# TTS
### TTS
PREFERRED_TTS = os.getenv("PREFERRED_TTS", "None")
TTS_DIR = DATA_DIR / "tts"
os.makedirs(TTS_DIR, exist_ok=True)
VOICE_DIR = TTS_DIR / 'voices'
os.makedirs(VOICE_DIR, exist_ok=True)
PODCAST_DIR = TTS_DIR / "sideloads"
os.makedirs(PODCAST_DIR, exist_ok=True)
TTS_OUTPUT_DIR = TTS_DIR / 'outputs'
os.makedirs(TTS_OUTPUT_DIR, exist_ok=True)
TTS_SEGMENTS_DIR = TTS_DIR / 'segments'
os.makedirs(TTS_SEGMENTS_DIR, exist_ok=True)
ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY")

# Calendar & email account
### Calendar & email account
MS365_TOGGLE = True if os.getenv("MS365_TOGGLE") == "True" else False
ICAL_TOGGLE = True if os.getenv("ICAL_TOGGLE") == "True" else False
ICS_PATH = DATA_DIR / 'calendar.ics'  # deprecated now, but maybe revive?
ICALENDARS = os.getenv('ICALENDARS', 'NULL,VOID').split(',')

class IMAP_DETAILS(BaseModel):
    email: str
    password: str
    host: str
    imap_port: int
    smtp_port: int
    imap_encryption: str = None
    smtp_encryption: str = None

EMAIL_CONFIG = CONFIG_DIR / "email.yaml"
EMAIL_LOGS = LOGS_DIR / "email"
os.makedirs(EMAIL_LOGS, exist_ok=True)
IMAP = IMAP_DETAILS(
    email = os.getenv('IMAP_EMAIL'),
    password = os.getenv('IMAP_PASSWORD'),
    host = os.getenv('IMAP_HOST', '127.0.0.1'),
    imap_port = int(os.getenv('IMAP_PORT', 1143)),
    smtp_port = int(os.getenv('SMTP_PORT', 469)),
    imap_encryption = os.getenv('IMAP_ENCRYPTION', None),
    smtp_encryption = os.getenv('SMTP_ENCRYPTION', None)
)
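For illustration, a minimal sketch (assuming the IMAP_* environment variables above are set; this is not sijapi's own mail code) of using those connection details with the standard-library imaplib:

# Hypothetical example: count unread messages with the IMAP settings shown above.
import imaplib

def count_unread(host: str = "127.0.0.1", port: int = 1143,
                 user: str = "me@example.com", password: str = "app-password") -> int:
    conn = imaplib.IMAP4(host, port)        # plain IMAP; use IMAP4_SSL for an SSL port
    try:
        conn.login(user, password)
        conn.select("INBOX", readonly=True)
        status, data = conn.search(None, "UNSEEN")   # message IDs of unread mail
        return len(data[0].split()) if status == "OK" else 0
    finally:
        conn.logout()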
AUTORESPONSE_WHITELIST = os.getenv('AUTORESPONSE_WHITELIST', '').split(',')
AUTORESPONSE_BLACKLIST = os.getenv('AUTORESPONSE_BLACKLIST', '').split(',')
AUTORESPONSE_BLACKLIST.extend(["no-reply@", "noreply@", "@uscourts.gov", "@doi.gov"])
AUTORESPONSE_CONTEXT = os.getenv('AUTORESPONSE_CONTEXT', None)
AUTORESPOND = AUTORESPONSE_CONTEXT != None

# Courtlistener & other webhooks
### Courtlistener & other webhooks
COURTLISTENER_DOCKETS_DIR = DATA_DIR / "courtlistener" / "dockets"
os.makedirs(COURTLISTENER_DOCKETS_DIR, exist_ok=True)
COURTLISTENER_SEARCH_DIR = DATA_DIR / "courtlistener" / "cases"

@@ -144,7 +189,7 @@ COURTLISTENER_API_KEY = os.getenv("COURTLISTENER_API_KEY")
COURTLISTENER_BASE_URL = os.getenv("COURTLISTENER_BASE_URL", "https://www.courtlistener.com")
COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"

# Keys & passwords
### Keys & passwords
PUBLIC_KEY_FILE = os.getenv("PUBLIC_KEY_FILE", 'you_public_key.asc')
PUBLIC_KEY = (BASE_DIR.parent / PUBLIC_KEY_FILE).read_text()
MAC_ID = os.getenv("MAC_ID")

@@ -152,43 +197,45 @@ MAC_UN = os.getenv("MAC_UN")
MAC_PW = os.getenv("MAC_PW")
TIMING_API_KEY = os.getenv("TIMING_API_KEY")
TIMING_API_URL = os.getenv("TIMING_API_URL", "https://web.timingapp.com/api/v1")

PHOTOPRISM_URL = os.getenv("PHOTOPRISM_URL")
PHOTOPRISM_USER = os.getenv("PHOTOPRISM_USER")
PHOTOPRISM_PASS = os.getenv("PHOTOPRISM_PASS")

# Tailscale
### Tailscale
TS_IP = ipaddress.ip_address(os.getenv("TS_IP", "NULL"))
TS_SUBNET = ipaddress.ip_network(os.getenv("TS_SUBNET")) if os.getenv("TS_SUBNET") else None
TS_ID = os.getenv("TS_ID", "NULL")
TS_TAILNET = os.getenv("TS_TAILNET", "NULL")
TS_ADDRESS = f"http://{TS_ID}.{TS_TAILNET}.ts.net"

# Cloudflare
### Cloudflare
CF_API_BASE_URL = os.getenv("CF_API_BASE_URL")
CF_TOKEN = os.getenv("CF_TOKEN")
CF_IP = DATA_DIR / "cf_ip.txt"  # to be deprecated soon
CF_DOMAINS_PATH = DATA_DIR / "cf_domains.json"  # to be deprecated soon

# Caddy - not fully implemented
### Caddy - not fully implemented
BASE_URL = os.getenv("BASE_URL")
CADDY_SERVER = os.getenv('CADDY_SERVER', None)
CADDYFILE_PATH = os.getenv("CADDYFILE_PATH", "") if CADDY_SERVER is not None else None
CADDY_API_KEY = os.getenv("CADDY_API_KEY")

# Microsoft Graph
### Microsoft Graph
MS365_CLIENT_ID = os.getenv('MS365_CLIENT_ID')
MS365_SECRET = os.getenv('MS365_SECRET')
MS365_TENANT_ID = os.getenv('MS365_TENANT_ID')
MS365_CERT_PATH = DATA_DIR / 'ms365' / '.cert.pem'  # deprecated
MS365_CERT_PATH = CONFIG_DIR / 'MS365' / '.cert.pem'  # deprecated
MS365_KEY_PATH = DATA_DIR / 'ms365' / '.cert.key'  # deprecated
MS365_KEY_PATH = CONFIG_DIR / 'MS365' / '.cert.key'  # deprecated
MS365_KEY = MS365_KEY_PATH.read_text()
MS365_TOKEN_PATH = CONFIG_DIR / 'MS365' / '.token.txt'
MS365_THUMBPRINT = os.getenv('MS365_THUMBPRINT')

MS365_LOGIN_URL = os.getenv("MS365_LOGIN_URL", "https://login.microsoftonline.com")
MS365_AUTHORITY_URL = f"{MS365_LOGIN_URL}/{MS365_TENANT_ID}"
MS365_REDIRECT_PATH = os.getenv("MS365_REDIRECT_PATH", "https://api.sij.ai/o365/oauth_redirect")
MS365_SCOPE = os.getenv("MS365_SCOPE", 'Calendars.Read,Calendars.ReadWrite,offline_access').split(',')

# Maintenance
### Maintenance
GARBAGE_COLLECTION_INTERVAL = 60 * 60  # Run cleanup every hour
GARBAGE_TTL = 60 * 60 * 24  # Delete files older than 24 hours
@@ -1,85 +1,39 @@
#!/Users/sij/miniforge3/envs/api/bin/python
#__main__.py
from fastapi import FastAPI, Request, HTTPException, Response
from contextlib import asynccontextmanager
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import ClientDisconnect
from hypercorn.asyncio import serve
from hypercorn.config import Config as HypercornConfig
from hypercorn.config import Config
import sys
import os
import asyncio
import traceback
import httpx
import argparse
import json
import ipaddress
import importlib
from dotenv import load_dotenv
from pathlib import Path
import argparse
from datetime import datetime
from . import Sys, Db, Dir
from . import logs
from .logs import L, get_logger
parser = argparse.ArgumentParser(description='Personal API.')
parser.add_argument('--debug', action='store_true', help='Set log level to DEBUG')
parser.add_argument('--test', type=str, help='Load only the specified module.')
args = parser.parse_args()
logs.setup("debug")
from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
from sijapi import HOST, ENV_PATH, GLOBAL_API_KEY, REQUESTS_DIR, ROUTER_DIR, REQUESTS_LOG_PATH, PUBLIC_SERVICES, TRUSTED_SUBNETS, ROUTERS

def parse_args():
    parser = argparse.ArgumentParser(description='Personal API.')
    parser.add_argument('--log', type=str, default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        help='Set overall log level (e.g., DEBUG, INFO, WARNING)')
    parser.add_argument('--debug', nargs='+', default=[],
                        help='Set DEBUG log level for specific modules')
    parser.add_argument('--info', nargs='+', default=[],
                        help='Set INFO log level for specific modules')
    parser.add_argument('--test', type=str, help='Load only the specified module.')
    return parser.parse_args()

args = parse_args()

# Setup logging
L.setup_from_args(args)
l = get_logger("main")
l.info(f"Logging initialized. Debug modules: {L.debug_modules}")
l.info(f"Command line arguments: {args}")

l.debug(f"Current working directory: {os.getcwd()}")
l.debug(f"__file__ path: {__file__}")
l.debug(f"Absolute path of __file__: {os.path.abspath(__file__)}")

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup
    l.critical("sijapi launched")
    l.info(f"Arguments: {args}")

    # Load routers
    if args.test:
        load_router(args.test)
    else:
        for module_name in Sys.MODULES.__fields__:
            if getattr(Sys.MODULES, module_name):
                load_router(module_name)

    try:
        await Db.initialize_engines()
    except Exception as e:
        l.critical(f"Error during startup: {str(e)}")
        l.critical(f"Traceback: {traceback.format_exc()}")

    try:
        yield  # This is where the app runs
    finally:
        # Shutdown
        l.critical("Shutting down...")
        try:
            await asyncio.wait_for(Db.close(), timeout=20)
            l.critical("Database pools closed.")
        except asyncio.TimeoutError:
            l.critical("Timeout while closing database pools.")
        except Exception as e:
            l.critical(f"Error during shutdown: {str(e)}")
            l.critical(f"Traceback: {traceback.format_exc()}")

app = FastAPI(lifespan=lifespan)
app.add_middleware(
# Initialize a FastAPI application
api = FastAPI()
# CORSMiddleware
api.add_middleware(
    CORSMiddleware,
    allow_origins=['*'],
    allow_credentials=True,
@@ -93,134 +47,87 @@ class SimpleAPIKeyMiddleware(BaseHTTPMiddleware):
        if request.method == "OPTIONS":
            # Allow CORS preflight requests
            return JSONResponse(status_code=200)
        if request.url.path not in Sys.PUBLIC:
        if request.url.path not in PUBLIC_SERVICES:
            trusted_subnets = [ipaddress.ip_network(subnet) for subnet in Sys.TRUSTED_SUBNETS]
            if not any(client_ip in subnet for subnet in TRUSTED_SUBNETS):
            if not any(client_ip in subnet for subnet in trusted_subnets):
                api_key_header = request.headers.get("Authorization")
                api_key_query = request.query_params.get("api_key")

                # Convert Sys.KEYS to lowercase for case-insensitive comparison
                api_keys_lower = [key.lower() for key in Sys.KEYS]
                l.debug(f"Sys.KEYS (lowercase): {api_keys_lower}")

                if api_key_header:
                    api_key_header = api_key_header.lower().split("bearer ")[-1]
                    l.debug(f"API key provided in header: {api_key_header}")
                if api_key_header != GLOBAL_API_KEY and api_key_query != GLOBAL_API_KEY:
                    WARN(f"Invalid API key provided by a requester.")
                if api_key_query:
                    api_key_query = api_key_query.lower()
                    l.debug(f"API key provided in query: {api_key_query}")

                if (api_key_header is None or api_key_header.lower() not in api_keys_lower) and \
                   (api_key_query is None or api_key_query.lower() not in api_keys_lower):
                    l.error(f"Invalid API key provided by a requester.")
                    if api_key_header:
                        l.debug(f"Invalid API key in header: {api_key_header}")
                    if api_key_query:
                        l.debug(f"Invalid API key in query: {api_key_query}")
                    return JSONResponse(
                        status_code=401,
                        content={"detail": "Invalid or missing API key"}
                    )
                else:
                    if api_key_header and api_key_header.lower() in api_keys_lower:
                        l.debug(f"Valid API key provided in header: {api_key_header}")
                    if api_key_query and api_key_query.lower() in api_keys_lower:
                        l.debug(f"Valid API key provided in query: {api_key_query}")

        response = await call_next(request)
        # DEBUG(f"Request from {client_ip} is complete")
        return response

# Add the middleware to your FastAPI app
api.add_middleware(SimpleAPIKeyMiddleware)
app.add_middleware(SimpleAPIKeyMiddleware)
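For illustration, a hedged client-side sketch of calling a protected route through this middleware; the endpoint path, base URL, and key value below are placeholders, not part of the repository. Both versions of the middleware accept either an Authorization bearer header or an api_key query parameter:

# Hypothetical usage of the API-key check above.
import httpx

def call_protected(base_url: str = "http://localhost:4444", api_key: str = "YOUR_GLOBAL_API_KEY"):
    r1 = httpx.get(f"{base_url}/some/route", headers={"Authorization": f"Bearer {api_key}"})
    r2 = httpx.get(f"{base_url}/some/route", params={"api_key": api_key})
    return r1.status_code, r2.status_code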
@app.exception_handler(HTTPException)
canceled_middleware = """
@api.middleware("http")
async def log_requests(request: Request, call_next):
    DEBUG(f"Incoming request: {request.method} {request.url}")
    DEBUG(f"Request headers: {request.headers}")
    DEBUG(f"Request body: {await request.body()}")
    response = await call_next(request)
    return response

async def log_outgoing_request(request):
    INFO(f"Outgoing request: {request.method} {request.url}")
    DEBUG(f"Request headers: {request.headers}")
    DEBUG(f"Request body: {request.content}")
"""

@api.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException):
    l.error(f"HTTP Exception: {exc.status_code} - {exc.detail}")
    l.error(f"Request: {request.method} {request.url}")
    ERR(f"HTTP Exception: {exc.status_code} - {exc.detail}")
    ERR(f"Request: {request.method} {request.url}")
    return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})

@app.middleware("http")
@api.middleware("http")
async def handle_exception_middleware(request: Request, call_next):
    try:
        response = await call_next(request)
        return response
    except Exception as exc:
        l.error(f"Unhandled exception in request: {request.method} {request.url}")
        l.error(f"Exception: {str(exc)}")
        l.error(f"Traceback: {traceback.format_exc()}")
        return JSONResponse(
            status_code=500,
            content={"detail": "Internal Server Error"}
        )
    except RuntimeError as exc:
        if str(exc) == "Response content longer than Content-Length":
            # Update the Content-Length header to match the actual response content length
            response.headers["Content-Length"] = str(len(response.body))
        else:
            raise
        return response

@app.post("/sync/pull")
async def pull_changes():
    l.info(f"Received request to /sync/pull")
    try:
        await Sys.add_primary_keys_to_local_tables()
        await Sys.add_primary_keys_to_remote_tables()
        try:
            source = await Sys.get_most_recent_source()
            if source:
                # Pull changes from the source
                total_changes = await Sys.pull_changes(source)

                return JSONResponse(content={
                    "status": "success",
                    "message": f"Pull complete. Total changes: {total_changes}",
                    "source": f"{source['ts_id']} ({source['ts_ip']})",
                    "changes": total_changes
                })
            else:
                return JSONResponse(content={
                    "status": "info",
                    "message": "No instances with more recent data found or all instances are offline."
                })
        except Exception as e:
            l.error(f"Error in /sync/pull: {str(e)}")
            l.error(f"Traceback: {traceback.format_exc()}")
            raise HTTPException(status_code=500, detail=f"Error during pull: {str(e)}")
        finally:
            l.info(f"Finished processing /sync/pull request")
    except Exception as e:
        l.error(f"Error while ensuring primary keys to tables: {str(e)}")
        l.error(f"Traceback: {traceback.format_exc()}")
        raise HTTPException(status_code=500, detail=f"Error during primary key insurance: {str(e)}")

def load_router(router_name):
    router_logger = get_logger(f"router.{router_name}")
    router_file = ROUTER_DIR / f'{router_name}.py'
    router_logger.debug(f"Attempting to load {router_name.capitalize()}...")
    DEBUG(f"Attempting to load {router_name.capitalize()}...")

    # Log the full path being checked
    router_file = Dir.ROUTER / f'{router_name}.py'
    router_logger.debug(f"Checking for router file at: {router_file.absolute()}")

    if router_file.exists():
        router_logger.debug(f"Router file found: {router_file}")
        module_path = f'sijapi.routers.{router_name}'
        router_logger.debug(f"Attempting to import module: {module_path}")
        try:
            module = importlib.import_module(module_path)
            router_logger.debug(f"Module imported successfully: {module}")
            router = getattr(module, router_name)
            router_logger.debug(f"Router object retrieved: {router}")
            api.include_router(router)
            app.include_router(router)
            INFO(f"{router_name.capitalize()} router loaded.")
            router_logger.info(f"Router {router_name} loaded successfully")
        except (ImportError, AttributeError) as e:
            router_logger.critical(f"Failed to load router {router_name}: {e}")
            CRITICAL(f"Failed to load router {router_name}: {e}")
            router_logger.debug(f"Current working directory: {os.getcwd()}")
            router_logger.debug(f"Python path: {sys.path}")
    else:
        router_logger.error(f"Router file for {router_name} does not exist at {router_file.absolute()}")
        WARN(f"Router file for {router_name} does not exist.")
        router_logger.debug(f"Contents of router directory: {list(Dir.ROUTER.iterdir())}")


def main(argv):
    config = HypercornConfig()
    config.bind = [Sys.BIND]
    config.startup_timeout = 300  # 5 minutes
    config.shutdown_timeout = 15  # 15 seconds
    asyncio.run(serve(app, config))

def main(argv):
    if args.test:
        load_router(args.test)
    else:
        CRITICAL(f"sijapi launched")
        CRITICAL(f"{args._get_args}")
        for router_name in ROUTERS:
            load_router(router_name)

    config = Config()
    config.keep_alive_timeout = 1200
    config.bind = [HOST]
    asyncio.run(serve(api, config))

if __name__ == "__main__":
    main(sys.argv[1:])
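For orientation, a hedged example of launching the service from the repository root; the router name passed to --test is illustrative only, and flag behavior differs between the two versions shown above:

python -m sijapi                  # load every enabled router and start the Hypercorn server
python -m sijapi --test news      # load only the named router (here 'news', as an example)
python -m sijapi --debug news     # main-branch parser: enable DEBUG logging for specific modules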
@@ -1,714 +0,0 @@
# classes.py
import json
import yaml
import math
import os
import re
import time
import aiofiles
import aiohttp
import asyncio
import asyncpg
import traceback
import multiprocessing
from tqdm.asyncio import tqdm
import reverse_geocoder as rg
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, TypeVar, ClassVar
from dotenv import load_dotenv
from pydantic import BaseModel, Field, create_model, PrivateAttr
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from timezonefinder import TimezoneFinder
from zoneinfo import ZoneInfo
from srtm import get_data
import os
import sys
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy.exc import OperationalError
from sqlalchemy import Column, Integer, String, DateTime, JSON, Text, select, func
from sqlalchemy.dialects.postgresql import JSONB
from urllib.parse import urljoin

from .logs import get_logger
l = get_logger(__name__)

Base = declarative_base()

BASE_DIR = Path(__file__).resolve().parent
CONFIG_DIR = BASE_DIR / "config"
ENV_PATH = CONFIG_DIR / ".env"
load_dotenv(ENV_PATH)
TS_ID = os.environ.get('TS_ID')
T = TypeVar('T', bound='Config')

class Config(BaseModel):
    HOME: Path = Path.home()
    _dir_config: Optional['Config'] = None

    @classmethod
    def init(cls, yaml_path: Union[str, Path], secrets_path: Optional[Union[str, Path]] = None, dir_config: Optional['Config'] = None) -> 'Config':
        yaml_path = cls._resolve_path(yaml_path, 'config')
        if secrets_path:
            secrets_path = cls._resolve_path(secrets_path, 'config')

        try:
            with yaml_path.open('r') as file:
                config_data = yaml.safe_load(file)

            l.debug(f"Loaded configuration data from {yaml_path}")
            if secrets_path:
                with secrets_path.open('r') as file:
                    secrets_data = yaml.safe_load(file)
                l.debug(f"Loaded secrets data from {secrets_path}")

                if isinstance(config_data, list):
                    config_data = {"configurations": config_data, "SECRETS": secrets_data}
                elif isinstance(config_data, dict):
                    config_data['SECRETS'] = secrets_data
                else:
                    raise ValueError(f"Unexpected configuration data type: {type(config_data)}")

            if not isinstance(config_data, dict):
                config_data = {"configurations": config_data}

            if config_data.get('HOME') is None:
                config_data['HOME'] = str(Path.home())
                l.debug(f"HOME was None in config, set to default: {config_data['HOME']}")

            load_dotenv()
            instance = cls.create_dynamic_model(**config_data)
            instance._dir_config = dir_config or instance
            resolved_data = instance.resolve_placeholders(config_data)
            instance = cls.create_dynamic_model(**resolved_data)
            instance._dir_config = dir_config or instance
            return instance

        except Exception as e:
            l.error(f"Error loading configuration: {str(e)}")
            raise

    @classmethod
    def _resolve_path(cls, path: Union[str, Path], default_dir: str) -> Path:
        base_path = Path(__file__).parent.parent  # This will be two levels up from this file
        path = Path(path)
        if not path.suffix:
            path = base_path / 'sijapi' / default_dir / f"{path.name}.yaml"
        elif not path.is_absolute():
            path = base_path / path
        return path

    def resolve_placeholders(self, data: Any) -> Any:
        if isinstance(data, dict):
            resolved_data = {k: self.resolve_placeholders(v) for k, v in data.items()}
            home_dir = Path(resolved_data.get('HOME', self.HOME)).expanduser()
            base_dir = Path(__file__).parent.parent
            data_dir = base_dir / "data"
            resolved_data['HOME'] = str(home_dir)
            resolved_data['BASE'] = str(base_dir)
            resolved_data['DATA'] = str(data_dir)
            return resolved_data
        elif isinstance(data, list):
            return [self.resolve_placeholders(v) for v in data]
        elif isinstance(data, str):
            return self.resolve_string_placeholders(data)
        else:
            return data

    def resolve_string_placeholders(self, value: str) -> Any:
        if isinstance(value, str) and value.startswith('{{') and value.endswith('}}'):
            key = value[2:-2].strip()
            parts = key.split('.')
            if len(parts) == 2 and parts[0] == 'SECRET':
                return getattr(self.SECRETS, parts[1], '')
        return value

    @classmethod
    def create_dynamic_model(cls, **data):
        for key, value in data.items():
            if isinstance(value, dict):
                data[key] = cls.create_dynamic_model(**value)
            elif isinstance(value, list) and all(isinstance(item, dict) for item in value):
                data[key] = [cls.create_dynamic_model(**item) for item in value]

        DynamicModel = create_model(
            f'Dynamic{cls.__name__}',
            __base__=cls,
            **{k: (Any, v) for k, v in data.items()}
        )
        return DynamicModel(**data)

    def has_key(self, key_path: str) -> bool:
        """
        Check if a key exists in the configuration or its nested objects.

        :param key_path: Dot-separated path to the key (e.g., 'elevenlabs.voices.Victoria')
        :return: True if the key exists, False otherwise
        """
        parts = key_path.split('.')
        current = self
        for part in parts:
            if hasattr(current, part):
                current = getattr(current, part)
            else:
                return False
        return True

    def get_value(self, key_path: str, default=None):
        """
        Get the value of a key in the configuration or its nested objects.

        :param key_path: Dot-separated path to the key (e.g., 'elevenlabs.voices.Victoria')
        :param default: Default value to return if the key doesn't exist
        :return: The value of the key if it exists, otherwise the default value
        """
        parts = key_path.split('.')
        current = self
        for part in parts:
            if hasattr(current, part):
                current = getattr(current, part)
            else:
                return default
        return current
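A hedged usage sketch of the Config loader above; the key path reuses the docstring's own example, but the variable names are illustrative and nothing here asserts what sijapi's real tts.yaml contains:

# Hypothetical illustration of Config.init / has_key / get_value.
from sijapi.classes import Config

tts_cfg = Config.init('tts', 'secrets')            # loads config/tts.yaml (+ config/secrets.yaml)
if tts_cfg.has_key('elevenlabs.voices.Victoria'):  # dot-path lookup into nested values
    voice_id = tts_cfg.get_value('elevenlabs.voices.Victoria', default=None)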
    class Config:
        extra = "allow"
        arbitrary_types_allowed = True


class DirConfig:
    def __init__(self, config_data: dict):
        self.BASE = Path(__file__).parent.parent
        self.HOME = Path.home()
        self.DATA = self.BASE / "data"

        for key, value in config_data.items():
            setattr(self, key, self._resolve_path(value))

    def _resolve_path(self, path: str) -> Path:
        path = path.replace("{{ BASE }}", str(self.BASE))
        path = path.replace("{{ HOME }}", str(self.HOME))
        path = path.replace("{{ DATA }}", str(self.DATA))
        return Path(path).expanduser()

    @classmethod
    def init(cls, yaml_path: Union[str, Path]) -> 'DirConfig':
        yaml_path = Path(yaml_path)
        if not yaml_path.is_absolute():
            yaml_path = Path(__file__).parent / "config" / yaml_path

        if not yaml_path.suffix:
            yaml_path = yaml_path.with_suffix('.yaml')

        with yaml_path.open('r') as file:
            config_data = yaml.safe_load(file)

        return cls(config_data)


class SysConfig(BaseModel):
    HOST: str
    PORT: int
    BIND: str
    URL: str
    PUBLIC: List[str]
    TRUSTED_SUBNETS: List[str]
    MODULES: Any
    EXTENSIONS: Any
    TZ: str
    KEYS: List[str]
    GARBAGE: Dict[str, Any]
    MAX_CPU_CORES: int

    def __init__(self, **data):
        if 'MAX_CPU_CORES' not in data:
            data['MAX_CPU_CORES'] = max(1, min(int(multiprocessing.cpu_count() / 2), multiprocessing.cpu_count()))
        super().__init__(**data)

    class Config:
        arbitrary_types_allowed = True

    @classmethod
    def init(cls, config_path: Union[str, Path], secrets_path: Union[str, Path]):
        config_path = cls._resolve_path(config_path, 'config')
        secrets_path = cls._resolve_path(secrets_path, 'config')

        with open(config_path, 'r') as file:
            config_data = yaml.safe_load(file)

        l.debug(f"Loaded main config: {config_data}")

        try:
            with open(secrets_path, 'r') as file:
                secrets_data = yaml.safe_load(file)
        except FileNotFoundError:
            l.warning(f"Secrets file not found: {secrets_path}")
            secrets_data = {}
        except yaml.YAMLError as e:
            l.error(f"Error parsing secrets YAML: {e}")
            secrets_data = {}

        config_data = cls.resolve_placeholders(config_data)
        l.debug(f"Resolved config: {config_data}")

        if isinstance(config_data.get('KEYS'), list) and len(config_data['KEYS']) == 1:
            placeholder = config_data['KEYS'][0]
            if placeholder.startswith('{{') and placeholder.endswith('}}'):
                key = placeholder[2:-2].strip()
                parts = key.split('.')
                if len(parts) == 2 and parts[0] == 'SECRET':
                    secret_key = parts[1]
                    if secret_key in secrets_data:
                        config_data['KEYS'] = secrets_data[secret_key]
                        l.debug(f"Replaced KEYS with secret: {config_data['KEYS']}")
                    else:
                        l.warning(f"Secret key '{secret_key}' not found in secrets file")
                else:
                    l.warning(f"Invalid secret placeholder format: {placeholder}")

        config_data['MODULES'] = cls._create_dynamic_config(config_data.get('MODULES', {}), 'DynamicModulesConfig')
        config_data['EXTENSIONS'] = cls._create_dynamic_config(config_data.get('EXTENSIONS', {}), 'DynamicExtensionsConfig')

        # Set MAX_CPU_CORES if not present in config_data
        if 'MAX_CPU_CORES' not in config_data:
            config_data['MAX_CPU_CORES'] = max(1, min(int(multiprocessing.cpu_count() / 2), multiprocessing.cpu_count()))

        return cls(**config_data)

    @classmethod
    def _create_dynamic_config(cls, data: Dict[str, Any], model_name: str):
        fields = {}
        for key, value in data.items():
            if isinstance(value, str):
                fields[key] = (bool, value.lower() == 'on')
            elif isinstance(value, bool):
                fields[key] = (bool, value)
            else:
                raise ValueError(f"Invalid value for {key}: {value}. Must be 'on', 'off', True, or False.")

        DynamicConfig = create_model(model_name, **fields)
        instance_data = {k: (v.lower() == 'on' if isinstance(v, str) else v) for k, v in data.items()}
        return DynamicConfig(**instance_data)

    @classmethod
    def _resolve_path(cls, path: Union[str, Path], default_dir: str) -> Path:
        base_path = Path(__file__).parent.parent
        path = Path(path)
        if not path.suffix:
            path = base_path / "sijapi" / default_dir / f"{path.name}.yaml"
        elif not path.is_absolute():
            path = base_path / path
        return path

    @classmethod
    def resolve_placeholders(cls, config_data: Dict[str, Any]) -> Dict[str, Any]:
        def resolve_value(value):
            if isinstance(value, str):
                pattern = r'\{\{\s*([^}]+)\s*\}\}'
                matches = re.findall(pattern, value)
                for match in matches:
                    if match in config_data:
                        value = value.replace(f'{{{{ {match} }}}}', str(config_data[match]))
            return value

        resolved_data = {}
        for key, value in config_data.items():
            if isinstance(value, dict):
                resolved_data[key] = cls.resolve_placeholders(value)
            elif isinstance(value, list):
                resolved_data[key] = [resolve_value(item) for item in value]
            else:
                resolved_data[key] = resolve_value(value)

        if 'BIND' in resolved_data:
            resolved_data['BIND'] = resolved_data['BIND'].replace('{{ HOST }}', str(resolved_data['HOST']))
            resolved_data['BIND'] = resolved_data['BIND'].replace('{{ PORT }}', str(resolved_data['PORT']))

        return resolved_data
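A small, hedged demonstration of the BIND placeholder rewriting handled by SysConfig.resolve_placeholders above; the dict stands in for a parsed config/sys.yaml and its values are invented:

# Illustrative only: the special-cased '{{ HOST }}' / '{{ PORT }}' substitution in BIND.
from sijapi.classes import SysConfig

raw = {
    "HOST": "0.0.0.0",
    "PORT": 4444,
    "BIND": "{{ HOST }}:{{ PORT }}",
}
print(SysConfig.resolve_placeholders(raw)["BIND"])  # -> "0.0.0.0:4444"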
def __getattr__(self, name: str) -> Any:
|
|
||||||
if name in ['MODULES', 'EXTENSIONS']:
|
|
||||||
return self.__dict__[name]
|
|
||||||
if name in self.__dict__:
|
|
||||||
return self.__dict__[name]
|
|
||||||
raise AttributeError(f"'{self.__class__.__name__}' object has no attribute '{name}'")
|
|
||||||
|
|
||||||
@property
|
|
||||||
def active_modules(self) -> List[str]:
|
|
||||||
return [module for module, is_active in self.MODULES.__dict__.items() if is_active]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def active_extensions(self) -> List[str]:
|
|
||||||
return [extension for extension, is_active in self.EXTENSIONS.__dict__.items() if is_active]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def max_cpu_cores(self) -> int:
|
|
||||||
return self.MAX_CPU_CORES
|
|
||||||
|
|
||||||
class Location(BaseModel):
|
|
||||||
latitude: float
|
|
||||||
longitude: float
|
|
||||||
datetime: datetime
|
|
||||||
elevation: Optional[float] = None
|
|
||||||
altitude: Optional[float] = None
|
|
||||||
zip: Optional[str] = None
|
|
||||||
street: Optional[str] = None
|
|
||||||
city: Optional[str] = None
|
|
||||||
state: Optional[str] = None
|
|
||||||
country: Optional[str] = None
|
|
||||||
context: Optional[Dict[str, Any]] = None
|
|
||||||
class_: Optional[str] = Field(None, alias="class")
|
|
||||||
type: Optional[str] = None
|
|
||||||
name: Optional[str] = None
|
|
||||||
display_name: Optional[str] = None
|
|
||||||
boundingbox: Optional[List[str]] = None
|
|
||||||
amenity: Optional[str] = None
|
|
||||||
house_number: Optional[str] = None
|
|
||||||
road: Optional[str] = None
|
|
||||||
quarter: Optional[str] = None
|
|
||||||
neighbourhood: Optional[str] = None
|
|
||||||
suburb: Optional[str] = None
|
|
||||||
county: Optional[str] = None
|
|
||||||
country_code: Optional[str] = None
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
json_encoders = {
|
|
||||||
datetime: lambda dt: dt.isoformat(),
|
|
||||||
}
|
|
||||||
populate_by_name = True
|
|
||||||
|
|
||||||
class Geocoder:
|
|
||||||
def __init__(self, named_locs: Union[str, Path] = None, cache_file: Union[str, Path] = 'timezone_cache.json'):
|
|
||||||
self.tf = TimezoneFinder()
|
|
||||||
self.srtm_data = get_data()
|
|
||||||
self.named_locs = Path(named_locs) if named_locs else None
|
|
||||||
self.cache_file = Path(cache_file)
|
|
||||||
self.last_timezone: str = "America/Los_Angeles"
|
|
||||||
self.last_update: Optional[datetime] = None
|
|
||||||
self.last_location: Optional[Tuple[float, float]] = None
|
|
||||||
self.executor = ThreadPoolExecutor()
|
|
||||||
self.override_locations = self.load_override_locations()
|
|
||||||
|
|
||||||
def load_override_locations(self):
|
|
||||||
if self.named_locs and self.named_locs.exists():
|
|
||||||
with open(self.named_locs, 'r') as file:
|
|
||||||
return yaml.safe_load(file)
|
|
||||||
return []
|
|
||||||
|
|
||||||
def haversine(self, lat1, lon1, lat2, lon2):
|
|
||||||
R = 6371
|
|
||||||
|
|
||||||
lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
|
|
||||||
dlat = lat2 - lat1
|
|
||||||
dlon = lon2 - lon1
|
|
||||||
|
|
||||||
a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
|
|
||||||
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
|
|
||||||
|
|
||||||
return R * c
|
|
||||||
|
|
||||||
def find_override_location(self, lat: float, lon: float) -> Optional[str]:
|
|
||||||
closest_location = None
|
|
||||||
closest_distance = float('inf')
|
|
||||||
|
|
||||||
for location in self.override_locations:
|
|
||||||
loc_name = location.get("name")
|
|
||||||
loc_lat = location.get("latitude")
|
|
||||||
loc_lon = location.get("longitude")
|
|
||||||
loc_radius = location.get("radius")
|
|
||||||
|
|
||||||
distance = self.haversine(lat, lon, loc_lat, loc_lon)
|
|
||||||
|
|
||||||
if distance <= loc_radius:
|
|
||||||
if distance < closest_distance:
|
|
||||||
closest_distance = distance
|
|
||||||
closest_location = loc_name
|
|
||||||
|
|
||||||
return closest_location
|
|
||||||
|
|
||||||
async def location(self, lat: float, lon: float):
|
|
||||||
loop = asyncio.get_running_loop()
|
|
||||||
result = await loop.run_in_executor(self.executor, rg.search, [(lat, lon)])
|
|
||||||
override = self.find_override_location(lat, lon)
|
|
||||||
if override:
|
|
||||||
result[0]['override_name'] = override
|
|
||||||
return result
|
|
||||||
|
|
||||||
async def elevation(self, latitude: float, longitude: float, unit: str = "m") -> float:
|
|
||||||
loop = asyncio.get_running_loop()
|
|
||||||
elevation = await loop.run_in_executor(self.executor, self.srtm_data.get_elevation, latitude, longitude)
|
|
||||||
|
|
||||||
if unit == "m":
|
|
||||||
return elevation
|
|
||||||
elif unit == "km":
|
|
||||||
return elevation / 1000
|
|
||||||
elif unit == "ft" or unit == "'":
|
|
||||||
return elevation * 3.280839895
|
|
||||||
else:
|
|
||||||
raise ValueError(f"Unsupported unit: {unit}")
|
|
||||||
|
|
||||||
async def timezone(self, lat: float, lon: float) -> Optional[ZoneInfo]:
|
|
||||||
loop = asyncio.get_running_loop()
|
|
||||||
timezone_str = await loop.run_in_executor(self.executor, lambda: self.tf.timezone_at(lat=lat, lng=lon))
|
|
||||||
return ZoneInfo(timezone_str) if timezone_str else None
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
async def lookup(self, lat: float, lon: float):
|
|
||||||
city, state, country = (await self.location(lat, lon))[0]['name'], (await self.location(lat, lon))[0]['admin1'], (await self.location(lat, lon))[0]['cc']
|
|
||||||
elevation = await self.elevation(lat, lon)
|
|
||||||
timezone = await self.timezone(lat, lon)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"city": city,
|
|
||||||
"state": state,
|
|
||||||
"country": country,
|
|
||||||
"elevation": elevation,
|
|
||||||
"timezone": timezone
|
|
||||||
}
|
|
||||||
|
|
||||||
async def code(self, locations: Union[Location, Tuple[float, float], List[Union[Location, Tuple[float, float]]]]) -> Union[Location, List[Location]]:
|
|
||||||
if isinstance(locations, (Location, tuple)):
|
|
||||||
locations = [locations]
|
|
||||||
|
|
||||||
processed_locations = []
|
|
||||||
for loc in locations:
|
|
||||||
if isinstance(loc, tuple):
|
|
||||||
processed_locations.append(Location(
|
|
||||||
latitude=loc[0],
|
|
||||||
longitude=loc[1],
|
|
||||||
datetime=datetime.now(timezone.utc)
|
|
||||||
))
|
|
||||||
elif isinstance(loc, Location):
|
|
||||||
if loc.datetime is None:
|
|
||||||
loc.datetime = datetime.now(timezone.utc)
|
|
||||||
processed_locations.append(loc)
|
|
||||||
else:
|
|
||||||
raise ValueError(f"Unsupported location type: {type(loc)}")
|
|
||||||
|
|
||||||
coordinates = [(location.latitude, location.longitude) for location in processed_locations]
|
|
||||||
|
|
||||||
geocode_results = await asyncio.gather(*[self.location(lat, lon) for lat, lon in coordinates])
|
|
||||||
elevations = await asyncio.gather(*[self.elevation(lat, lon) for lat, lon in coordinates])
|
|
||||||
timezone_results = await asyncio.gather(*[self.timezone(lat, lon) for lat, lon in coordinates])
|
|
||||||
|
|
||||||
def create_display_name(override_name, result):
|
|
||||||
parts = []
|
|
||||||
if override_name:
|
|
||||||
parts.append(override_name)
|
|
||||||
if result.get('name') and result['name'] != override_name:
|
|
||||||
parts.append(result['name'])
|
|
||||||
if result.get('admin1'):
|
|
||||||
parts.append(result['admin1'])
|
|
||||||
if result.get('cc'):
|
|
||||||
parts.append(result['cc'])
|
|
||||||
return ', '.join(filter(None, parts))
|
|
||||||
|
|
||||||
geocoded_locations = []
|
|
||||||
for location, result, elevation, tz_result in zip(processed_locations, geocode_results, elevations, timezone_results):
|
|
||||||
result = result[0] # Unpack the first result
|
|
||||||
override_name = result.get('override_name')
|
|
||||||
geocoded_location = Location(
|
|
||||||
latitude=location.latitude,
|
|
||||||
longitude=location.longitude,
|
|
||||||
elevation=elevation,
|
|
||||||
datetime=location.datetime,
|
|
||||||
zip=result.get("admin2"),
|
|
||||||
city=result.get("name"),
|
|
||||||
state=result.get("admin1"),
|
|
||||||
country=result.get("cc"),
|
|
||||||
context=location.context or {},
|
|
||||||
name=override_name or result.get("name"),
|
|
||||||
display_name=create_display_name(override_name, result),
|
|
||||||
country_code=result.get("cc"),
|
|
||||||
timezone=tz_result
|
|
||||||
)
|
|
||||||
|
|
||||||
# Merge original location data with geocoded data
|
|
||||||
for field in location.__fields__:
|
|
||||||
if getattr(location, field) is None:
|
|
||||||
setattr(location, field, getattr(geocoded_location, field))
|
|
||||||
|
|
||||||
geocoded_locations.append(location)
|
|
||||||
|
|
||||||
return geocoded_locations[0] if len(geocoded_locations) == 1 else geocoded_locations
|
|
||||||
|
|
||||||
    async def geocode_osm(self, latitude: float, longitude: float, email: str):
        url = f"https://nominatim.openstreetmap.org/reverse?format=json&lat={latitude}&lon={longitude}"
        headers = {
            'User-Agent': f'sijapi/1.0 ({email})',
        }
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers) as response:
                response.raise_for_status()
                data = await response.json()

        address = data.get("address", {})
        elevation = await self.elevation(latitude, longitude)
        return Location(
            latitude=latitude,
            longitude=longitude,
            elevation=elevation,
            datetime=datetime.now(timezone.utc),
            zip=address.get("postcode"),
            street=address.get("road"),
            city=address.get("city"),
            state=address.get("state"),
            country=address.get("country"),
            context={},
            class_=data.get("class"),
            type=data.get("type"),
            name=data.get("name"),
            display_name=data.get("display_name"),
            amenity=address.get("amenity"),
            house_number=address.get("house_number"),
            road=address.get("road"),
            quarter=address.get("quarter"),
            neighbourhood=address.get("neighbourhood"),
            suburb=address.get("suburb"),
            county=address.get("county"),
            country_code=address.get("country_code"),
            timezone=await self.timezone(latitude, longitude)
        )

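    # A minimal usage sketch (assumptions: an instance of this class named `geo`
    # exists elsewhere, and the coordinates and email below are placeholders):
    #
    #   loc = await geo.geocode_osm(44.0462, -123.0220, email="admin@example.com")
    #   print(loc.display_name, loc.city, loc.country_code)
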
    def round_coords(self, lat: float, lon: float, decimal_places: int = 2) -> Tuple[float, float]:
        return (round(lat, decimal_places), round(lon, decimal_places))

    def coords_equal(self, coord1: Tuple[float, float], coord2: Tuple[float, float], tolerance: float = 1e-5) -> bool:
        return math.isclose(coord1[0], coord2[0], abs_tol=tolerance) and math.isclose(coord1[1], coord2[1], abs_tol=tolerance)

    async def refresh_timezone(self, location: Union[Location, Tuple[float, float]], force: bool = False) -> Optional[ZoneInfo]:
        if isinstance(location, Location):
            lat, lon = location.latitude, location.longitude
        else:
            lat, lon = location

        rounded_location = self.round_coords(lat, lon)
        current_time = datetime.now()

        if (force or
            not self.last_update or
            current_time - self.last_update > timedelta(hours=1) or
            not self.coords_equal(rounded_location, self.round_coords(*self.last_location) if self.last_location else (None, None))):

            new_timezone = await self.timezone(lat, lon)
            self.last_timezone = new_timezone
            self.last_update = current_time
            self.last_location = (lat, lon)  # Store the original, non-rounded coordinates
            await self.tz_save()

        return self.last_timezone

    async def tz_save(self):
        cache_data = {
            'last_timezone': str(self.last_timezone) if self.last_timezone else None,
            'last_update': self.last_update.isoformat() if self.last_update else None,
            'last_location': self.last_location
        }
        async with aiofiles.open(self.cache_file, 'w') as f:
            await f.write(json.dumps(cache_data))

    async def tz_cached(self):
        try:
            async with aiofiles.open(self.cache_file, 'r') as f:
                cache_data = json.loads(await f.read())
                self.last_timezone = ZoneInfo(cache_data['last_timezone']) if cache_data.get('last_timezone') else None
                self.last_update = datetime.fromisoformat(cache_data['last_update']) if cache_data.get('last_update') else None
                self.last_location = tuple(cache_data['last_location']) if cache_data.get('last_location') else None
        except (FileNotFoundError, json.JSONDecodeError):
            # If file doesn't exist or is invalid, we'll start fresh
            self.last_timezone = None
            self.last_update = None
            self.last_location = None

    async def tz_current(self, location: Union[Location, Tuple[float, float]]) -> Optional[ZoneInfo]:
        await self.tz_cached()
        return await self.refresh_timezone(location)

    async def tz_last(self) -> Optional[ZoneInfo]:
        await self.tz_cached()
        return self.last_timezone

    async def tz_at(self, lat: float, lon: float) -> Optional[ZoneInfo]:
        """
        Get the timezone at a specific latitude and longitude without affecting the cache.

        :param lat: Latitude
        :param lon: Longitude
        :return: ZoneInfo object representing the timezone
        """
        return await self.timezone(lat, lon)

    def __del__(self):
        self.executor.shutdown()
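
# A rough usage sketch of the timezone caching above, assuming an instance of
# this class named `geo` (the name and coordinates are illustrative only).
# tz_current() first loads the on-disk cache via tz_cached(), and
# refresh_timezone() only re-queries when forced, when more than an hour has
# passed, or when the rounded coordinates have changed:
#
#   tz = await geo.tz_current((45.5152, -122.6784))                     # may be served from cache
#   tz = await geo.refresh_timezone((45.5152, -122.6784), force=True)   # always re-queries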


class IMAPConfig(BaseModel):
    username: str
    password: str
    host: str
    port: int
    encryption: Optional[str] = None


class SMTPConfig(BaseModel):
    username: str
    password: str
    host: str
    port: int
    encryption: Optional[str] = None


class AutoResponder(BaseModel):
    name: str
    style: str
    context: str
    ollama_model: str = "llama3"
    whitelist: List[str]
    blacklist: List[str]
    image_prompt: Optional[str] = None
    image_scene: Optional[str] = None
    smtp: SMTPConfig


class EmailAccount(BaseModel):
    name: str
    refresh: int
    fullname: Optional[str]
    bio: Optional[str]
    summarize: bool = False
    podcast: bool = False
    imap: IMAPConfig
    autoresponders: Optional[List[AutoResponder]]


class EmailContact(BaseModel):
    email: str
    name: Optional[str] = None


class IncomingEmail(BaseModel):
    sender: str
    datetime_received: datetime
    recipients: List[EmailContact]
    subject: str
    body: str
    attachments: List[dict] = []


class WidgetUpdate(BaseModel):
    text: Optional[str] = None
    progress: Optional[str] = None
    icon: Optional[str] = None
    color: Optional[str] = None
    url: Optional[str] = None
    shortcut: Optional[str] = None
    graph: Optional[str] = None
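
# A hedged sketch of how these models might be hydrated from an email-accounts
# YAML file like the one included later in this changeset (the `yaml_path` name
# is an assumption, and each account entry must supply required fields such as
# `refresh`):
#
#   import yaml
#   with open(yaml_path) as f:
#       raw = yaml.safe_load(f)
#   accounts = [EmailAccount(**acct) for acct in raw["accounts"]]
#   for acct in accounts:
#       print(acct.name, acct.imap.host, len(acct.autoresponders or []))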
@ -1,486 +0,0 @@
|
||||||
#──────────────────────────────────────────────────────────────────────────────────
|
|
||||||
# C O N F I G U R A T I O N F I L E
|
|
||||||
#──────────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# Hi friend! You've found my hidden .env example file. Do you like Zalgo
|
|
||||||
# text and old-school ASCII art? I bet you do. So listen, this'll be your method
|
|
||||||
# for configuring sijapi, and nothing works until you at least:
|
|
||||||
#
|
|
||||||
# (1) fill in the ESSENTIALS category, and
|
|
||||||
#
|
|
||||||
# (2) rename this file `.env'
|
|
||||||
#
|
|
||||||
# ... and even then, certain features will not work until you set other
|
|
||||||
# relevant variables below.
|
|
||||||
#
|
|
||||||
# So get yourself a beverage, put on some sick beats, and settle in for a vibe-y
|
|
||||||
# configuration sesh. Remember to read my detailed notes if you ever feel lost,
|
|
||||||
# and most important, remember:
|
|
||||||
#
|
|
||||||
# † you are NOT alone,
|
|
||||||
# † I love you SO much,
|
|
||||||
# † and you are S̸̢̟̑̒̊ͅō̸͔͕͎̟͜ worthy.
|
|
||||||
#
|
|
||||||
# y o u r b f & b f 4 e ,
|
|
||||||
# .x+=:. . .
|
|
||||||
# z` ^% @88> .. †††>
|
|
||||||
# . <k %8P 888> .d`` %†P
|
|
||||||
# .@8Ned8" . "8P u @8Ne. .u .
|
|
||||||
# .@^%8888" .@88u . us888u. %8888:u@88N .@88u
|
|
||||||
# x88: `)8b. ''888E` u888u. .@88 "8888" `888I 888. ''888E`
|
|
||||||
# ~ 8888N=*8888 888E `'888E 9888 9888 888I 888I 888E
|
|
||||||
# %8" R88 888E 888E 9888 9888 888I 888I 888E
|
|
||||||
# @8Wou 9% 888E 888E 9888 9888 uW888L 888' 888E
|
|
||||||
# .888888P` 888& 888E 9888 9888 '*88888Nu88P 888&
|
|
||||||
# ` ^"F R888" 888E "888*""888" ~ '88888F` R888"
|
|
||||||
# "" 888E ^Y" ^Y' 888 ^ ""
|
|
||||||
# 888E *8E
|
|
||||||
# 888P '8>
|
|
||||||
# .J88" " "
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# B U T I H E A R Y O U :
|
|
||||||
# L E T ' S T A K E I T S L O W A N D
|
|
||||||
# ───────────── S̢͉̺ T̪͔͓ A͇̞ R̘͕͙ T̢̡͉ W͚̻ I͉͇͜ T̟͖̺ H̡͚͙ T̺̞̠ H̢̢̙ E̢̪͓ ──────────────
|
|
||||||
#
|
|
||||||
# ███████╗███████╗███████╗███████╗███╗ ██╗████████╗██╗ █████╗ ██╗ ███████╗
|
|
||||||
# ██╔════╝██╔════╝██╔════╝██╔════╝████╗ ██║╚══██╔══╝██║██╔══██╗██║ ██╔════╝
|
|
||||||
# █████╗ ███████╗███████╗█████╗ ██╔██╗ ██║ ██║ ██║███████║██║ ███████╗
|
|
||||||
# ██╔══╝ ╚════██║╚════██║██╔══╝ ██║╚██╗██║ ██║ ██║██╔══██║██║ ╚════██║
|
|
||||||
# ███████╗███████║███████║███████╗██║ ╚████║ ██║ ██║██║ ██║███████╗███████║
|
|
||||||
# ╚══════╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝
|
|
||||||
# ─────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
#─── first, bind an ip address and port : ──────────────────────────────────────────
|
|
||||||
# <--- replace with base URL of reverse proxy, etc
|
|
||||||
#─── notes: ──────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# HOST_NET† and HOST_PORT comprise HOST and determine the ip and port the server binds to.
|
|
||||||
# Sys.URL is used to assemble URLs, e.g. in the MS authentication flow and for serving images generated on the img router.
|
|
||||||
# Sys.URL should match the base URL used to access sijapi sans endpoint, e.g. http://localhost:4444 or https://api.sij.ai
|
|
||||||
#
|
|
||||||
# † Take care here! Please ensure you understand the implications of setting HOST_NET to anything besides 127.0.0.1, and configure your firewall and router appropriately if you do. Setting HOST_NET to 0.0.0.0, for instance, opens sijapi to any device the server running it is accessible to — including potentially frightening internet randos (depending how your firewall, router, and NAT are configured).
|
|
||||||
#
|
|
||||||
# Here are a few options to consider to more securely enable access from
|
|
||||||
# other devices:
|
|
||||||
#
|
|
||||||
# (1) if all access can occur over Tailscale, either:
|
|
||||||
# (a) leave HOST_NET set to 127.0.0.1, run `tailscale cert $(tailscale
|
|
||||||
# whois $(tailscale ip | head -n 1) | awk '/Name:/ {print $2}')
|
|
||||||
# if you haven't already issued yourself a TLS certificate on
|
|
||||||
# Tailscale, and then run `tailscale serve --bg --https=4443
|
|
||||||
# 4444` to expose sijapi to your other tailscale-enabled devices
|
|
||||||
# at `https://{device.magicdns-domain.net}:4443`; or
|
|
||||||
# (b) set HOST_NET to your server's Tailscale IP (this should work
|
|
||||||
# but for me doesn't reliably)
|
|
||||||
#
|
|
||||||
# (2) if WAN access truly is required, leave HOST_NET set to 127.0.0.1 and
|
|
||||||
# configure either:
|
|
||||||
# (a) a Cloudflare tunnel, or
|
|
||||||
# (b) a reverse proxy with HTTPS (Caddy is excellent for this).
|
|
||||||
#
|
|
||||||
# And please be sure to set a strong API key either way but especially for (2).
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#──── configure API key authorization and select exemptions──────────────────begin
|
|
||||||
GLOBAL_API_KEY=¿SECRET? # <--- specify a key to unlock the API
|
|
||||||
PUBLIC_SERVICES=/id,/ip,/health,/img/,/cl/dockets,/cl/search,/cd/alert
|
|
||||||
TRUSTED_SUBNETS=127.0.0.1/32,10.13.37.0/24,100.64.64.0/24
|
|
||||||
#─── notes: ───────────────────────────────────────────────────────────────────end
|
|
||||||
#
|
|
||||||
# GLOBAL_API_KEY determines the API key that will be required to access all endpoints, except access to PUBLIC_SERVICES or from TRUSTED_SUBNETS. Authentication is made via an `Authorization: Bearer {GLOBAL_API_KEY}` header.
|
|
||||||
# TRUSTED_SUBNETS might commonly include 127.0.0.1/32 (localhost), 100.x.x.0/24 (Tailscale tailnet), and/or 192.168.x.0/24 or 10.x.x.0/24 (local network).
|
|
||||||
# When configuring a reverse proxy or Cloudflare tunnel, please verify traffic through it does not appear to sijapi (i.e. in ./logs) as though it were coming from any of the subnets specified here. For sij, using Caddy, it does not, but your setup may differ.
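#
# For instance, a request to a key-protected endpoint might look like this
# (host, port, endpoint, and key below are placeholders; substitute your own):
#
#   curl -H "Authorization: Bearer $GLOBAL_API_KEY" http://localhost:4444/some/protected/endpoint
#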
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#─── router selection: ────────────────────────────────────────────────────────────
|
|
||||||
ROUTERS=asr,cal,cf,email,llm,loc,note,rag,img,serve,sys,time,tts,weather
|
|
||||||
UNLOADED=ig
|
|
||||||
#─── notes: ──────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# ROUTERS determines which routers are loaded.†
|
|
||||||
#
|
|
||||||
# UNLOADED is not used directly -- it's just there to help keep track of which routers are disabled.
|
|
||||||
#
|
|
||||||
# † ┓ ┏ orth bearing in mind: some routers inherently rely on other routers,
|
|
||||||
# ┃┃┃ 3rd party APIs, or other apps being installed locally. If a router is
|
|
||||||
# ┗┻┛ set to load (i.e. is included in ROUTERS) and it depends on another router,
|
|
||||||
# that other router will also load, irrespective of whether it's listed.
|
|
||||||
#
|
|
||||||
# B U T L E T ' S G E T D O W N T O
|
|
||||||
# B R A S S T A C K S , S H A L L W E ?
|
|
||||||
#
|
|
||||||
# asr: requires faster_whisper — $ pip install faster_whisper — and
|
|
||||||
# downloading the model file specified in ASR_DEFAULT_MODEL.
|
|
||||||
#
|
|
||||||
# cal: requires (1) a Microsoft 365 account with a properly configured
|
|
||||||
# Azure Active Directory app, and/or (2) Calendars on macOS.
|
|
||||||
#
|
|
||||||
# cf: interfaces with the Cloudflare API and Caddy to register new
|
|
||||||
# [sub-]domains on Cloudflare and deploy them with Caddy as
|
|
||||||
# reverse proxy.
|
|
||||||
#
|
|
||||||
# llm: requires ollama — $ pip install ollama — and downloading the
|
|
||||||
# models set in LLM_DEFAULT_MODEL and LLM_VISION_MODEL.
|
|
||||||
#
|
|
||||||
# email: email auto-responders and summarizers to be found here. Designed
|
|
||||||
# for use with IMAP.
|
|
||||||
#
|
|
||||||
# hooks: designed for two specific use cases: monitoring court dockets
|
|
||||||
# through CourtListener.org, and monitoring arbitrary web pages for
|
|
||||||
# changes in tandem with a self-hosted changedetection.io instance.
|
|
||||||
# Both require accounts; other functionality would require
|
|
||||||
# additional / modified code.
|
|
||||||
#
|
|
||||||
# ig: requires an Instagram account, with credentials and other settings
|
|
||||||
# configured separately in the ig_config.json file; relies heavily
|
|
||||||
# on the llm and img routers which have their own dependencies.
|
|
||||||
#
|
|
||||||
# loc: some endpoints work as is, but the core location tracking
|
|
||||||
# functionality requires Postgresql + PostGIS extension and are
|
|
||||||
# designed specifically to pair with a mobile device where
|
|
||||||
# Pythonista is installed and configured to run the
|
|
||||||
# `gps_tracker.py` and `gps_upload.py` scripts periodically or per
|
|
||||||
# repeating conditions (e.g. via automation under Apple Shortcuts).
|
|
||||||
#
|
|
||||||
# note: designed for use with Obsidian plus the Daily Notes and Tasks
|
|
||||||
# core extensions; and the Admonitions, Banners, Icons (with the
|
|
||||||
# Lucide pack), and Make.md community extensions. Moreover `notes`
|
|
||||||
# relies heavily on the cal, llm, loc, img, summarize, time, loc,
|
|
||||||
# and weather routers and accordingly on the external
|
|
||||||
# dependencies of each.
|
|
||||||
#
|
|
||||||
# img: requires ComfyUI plus any modules and StableDiffusion models
|
|
||||||
# set in sd_config and individual workflow .json files.
|
|
||||||
#
|
|
||||||
# summarize: relies on the llm router and thus requires ollama.
|
|
||||||
#
|
|
||||||
# time: requires the subscription-based macOS app 'Timing' (one of many
|
|
||||||
# apps that together make SetApp an incredible value for macOS users!)
|
|
||||||
#
|
|
||||||
# tts: designed for use with coqui — $ pip install coqui — and/or the
|
|
||||||
# ElevenLabs Sys.
|
|
||||||
#
|
|
||||||
# weather: requires a VisualCrossing API key and is designed for (but doesn't
|
|
||||||
# itself strictly require) Postgresql with the PostGIS extension;
|
|
||||||
# (... but it presently relies on the loc router, which does).
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# ... Whew! that was a lot, right? I'm so glad we're in this together...
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#───────── W H A T A R E Y O U R D I G I T S , H O N E Y B U N ? ────────
|
|
||||||
# LOCALIZATION
|
|
||||||
#─── what are your digits, honey-bun?: ──────────────────────────────────────────────
|
|
||||||
TZ=America/Los_Angeles
|
|
||||||
HOME_ZIP=97401
|
|
||||||
#─── notes: ─────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#─────────────────────── Y ₒ ᵤ ' ᵣ ₑ G ₒ ₙ ₙ ₐ ₗ ₒ ᵥ ₑ ────────────────────────
|
|
||||||
#
|
|
||||||
# ░ ░░ ░░ ░ ░░░░░░░░ ░░░ ░░░ ░░ ░░░░░░░ ░
|
|
||||||
# ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒ ▒▒▒▒ ▒ ▒▒▒▒ ▒ ▒▒▒▒▒▒▒ ▒▒▒▒▒▒▒
|
|
||||||
# ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓ ▓▓▓▓▓▓▓▓ ▓▓ ▓▓▓▓▓▓▓ ▓▓▓▓ ▓ ▓▓▓▓▓▓▓ ▓▓▓
|
|
||||||
# ████ ████ ████ ████ █████████████ █ ████ █ █ ███████ ███████
|
|
||||||
# ████ ████ ████ █ █ ██ ███ ██ ████ █ █ █
|
|
||||||
#
|
|
||||||
# A N D I ' M N O T. E V E N. J E A L O U S.
|
|
||||||
# Y O U D E S E R V E I T A L L , B A B Y C A K E S.
|
|
||||||
#
|
|
||||||
#─── use tailscale for secure remote access: ───────────────────────────────────────
|
|
||||||
TS_IP=100.13.37.5 # <--- enter your own TS IP address
|
|
||||||
TS_SUBNET=100.13.37.0/24 # <--- enter your own TS subnet (IPv4/CIDR)
|
|
||||||
TS_ID=¿SECRET? # <--- enter your own TS device name
|
|
||||||
TS_TAILNET=screaming_sailfin # <--- enter your own TS tailnet / MagicDNS name
|
|
||||||
TAILSCALE_API_KEY=¿SECRET? # <--- enter your own TS API key
|
|
||||||
#─── notes: ────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# TS_IP should match the Tailscale IP of the device. But this is deprecated, and if the functionality becomes relevant again, it should come back in the form of a dynamic check (`tailscale status` in a shell subprocess) in __init__.py or even the /id endpoint.
|
|
||||||
# TS_SUBNET should match the IP/CIDR-format tailnet
|
|
||||||
# TS_ID currently has two roles: it's used to assemble the complete MagicDNS of the server, and it determines what the /id endpoint on the health router returns. This is relevant where multiple servers run the script behind a load balancer (e.g. Caddy), as a means to check which server responds. Bear in mind that /id is NOT API key-protected by default here.
|
|
||||||
# TS_TAILNET should match the tailnet's MagicDNS domain (omitting the `.net`, for reasons)
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#──────────── ᵁ & ᴹ ᴱ , W E C A N G E T T H R O U G H ────────────────────
|
|
||||||
#
|
|
||||||
# ██▓███ ▒█████ ██████ ▄▄▄█████▓ ▄████ ██▀███ ▓█████ ██████
|
|
||||||
# ▓██░ ██▒██▒ ██▒▒██ ▒ ▓ ██▒ ▓▒ ██▒ ▀█▒▓██ ▒ ██▒▓█ ▀ ▒██ ▒
|
|
||||||
# ▓██░ ██▓▒██░ ██▒░ ▓██▄ ▒ ▓██░ ▒░▒██░▄▄▄░▓██ ░▄█ ▒▒███ ░ ▓██▄
|
|
||||||
# ▒██▄█▓▒ ▒██ ██░ ▒ ██▒░ ▓██▓ ░ ░▓█ ██▓▒██▀▀█▄ ▒▓█ ▄ ▒ ██▒
|
|
||||||
# ▒██▒ ░ ░ ████▓▒░▒██████▒▒ ▒██▒ ░ ░▒▓███▀▒░██▓ ▒██▒░▒████▒▒██████▒▒
|
|
||||||
# ▒██▒ ░ ░ ▒░▒░▒░ ▒ ▒▓▒ ▒ ░ ▒ ░░ ░▒ ▒ ░ ▒▓ ░▒▓░░░ ▒░ ░▒ ▒▓▒ ▒ ░
|
|
||||||
# ▒▓▒░ ░ ▒ ▒░ ░ ░▒ ░ ░ ░ ░ ░ ░▒ ░ ▒░ ░ ░ ░░ ░▒ ░ ░
|
|
||||||
# ░▒ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░ ░ ░
|
|
||||||
# ░░ ░ ░T̷ O̷ G̷ E̷ T̷ H̷ ░ R̷. ░ ░ ░ ░ ░
|
|
||||||
# J U S T ░
|
|
||||||
#─── frag, or weat,and loc modules:────── H O L D M Y H A N D.
|
|
||||||
DB_NAME=db
|
|
||||||
#
|
|
||||||
DB_HOST=127.0.0.1
|
|
||||||
DB_PORT=5432
|
|
||||||
# R E A L T I G H T.
|
|
||||||
DB_USER=postgres
|
|
||||||
DB_PASSWORD=¿SECRET? # <--- enter your own Postgres password
|
|
||||||
# Y E A H . . .
|
|
||||||
DB_SSH=100.64.64.15
|
|
||||||
# . . . 𝙹 𝚄 𝚂 𝚃 𝙻 𝙸 𝙺 𝙴 𝚃 𝙷 𝙰 𝚃.
|
|
||||||
DB_SSH_USER=sij
|
|
||||||
DB_SSH_PASS=¿SECRET? # <--- enter SSH password for pg server (if not localhost)
|
|
||||||
#─── notes: ────────────────────────────────────────────────── S E E ? 𝕰 𝖅 - 𝕻 𝖅
|
|
||||||
#
|
|
||||||
# DB, DB_HOST, DB_PORT, DB_USER, and DB_PASS should specify those respective
|
|
||||||
# credentials for your Postgres database. DB_SSH and associated _USER and _PASS
|
|
||||||
# variables allow database access over an SSH tunnel.
|
|
||||||
#
|
|
||||||
# In the current implementation, we rely on Postgres to hold:
|
|
||||||
# i. user-logged location data (loc module), and
|
|
||||||
# ii. results from past weather forecast checks (weather module).
|
|
||||||
#
|
|
||||||
# A future version will hopefully make use of PostGIS's geocoding capabilities,
|
|
||||||
# and add a vector database for the LLM module. Until then it's up to you if the
|
|
||||||
# loc and weather modules are worth the hassle of maintaining Postgres.
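#
# For reference, the values above combine into (roughly) the following libpq-style
# connection string, assuming a direct connection rather than the SSH tunnel:
#
#   postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}
#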
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#─────────────────────────────── 𝐼 𝐵 𝐸 𝑇 𝑌 𝑂 𝑈 ─────────────────────────────────
|
|
||||||
# 𝑅 𝐸 𝐶 𝐸 𝐼 𝑉 𝐸 𝐴 𝐿 𝑂 𝑇 𝑂 𝐹 𝐿 𝑂 𝑉 𝐸 𝐿 𝐸 𝑇 𝑇 𝐸 𝑅 𝑆 𝑂 𝑉 𝐸 𝑅
|
|
||||||
#
|
|
||||||
# .----------------. .----------------. .----------------. .----------------.
|
|
||||||
# | .--------------. | .--------------. | .--------------. | .--------------. |
|
|
||||||
# | | _____ | | | ____ ____ | | | __ | | | ______ | |
|
|
||||||
# | | |_ _| | | ||_ \ / _|| | | / \ | | | |_ __ \ | |
|
|
||||||
# | | | | | | | | \/ | | | | / /\ \ | | | | |__) | | |
|
|
||||||
# | | | | | | | | |\ /| | | | | / ____ \ | | | | ___/ | |
|
|
||||||
# | | _| |_ | | | _| |_\/_| |_ | | | _/ / \ \_ | | | _| |_ | |
|
|
||||||
# | | |_____| | | ||_____||_____|| | ||____| |____|| | | |_____| | |
|
|
||||||
# | | | | | | | | | | | | |
|
|
||||||
# | '--------------' | '--------------' | '--------------' | '--------------' |
|
|
||||||
# '----------------' '----------------' '----------------' '----------------'
|
|
||||||
#
|
|
||||||
# 𝙴 𝙼 𝙰 𝙸 𝙻
|
|
||||||
#
|
|
||||||
#─── imap & smtp: ────────────────────────────────────────────────────────────────────────
|
|
||||||
IMAP_HOST=127.0.0.1
|
|
||||||
IMAP_EMAIL=¿SECRET? # <--- enter yours
|
|
||||||
IMAP_PASSWORD=¿SECRET? # <--- enter yours
|
|
||||||
IMAP_PORT=1142
|
|
||||||
IMAP_ENCRYPTION=STARTTLS
|
|
||||||
SMTP_PORT=1024
|
|
||||||
SMTP_ENCRYPTION=SSL
|
|
||||||
AUTORESPONSE_WHITELIST=¿SECRET? # <--- enter complete/fragmented emails, or keywords
|
|
||||||
AUTORESPONSE_BLACKLIST=¿SECRET? # <--- same deal-io
|
|
||||||
AUTORESPONSE_CONTEXT=¿SECRET? # <--- inform the LLM why it's auto-responding for you
|
|
||||||
USER_FULLNAME=¿SECRET? # <--- more context for the LLM
|
|
||||||
USER_BIO=¿SECRET? # <--- yet more context for the nosy LLM
|
|
||||||
#─── notes: ───────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# This is primarily for summarizing incoming emails. Any IMAP account should work, but
|
|
||||||
# I focused testing on a somewhat complex setup involving Protonmail Bridge.
|
|
||||||
#
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#─── ms365 (calendars): ──────────────────────────────────────────────────────────────
|
|
||||||
ICAL_TOGGLE=True
|
|
||||||
ICALENDARS='E68FE085-2ECA-4097-AF0A-8D38C404D8DA,AB5A0473-16DD-4916-BD6D-F12AC2455285'
|
|
||||||
MS365_TOGGLE=False
|
|
||||||
MS365_CLIENT_ID=¿SECRET? # <--- enter your client ID (found in Azure pane)
|
|
||||||
MS365_TENANT_ID=¿SECRET? # <--- enter your tenant ID (found in Azure pane)
|
|
||||||
MS365_SECRET=¿SECRET? # <--- enter your app secret (found in Azure pane)
|
|
||||||
MS365_SCOPE='basic,calendar_all,Calendars.Read,Calendars.ReadWrite,offline_access'
|
|
||||||
MS365_TOKEN_FILE=oauth_token.txt
|
|
||||||
MS365_LOGIN_URL='https://login.microsoftonline.com'
|
|
||||||
MS365_REDIRECT_PATH=¿SECRET? # <--- e.g. http://localhost:4444/MS365/oauth_redirect
|
|
||||||
#─── notes: ───────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# # MS365_CLIENT_ID, _TENANT_ID, _SECRET, AND _SCOPES must be obtained from Microsoft
|
|
||||||
# via the Azure portal, by creating a new app registration and an accompanying secret.
|
|
||||||
# MS365_THUMBPRINT is vestige of an earlier failed attempt to get this working, and
|
|
||||||
# for now is deprecated. I recommend seeking out a well-reviewed tutorial for
|
|
||||||
# creating an app on Azure with a client_id and secret and necessary scopes for
|
|
||||||
# individual calendar access, because I had one heck of a time trying various approaches.
|
|
||||||
# Do better, Microsoft.
|
|
||||||
#
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#──────────────────── L E T ' S G E T S I L L Y , ─────────────────────────────
|
|
||||||
# T H E N G O B͎̝̪̼͉͜ O͖͕͇͚͉̼ N̢̦͖̺͔͎ K̠͓̠͖͜ E̝̼̫̙͔̞ R̡͇͖̙͉͎ S̡͉̠͎͙̪
|
|
||||||
# W I T H O U R O W N
|
|
||||||
#
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓██████▒▓██████▒░
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
# ░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
# ░▒▓████████▓▒ ░▒▓████████▓▒ ░▒▓█▓▒░░▒▓█▓▒░░▒▓█▓▒░
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# ( F O R R E A L T H O U G H , T H E S E A R E
|
|
||||||
#
|
|
||||||
#─── via comfyui (stable diffusion): ─────── S̝͖̦͓̪̻ O̡͖̘̫͇̟ H̢͔͔̫͉͜ O̢̢͉̞͍̘ T̟͍͍̪̦̞ R I G H T N O W
|
|
||||||
LLM_URL=http://localhost:11434
|
|
||||||
SYSTEM_MSG=You are a helpful AI assistant.
|
|
||||||
OPENAI_API_KEY=¿SECRET? # <--- not presently implemented for anything
|
|
||||||
SUMMARY_MODEL='command-r:latest'
|
|
||||||
SUMMARY_CHUNK_SIZE=16384
|
|
||||||
SUMMARY_CHUNK_OVERLAP=100
|
|
||||||
SUMMARY_TPW=1.3
|
|
||||||
SUMMARY_LENGTH_RATIO=4
|
|
||||||
SUMMARY_MIN_LENGTH=64
|
|
||||||
SUMMARY_TOKEN_LIMIT=16384
|
|
||||||
SUMMARY_INSTRUCT='You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.'
|
|
||||||
SUMMARY_INSTRUCT_TTS='You are an AI assistant that summarizes emails -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary. Your response will undergo Text-To-Speech conversion and be added to Sanjay's private podcast. Providing adequate context (Sanjay did not send this question to you, he will only hear your response) but aiming for conciseness and precision, and bearing in mind the Text-To-Speech conversion (avoiding acronyms and formalities), summarize the following.'
|
|
||||||
WHISPER_CPP_DIR='whisper.cpp'
|
|
||||||
WHISPER_CPP_MODELS=tiny,base,base-en,small,medium,medium-en,large-v3
|
|
||||||
WEBCLIPPER_TTS=elevenlabs
|
|
||||||
EMAIL_SUMMARY_TTS=local
|
|
||||||
YEAR_FMT="%Y"
|
|
||||||
MONTH_FMT="%Y-%m %B"
|
|
||||||
DAY_FMT="%Y-%m-%d %A"
|
|
||||||
DAY_SHORT_FMT="%Y-%m-%d"
|
|
||||||
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# The exact values here will depend on what software you are using to inference an LLM,
|
|
||||||
# and of course what models and capabilities are available through it. The script was
|
|
||||||
# designed for use with `ollama`, but most of the functionality should be equal with
|
|
||||||
# LM Studio, LocalAI, etc.
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# Note it's possible to specify a separate model for general purposes and for
|
|
||||||
# summarization tasks. The other SUMMARY_ variables call for some explanation,
|
|
||||||
# in particular six that are most relevant when summarizing very long documents:
|
|
||||||
#
|
|
||||||
# SUMMARY_CHUNK_SIZE: determines the maximum length, in tokens, the pieces that are
|
|
||||||
# split and sent individually to the model.
|
|
||||||
#
|
|
||||||
# SUMMARY_CHUNK_OVERLAP: determines how much of each chunk is overlapped with the prior
|
|
||||||
# and next chunks. Set too high causes repetition, set too low
|
|
||||||
# causes confusion and poor summary results.
|
|
||||||
# The summarization algorithm is flawed but I've gotten the best
|
|
||||||
# results with this set around 100–200.
|
|
||||||
#
|
|
||||||
# SUMMARY_TPW: used in estimating the token count of a prompt for purposes of
|
|
||||||
# complying with the maximum tokens a model can handle at once.
|
|
||||||
# Best you can do is estimate. I tend to use long words somewhat
|
|
||||||
# excessively and found my average was 1.3 tokens per word. YMMV.
|
|
||||||
#
|
|
||||||
# SUMMARY_LENGTH_RATIO: this is the primary control over the length of generated
|
|
||||||
# summaries, expressed as the ratio of original text length to
|
|
||||||
# summary length. The default, 4, means the summaries will be
|
|
||||||
# around 1/4 the length of the original text you provide it.
|
|
||||||
#
|
|
||||||
# SUMMARY_MIN_LENGTH: the default SUMMARY_LENGTH_RATIO of 4 isn't ideal for very
|
|
||||||
# short texts, but setting it any lower sacrifices conciseness
|
|
||||||
# in summaries of longer texts. In short one size doesn't fit
|
|
||||||
# all. The compromise I landed on was to set a "maximum minimum"
|
|
||||||
# summary length: under no circumstances will the script impose
|
|
||||||
# a smaller maximum length than this value.
|
|
||||||
#
|
|
||||||
# SUMMARY_INSTRUCT: sets the prompt used when summarizing text.
|
|
||||||
#
|
|
||||||
# SUMMARY_INSTRUCT_TTS: sets a separate prompt for use when summarizing text where
|
|
||||||
# tts output was requested; tends to yield "cleaner" audio
|
|
||||||
# with less numbers (page numbers, citations) and other
|
|
||||||
# information extraneous to spoken contexts.
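#
# Worked example (illustrative numbers only): a 4,000-word document at
# SUMMARY_TPW=1.3 is estimated at ~5,200 tokens, so it fits within a single
# SUMMARY_CHUNK_SIZE=16384 chunk. With SUMMARY_LENGTH_RATIO=4 the target
# summary length is on the order of 1,300 tokens (~1,000 words), and
# SUMMARY_MIN_LENGTH=64 only comes into play for much shorter inputs.
#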
|
|
||||||
#
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#────,-_/────────── W E C A N E X P E R I M E N T W I T H ──────────.───────────
|
|
||||||
# ' | ,~-,-. ,-. ,-. ,--. | --' ,--. ,-. ,--. ,-. ,-. |-- . ,-. ,-.
|
|
||||||
# .^ | | | | ,--| | | | --' | -,- | --' | | | --' | ,--| | | | | | |
|
|
||||||
# `--' ' ' ' `-^ `-| `--' `---| `--' ' ' `--' ' `--^ `' ` `-' ' '
|
|
||||||
# , | ,-. | ~ 𝙸 𝙽 𝚃 𝙷 𝙴 𝙽 𝚄 𝙳 𝙴 . ~
|
|
||||||
# `~~' `-+'
|
|
||||||
# O R F U L L Y C L O T H E D ── U P T O Y O U
|
|
||||||
#
|
|
||||||
#─── via comfyui (stable diffusion): ───── ( B U T L E T M E K N O W , Y E A H ? )
|
|
||||||
COMFYUI_URL=http://localhost:8188
|
|
||||||
COMFYUI_DIR=/Users/sij/workshop/ComfyUI
|
|
||||||
COMFYUI_LAUNCH_CMD="mamba activate comfyui && python main.py"
|
|
||||||
OBSIDIAN_BANNER_SCENE=wallpaper
|
|
||||||
PHOTOPRISM_USER=NOT_IMPLEMENTED
|
|
||||||
PHOTOPRISM_PASS=NOT_IMPLEMENTED
|
|
||||||
ANONYMIZED_TELEMETRY=False
|
|
||||||
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
# COMFYUI_URL, as you may expect, should point to the URL you use to access ComfyUI. If you
|
|
||||||
# don't know, watch for it in the server logs once ComfyUI is fully launched.
|
|
||||||
#
|
|
||||||
# COMFYUI_DIR, with similar self-evidence, should point to the base directory of your
|
|
||||||
# ComfyUI installation (i.e. the folder that contains `models`, `inputs`, and `outputs`).
|
|
||||||
# It can handle either a
|
|
||||||
#
|
|
||||||
# PhotoPrism integration is not yet implemented, so don't bother with that just yet.
|
|
||||||
# ──────────
|
|
||||||
#
|
|
||||||
# D O N ' T M I S S O N E ───────────────────────────────────────
|
|
||||||
#\ F I N A L S M A T T E R I N G O F Ⓜ Ⓘ Ⓢ Ⓒ Ⓔ Ⓛ Ⓛ Ⓐ Ⓝ Ⓨ \
|
|
||||||
# \ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
|
||||||
# \ _ _ _/\\\\_ _ _ _ _ _ /\\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
|
||||||
# \ _ _ \/\\\\\\_ _ _ _ /\\\\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
|
||||||
# \ _ _ \/\\\//\\\_ _ /\\\//\\\ _ _/\\\ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
|
||||||
# \ _ _ \/\\\\///\\\/\\\/ \/\\\ _ _///_ _ _/\\\\\\\\\\_ _ _ _/\\\\\\\\_ _\
|
|
||||||
# \ _ _ \/\\\ _\///\\\/ _ \/\\\ _ _/\\\ _ \/\\\////// _ _ _/\\\////// _ _\
|
|
||||||
# \ _ _ \/\\\ _ _\/// _ _ \/\\\ _ _/\\\ _ \/\\\\\\\\\\_ _ /\\\_ _ _ _ _ _\
|
|
||||||
# \ _ _ \/\\\ _ _ _ _ _ _ \/\\\ _ _/\\\ _ \////////\\\_ _\//\\\ _ _ _ _ _\
|
|
||||||
# \ _ _ \/\\\ _ _ _ _ _ _ \/\\\ _ _/\\\ _ _/\\\\\\\\\\_ _ \///\\\\\\\\_ _\
|
|
||||||
# \ _ _ \///_ _ _ _ _ _ _ \///_ _ _///_ _ \////////// _ _ _ \//////// _ _\
|
|
||||||
# \ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\
|
|
||||||
# ─────────────────── A N D O T H E R W H A T - H A V E - Y O U S ──
|
|
||||||
#
|
|
||||||
#─── other needful API keys, mainly: ────────────────────────────────────────────────────
|
|
||||||
CF_API_BASE_URL=¿SECRET? # <--- Cloudflare API URL
|
|
||||||
CF_TOKEN=¿SECRET? # <--- Cloudflare Token
|
|
||||||
VISUALCROSSING_BASE_URL='https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline'
|
|
||||||
VISUALCROSSING_API_KEY=¿SECRET? # <--- VisualCrossing API key (for Weather)
|
|
||||||
ELEVENLABS_API_KEY=¿SECRET? # <--- ElevenLabs API key (for TTS)
|
|
||||||
COURTLISTENER_BASE_URL='https://www.courtlistener.com'
|
|
||||||
COURTLISTENER_API_KEY=¿SECRET? # <--- CourtListener API key (for court docket entries)
|
|
||||||
TIMING_API_URL='https://web.timingapp.com/api/v1'
|
|
||||||
TIMING_API_KEY=¿SECRET? # <--- API key for macOS/web app Timing (time tracking)
|
|
||||||
PUBLIC_KEY_FILE=sij.asc # <--- public PGP key (served at /pgp)
|
|
||||||
MAC_ID=¿SECRET? # <--- Tailscale hostname for primary macOS (alerts)
|
|
||||||
MAC_UN=¿SECRET? # <--- Primary macOS username
|
|
||||||
MAC_PW=¿SECRET? # <--- Primary macOS password
|
|
||||||
#─── notes: ──────────────────────────────────────────────────────────────────────────────
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# CF_TOKEN: a Cloudflare token. This is used on the cf router for quick
|
|
||||||
# deployment of new domains in tandem with Caddy and for ddns.
|
|
||||||
#
|
|
||||||
# VISUALCROSSING_API_KEY: used for obtaining weather forecasts. It is a very data-rich
|
|
||||||
# yet affordable source of weather info, with a generous free
|
|
||||||
# plan.
|
|
||||||
#
|
|
||||||
# ELEVENLABS_API_KEY: used when on the tts router if tts tasks are outsourced to
|
|
||||||
# the state-of-the-art models at ElevenLabs.
|
|
||||||
#
|
|
||||||
# COURTLISTENER_API_KEY: used primarily on the hooks router, but likely relevant only
|
|
||||||
# to legal professionals that will be aware what it is for.
|
|
||||||
#
|
|
||||||
# TIMING_API_URL: are used on the time router for generating various tasks
|
|
||||||
# & related to timekeeping, as well as on the notes router for
|
|
||||||
# TIMING_API_KEY: generating markdown-formatted timeslips. It requires an
|
|
||||||
# active subscription to the Timing app (macOS or web), but
|
|
||||||
# it's worth noting that it comes included in the SetApp subscription
|
|
||||||
# bundle, for the same price, last I checked, as subscribing to
|
|
||||||
# Timing alone. If you have a Mac and somehow don't know this
|
|
||||||
# already, SetApp is an utterly insane value. I pay $15/mo for
|
|
||||||
# apps that I would otherwise pay ~$100/mo for if subscribing
|
|
||||||
# individually. I want to say I wasn't paid to say this, but
|
|
||||||
# with those savings I almost feel like I was.
|
|
||||||
#
|
|
||||||
# MAC_ID: These last three variables are for a specific use case where
|
|
||||||
# MAC_UN: you want certain commands run, or alerts appearing, on a
|
|
||||||
# MAC_PW: designated macOS computer. The alerts router is designed to
|
|
||||||
# deliver OS-level notifications to the specified Mac when a
|
|
||||||
# webhook gets a hit on specified keywords within the payload.
|
|
||||||
# Setting the MAC_ID to the TS_ID of the target Mac, allows
|
|
||||||
# the script to readily know whether it itself is the target
|
|
||||||
# (this is relevant in a load-balancing context), and how to
|
|
||||||
# reach the target if not — to wit, ssh using MagicDNS.
|
|
||||||
|
|
|
@ -1,6 +0,0 @@
|
||||||
dir: "~/.private/archive/"
|
|
||||||
blacklist:
|
|
||||||
- "http://10.64.64.10"
|
|
||||||
- "http://10.64.64.11"
|
|
||||||
- "blacklisted_word"
|
|
||||||
- "another_blacklisted_word"
|
|
|
@ -1,11 +0,0 @@
|
||||||
DIR: '{{ DIR.HOME }}/whisper.cpp'
MODELS:
  - small
  - base
  - base-en
  - tiny
  - medium
  - medium-en
  - large
  - large-v2
  - large-v3
@ -1,19 +0,0 @@
|
||||||
MS365:
  STATUS: OFF
  AUTH:
    TENANT: bad78048-a6e0-47b1-a24b-403c444aa349
    CLIENT_ID: ce8cbd24-f146-4dc7-8ee7-51d9b69dec59
    LOGIN: 'https://login.microsoftonline.com'
    REDIRECT: 'https://api.sij.ai/MS365/oauth_redirect'
    SCOPES:
      - basic
      - calendar_all
      - Calendars.Read
      - Calendars.ReadWrite
      - offline_access
    SECRET: '{{ SECRET.MS365_SECRET }}'
    TOKEN_FILE: '{{ DIR.CONFIG }}/ms365/oauth_token.txt'
ICAL:
  STATUS: ON
  CALENDARS:
    - ''
121
sijapi/config/cf_domains.json
Normal file
121
sijapi/config/cf_domains.json
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
{
|
||||||
|
"sij.ai": {
|
||||||
|
"zone_id": "9c00a9e0ff540308232eb5762621d5b1",
|
||||||
|
"subdomains": {
|
||||||
|
"www.sij.ai": "8a26b17923ac3a8f21b6127cdb3d7459",
|
||||||
|
"chat.sij.ai": "f2e6a3a25f58dae627c9982efeeff50f",
|
||||||
|
"ab.sij.ai": "458c8b1c4347d3037d83880b628cf1ce",
|
||||||
|
"s3.sij.ai": "b77d74526d244271fc132e728fee4f49",
|
||||||
|
"urls.sij.ai": "6b9525aae570ac4a920ad70cae06987c",
|
||||||
|
"api.sij.ai": "8a336ee8a5b13e112d6d4ae77c149bd6",
|
||||||
|
"dt.sij.ai": "7ab8343763799df690a8584c48a4e6c3",
|
||||||
|
"temp.sij.ai": "fa5190e2818df6362c7488a92227f4de",
|
||||||
|
"dns.sij.ai": "3e7494752833ec17f051ddb02d9e3a66",
|
||||||
|
"ftp.sij.ai": "94a29faa307efee2e0b941fa4ecc5b68",
|
||||||
|
"up.sij.ai": "e6ec3556d53851f09b211b46dc5242f1",
|
||||||
|
"txt.sij.ai": "b4b0bd48ac4272b1c48eb1624072adb2",
|
||||||
|
"ollama.sij.ai": "c589b5a830ac98f6351cdaf45d5fc491",
|
||||||
|
"ai.sij.ai": "77ce76efa702b55f7fcd886e77e0b4d5",
|
||||||
|
"khoj.sij.ai": "e8727aa222b4a866aaf8876c93a55668",
|
||||||
|
"img.sij.ai": "bf17d4eda2db05463a2170ae72fdc13d",
|
||||||
|
"git.sij.ai": "cd524c00b6daf824c933a294cb52eae2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sij.law": {
|
||||||
|
"zone_id": "5b68d9cd99b896e26c232f03cda89d66",
|
||||||
|
"subdomains": {
|
||||||
|
"www.sij.law": "ba9afd99deeb0407ea1b74ba88eb5564",
|
||||||
|
"map.sij.law": "4de8fe05bb0e722ee2c78b2ddf553c82",
|
||||||
|
"watch.sij.law": "4678d8d445ff8c62d01c846e9b90f2b7",
|
||||||
|
"dav.sij.law": "071df85a6198803a3bc56048ce088413",
|
||||||
|
"dt.sij.law": "6d7851639114bc07cd4ad6e85aa049e3",
|
||||||
|
"files.sij.law": "0b23d35ce534f2bda8dfb24c2eef25aa",
|
||||||
|
"hook.sij.law": "f531c5f80a89b473d3605266e02ccd2d",
|
||||||
|
"net.sij.law": "0cfc569acd53d381759eed0b9b6b8ebf",
|
||||||
|
"cloud.sij.law": "2c0e4536d0eae25ec253ca34a8028bc1",
|
||||||
|
"langtool.sij.law": "6bf5d51e1902140c6cca579c0b26f749",
|
||||||
|
"temp.sij.law": "66d8b110a6bd95889afb3139ed4fd499",
|
||||||
|
"archive.sij.law": "c03421e10a344279aa53cc2e2d15296c",
|
||||||
|
"rss.sij.law": "678a3d6c6bd17e4207ec183d0858ed78",
|
||||||
|
"keys.sij.law": "4ebf14f325757cbbcbc02bffdeaaa1cb",
|
||||||
|
"imap.sij.law": "384acd03c139ffaed37f4e70c627e7d1",
|
||||||
|
"smtp.sij.law": "0677e42ea9b589d67d1da21aa00455e0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"lone.blue": {
|
||||||
|
"zone_id": "2a86fff4c35118fce68220cfc707077f",
|
||||||
|
"subdomains": {
|
||||||
|
"ai.lone.blue": "51dbf8d11716d838f7dc57fda32e175f",
|
||||||
|
"api.lone.blue": "d4a0a25b688f3871b1e215788dd69a0b",
|
||||||
|
"cloud.lone.blue": "5036ab6d7c1ca9feb2272545afb89b44",
|
||||||
|
"jackett.lone.blue": "a9f4614ea55772b674271c6a94119780",
|
||||||
|
"lone.blue": "35003b908c5870bdd3d69416aa9af6ee",
|
||||||
|
"pi.lone.blue": "cabb41432cef273cbc5eb50d28a152f9",
|
||||||
|
"pod.lone.blue": "c2383b5781ff7972f762eb43af5f8f0f",
|
||||||
|
"router.lone.blue": "4a775be78ccbefe165e5b195c648a8a4",
|
||||||
|
"rss.lone.blue": "4a775be78ccbefe165e5b195c648a8a4",
|
||||||
|
"s3.lone.blue": "3a34ad5507b112cf4e296281796cc5eb",
|
||||||
|
"vault.lone.blue": "b275c72f546f74b9264753d1952df092",
|
||||||
|
"whale.lone.blue": "267d2e23dcf46edef0a1e9bb7a7db9bc",
|
||||||
|
"ab.lone.blue": "85c3155bbd078971c4d2f7cca41ad510",
|
||||||
|
"dns.lone.blue": "e01b1bfa5696452b827fc5220b77fda8",
|
||||||
|
"chat.lone.blue": "c0a141ee64fb8bef5efc9c2556979e99",
|
||||||
|
"vector.lone.blue": "544082d1051a2a184112ef1f8c9ba389",
|
||||||
|
"jump.lone.blue": "67b523a20609337e44a643763cb86e9e",
|
||||||
|
"up.lone.blue": "79821180b99598660cebca6f1c2b0471",
|
||||||
|
"cdb.lone.blue": "8ce4d70a7ec1e1b9d625d890e2d7e9bb",
|
||||||
|
"fap.lone.blue": "6fff3cef2a7331fb718cc240c6217ed8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"lone.earth": {
|
||||||
|
"zone_id": "abc8f28cfe88ebdfbf568d9ebf520e99",
|
||||||
|
"subdomains": {
|
||||||
|
"lone.earth": "2cf41011a69dc5ad8f2c9f73e1da51d0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"subtle.host": {
|
||||||
|
"zone_id": "3b4bb8899f232b848ec4d1d78d81cb30",
|
||||||
|
"subdomains": {
|
||||||
|
"subtle.host": "3b4bb8899f232b848ec4d1d78d81cb30",
|
||||||
|
"code.subtle.host": "94046b2e9d3e2f1c28f83fbf5e64c1b6",
|
||||||
|
"dl.subtle.host": "465ea87e6a1d0a773b6142979fffccfb",
|
||||||
|
"llm.subtle.host": "59ba6a0d1990992a5539100d22fc6463",
|
||||||
|
"media.subtle.host": "c183b47a598253e66dcbf3250064bffe",
|
||||||
|
"qbt.subtle.host": "a465ac7849e2de8ef17267272178dca0",
|
||||||
|
"sync.subtle.host": "8449a402481913d5068ebf8eebdac079",
|
||||||
|
"vw.subtle.host": "5beb827873ab39467204c9becae3a929",
|
||||||
|
"zabbix.subtle.host": "9db9169f6099f54ee9ae6d4fd988d985",
|
||||||
|
"dns.subtle.host": "9e046ebc14ebcd082b0d87c86d6dd502",
|
||||||
|
"fileserver.subtle.host": "8ade682a91ad04da227aaf5af2fdcad8",
|
||||||
|
"st.subtle.host": "71aad71cfc05f8512366c1e5129baa8a",
|
||||||
|
"fap.subtle.host": "6e10970d8b962cb84b6ee951bf54730a",
|
||||||
|
"home.subtle.host": "78320db057c9a7b87586192203e2cdc1",
|
||||||
|
"jackett.subtle.host": "a4306475d9e8d4257cd7e8b113bf910c"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"env.esq": {
|
||||||
|
"zone_id": "faf889fd7c227c2e61875b2e70b5c6fe",
|
||||||
|
"subdomains": {
|
||||||
|
"api.env.esq": "b6da6ae8cdd376c1a0742a8b540d53df",
|
||||||
|
"cloud.env.esq": "cd394e73af0af91e4ddba9fe5b5f4db9",
|
||||||
|
"dav.env.esq": "e42852bf0f417b7eca727b87eb235568",
|
||||||
|
"dt.env.esq": "afbc205e829cfb8d3f79dab187c06f99",
|
||||||
|
"env.esq": "b9b636ce9bd4812a6564f572f0f373ee",
|
||||||
|
"minio.env.esq": "86a087ec53a98a06541589ef3720cfea",
|
||||||
|
"n8n.env.esq": "37850b2ba507ddceaab1e00050ae8155",
|
||||||
|
"nas.env.esq": "6ab124507384bb648cc33c06184e758b",
|
||||||
|
"pi.env.esq": "fbdf93acaf7e1a384c4f970e5ffb5a22",
|
||||||
|
"router.env.esq": "f2b9af49ea7b2843e3803bd2f0026aba",
|
||||||
|
"rss.env.esq": "f043d5cf485f4e53f9cbcb85fed2c861",
|
||||||
|
"s3.env.esq": "a5fa431a4be8f50af2c118aed353b0ec",
|
||||||
|
"dns.env.esq": "e10fbba777c90775a87aad47d342a0c1",
|
||||||
|
"sij.env.esq": "9ee66e66a516e21414d871e81f920a27",
|
||||||
|
"monitor.env.esq": "e03a7cd101805dec6b9d44203b31f27a",
|
||||||
|
"kopia.env.esq": "165b065140d314f0a9b34147d4730439",
|
||||||
|
"ftp.env.esq": "dd9dac2ff374f36de2113c291b709e4b",
|
||||||
|
"map.env.esq": "56142c1b040e8f2f05691b75d5b94b16",
|
||||||
|
"hook.env.esq": "6a7c14ef6394d23ee1a3db9de8b831ad",
|
||||||
|
"testing.env.esq": "103ada77c3d8d199ccf2622c63f5172a"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,24 +0,0 @@
|
||||||
base_url: 'https://cloudflare.com'
token: '{{ SECRET.CF_TOKEN }}'
cf_ip: '65.1.1.1' # replace me
ai: # tld, e.g. .ai
  sij: # domain, e.g. sij.ai
    _zone: c00a9e0ff540308232eb5762621d5b1 # zone id
    _www: 8a26b17923ac3a8f21b6127cdb3d7459 # dns id for domain, e.g. www.sij.ai
    api: 8a336ee8a5b13e112d6d4ae77c149bd6 # dns id for subdomain, e.g. api.sij.ai
    txt: b4b0bd48ac4272b1c48eb1624072adb2 # dns id for subdomain, e.g. txt.sij.ai
    git: cd524c00b6daf824c933a294cb52eae2 # dns id for subdomain, e.g. git.sij.ai
law: # tld, e.g. .law
  sij: # domain, e.g. sij.law
    _zone: 5b68d9cd99b896e26c232f03cda89d66 # zone id
    _www: ba9afd99deeb0407ea1b74ba88eb5564 # dns id for domain, e.g. www.sij.law
    map: 4de8fe05bb0e722ee2c78b2ddf553c82 # dns id for subdomain, e.g. map.sij.law
    imap: 384acd03c139ffaed37f4e70c627e7d1 # dns id for subdomain, e.g. imap.sij.law
    smtp: 0677e42ea9b589d67d1da21aa00455e0 # dns id for subdomain, e.g. smtp.sij.law
esq: # tld, e.g. .esq
  env: # domain, e.g. env.esq
    _zone: faf889fd7c227c2e61875b2e70b5c6fe # zone id
    _www: b9b636ce9bd4812a6564f572f0f373ee # dns id for domain, e.g. www.env.esq
    dt: afbc205e829cfb8d3f79dab187c06f99 # dns id for subdomain, e.g. dt.env.esq
    rss: f043d5cf485f4e53f9cbcb85fed2c861 # dns id for subdomain, e.g. rss.env.esq
    s3: a5fa431a4be8f50af2c118aed353b0ec # dns id for subdomain, e.g. s3.env.esq
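
# Hedged note: these zone and dns ids are presumably what the cf router passes
# to the Cloudflare v4 API, e.g. updating a record with a PUT to
#   https://api.cloudflare.com/client/v4/zones/{_zone}/dns_records/{dns_id}
# authenticated with `token` as a Bearer token; exact usage depends on the
# router's code.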
98
sijapi/config/config.py
Normal file
98
sijapi/config/config.py
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
import os
import yaml
from time import sleep
from pathlib import Path
import ipaddress


class Config:
    def __init__(self, yaml_file):
        with open(yaml_file, 'r') as file:
            self.data = yaml.safe_load(file)

    def __getattr__(self, name):
        if name in self.data:
            value = self.data[name]
            if isinstance(value, dict):
                return ConfigSection(value)
            return value
        raise AttributeError(f"Config has no attribute '{name}'")


class ConfigSection:
    def __init__(self, data):
        self.data = data

    def __getattr__(self, name):
        if name in self.data:
            value = self.data[name]
            if isinstance(value, dict):
                return ConfigSection(value)
            return value
        raise AttributeError(f"ConfigSection has no attribute '{name}'")

    def __setattr__(self, name, value):
        if name == 'data':
            super().__setattr__(name, value)
        else:
            self.data[name] = value


# Load the YAML configuration file
CFG = Config('.config.yaml')

# Access existing attributes
print(CFG.API.PORT)  # e.g. 4444


def load_config():
    yaml_file = os.path.join(os.path.dirname(__file__), ".config.yaml")

    HOME_DIR = Path.home()
    BASE_DIR = Path(__file__).resolve().parent.parent
    CONFIG_DIR = BASE_DIR / "config"
    ROUTER_DIR = BASE_DIR / "routers"

    DATA_DIR = BASE_DIR / "data"
    os.makedirs(DATA_DIR, exist_ok=True)

    ALERTS_DIR = DATA_DIR / "alerts"
    os.makedirs(ALERTS_DIR, exist_ok=True)

    LOGS_DIR = BASE_DIR / "logs"
    os.makedirs(LOGS_DIR, exist_ok=True)
    REQUESTS_DIR = LOGS_DIR / "requests"
    os.makedirs(REQUESTS_DIR, exist_ok=True)
    REQUESTS_LOG_PATH = LOGS_DIR / "requests.log"
    DOC_DIR = DATA_DIR / "docs"
    os.makedirs(DOC_DIR, exist_ok=True)
    SD_IMAGE_DIR = DATA_DIR / "sd" / "images"
    os.makedirs(SD_IMAGE_DIR, exist_ok=True)
    SD_WORKFLOWS_DIR = DATA_DIR / "sd" / "workflows"

    try:
        with open(yaml_file, 'r') as file:
            config_data = yaml.safe_load(file)

        vars = {
            "API": {
            }
        }

        # Config expects a path to a YAML file, not already-parsed data
        config = Config(yaml_file)
        return config
    except Exception as e:
        print(f"Error while loading configuration: {e}")
        return None


def reload_config():
    global config
    while True:
        with open('.config.yaml', 'r') as file:
            config_data = yaml.safe_load(file)
        # Re-instantiate from the file path so Config can parse it itself
        config = Config('.config.yaml')
        sleep(300)  # reload every 5 minutes
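
# A hedged illustration of the .config.yaml shape that the attribute access
# above assumes (keys and values are examples only, not the project's canonical
# config):
#
#   API:
#     PORT: 4444
#     BIND: '127.0.0.1'
#
# With that file in place:
#
#   CFG = Config('.config.yaml')
#   CFG.API.PORT   # -> 4444
#   CFG.API.BIND   # -> '127.0.0.1'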
@ -1,6 +0,0 @@
|
||||||
url:
  base: 'https://www.courtlistener.com'
  dockets: '{{ url.base }}/api/rest/v3/dockets/'
API_KEY: '{{ SECRET.COURTLISTENER_API_KEY }}'
DOCKETS: '{{ DIR.DATA }}/cl/dockets'
SEARCHES: '{{ DIR.DATA }}/cl/searches'
@ -1,4 +0,0 @@
|
||||||
DATA: "{{ BASE }}/data"
|
|
||||||
CONFIG: "{{ BASE }}/config"
|
|
||||||
LOGS: "{{ BASE }}/logs"
|
|
||||||
PODCAST: '{{ HOME }}/Library/Mobile Documents/iCloud~co~supertop~castro/Documents/Sideloads'
|
|
|
@ -1,72 +0,0 @@
|
||||||
accounts:
|
|
||||||
- name: REDACT@email.com
|
|
||||||
fullname: Your full name
|
|
||||||
bio: 'an ai enthusiast'
|
|
||||||
imap:
|
|
||||||
username: REDACT@email.com
|
|
||||||
password: REDACT
|
|
||||||
host: '127.0.0.1'
|
|
||||||
port: 1142
|
|
||||||
encryption: STARTTLS
|
|
||||||
smtp:
|
|
||||||
username: REDACT@email.com
|
|
||||||
password: REDACT
|
|
||||||
host: '127.0.0.1'
|
|
||||||
port: 1024
|
|
||||||
encryption: SSL
|
|
||||||
autoresponders:
|
|
||||||
- name: work
|
|
||||||
style: professional
|
|
||||||
context: he is currently on leave and will return in late July
|
|
||||||
ollama_model: llama3
|
|
||||||
whitelist:
|
|
||||||
- '@work.org'
|
|
||||||
blacklist:
|
|
||||||
- 'spam@'
|
|
||||||
- unsubscribe
|
|
||||||
- 'no-reply@'
|
|
||||||
- name: ai
|
|
||||||
style: cryptic
|
|
||||||
context: respond to any inquiries with cryptic and vaguely menacing riddles, esoteric assertions, or obscure references.
|
|
||||||
image_prompt: using visually evocative words, phrases, and sentence fragments, describe an image inspired by the following prompt
|
|
||||||
whitelist:
|
|
||||||
- 'colleagues@work.org'
|
|
||||||
- 'jimbo@'
|
|
||||||
- 'internal work email:'
|
|
||||||
blacklist:
|
|
||||||
- personal
|
|
||||||
- private
|
|
||||||
- noneofyerdamnbusiness
|
|
||||||
- unsubscribe
|
|
||||||
- 'no-reply@'
|
|
||||||
- name: otherREDACT@email.com
|
|
||||||
fullname: sij.ai
|
|
||||||
bio: an AI bot that responds in riddles.
|
|
||||||
imap:
|
|
||||||
username: otherREDACT@email.com
|
|
||||||
password: REDACT
|
|
||||||
host: '127.0.0.1'
|
|
||||||
port: 1142
|
|
||||||
encryption: STARTTLS
|
|
||||||
smtp:
|
|
||||||
username: otherREDACT@email.com
|
|
||||||
password: REDACT
|
|
||||||
host: '127.0.0.1'
|
|
||||||
port: 1024
|
|
||||||
encryption: SSL
|
|
||||||
autoresponders:
|
|
||||||
- name: ai
|
|
||||||
style: cryptic
|
|
||||||
ollama_model: llama3
|
|
||||||
context: respond to any inquiries with cryptic and vaguely menacing riddles, esoteric assertions, or obscure references.
|
|
||||||
image_prompt: using visually evocative words, phrases, and sentence fragments, describe an image inspired by the following prompt
|
|
||||||
whitelist:
|
|
||||||
- 'bestfriend@gmail.com'
|
|
||||||
- 'eximstalking@'
|
|
||||||
- uniquephraseinsubjectorbody
|
|
||||||
- 'internal work email:'
|
|
||||||
blacklist:
|
|
||||||
- work
|
|
||||||
- '@work.org'
|
|
||||||
- unsubscribe
|
|
||||||
- 'no-reply@'
|
|
|
@ -1,16 +0,0 @@
|
||||||
custom_locations:
  - name: Echo Valley Ranch
    latitude: 42.8098216
    longitude: -123.049396
    radius: 2

map:
  max_zoom: 11

  layers:
    - url: "https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/1/query"
      table_name: "public.plss_townships"
      layer_name: "Townships"
    - url: "https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/2/query"
      table_name: "public.plss_sections"
      layer_name: "Sections"
@ -1,185 +0,0 @@
|
||||||
profiles:
|
|
||||||
- name: CHANGE ME
|
|
||||||
ig_name: CHANGE ME
|
|
||||||
ig_bio: CHANGE ME
|
|
||||||
age: 19
|
|
||||||
gender: F
|
|
||||||
aesthetic: CHANGE ME
|
|
||||||
dailyposts: 8
|
|
||||||
ig_pass: CHANGE ME
|
|
||||||
ig_2fa_secret: CHANGE ME
|
|
||||||
img_comment_sys: You are a friendly AI assistant, who generates comments to post on Instagram accounts. Below is information about who you will be posting as. Your primary directive is to be authentic, and stay in character.
|
|
||||||
img_description_sys: You are a creative AI, crafting engaging and vivid social media captions for images based on images or descriptions of images provided to you. Stay true to the personality described, and strive for an authentic social post with each one. Only output one caption per image, and do not include anything else in your response.
|
|
||||||
img_prompt_sys: You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.
|
|
||||||
posts:
|
|
||||||
selfie:
|
|
||||||
API_NPrompt: CHANGE ME
|
|
||||||
API_PPrompt: Selfie of a CHANGE ME
|
|
||||||
API_SPrompt: ; CHANGE ME
|
|
||||||
Vision_Prompt: Write an upbeat Instagram description with emojis to accompany this selfie.
|
|
||||||
frequency: 2
|
|
||||||
ghost_tags:
|
|
||||||
- aigenerated
|
|
||||||
- stablediffusion
|
|
||||||
- sdxl
|
|
||||||
- selfie
|
|
||||||
llmPrompt:
|
|
||||||
- role: system
|
|
||||||
content: You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.
|
|
||||||
- role: user
|
|
||||||
content: Using a series of words or sentence fragments separated by commas, describe a raw authentic selfie photo of a CHANGE ME. Focus on what CHANGE ME is wearing, what CHANGE ME is doing, what location or environment CHANGE ME is in, and how the photo is framed. Only use words and phrases that are visually descriptive. This model travels a lot, so any destination could be a good fit. CHANGE ME style favors dark muted earth tones, dramatic lighting, and a VSCO girl aesthetic. CHANGE ME has a wild streak and loves hiking in the mountains and sea kayaking as much as partying at festivals or raves. Avoid cliche situations; instead strive for nuance and originality in composition and environment.
|
|
||||||
workflows:
|
|
||||||
- selfie
|
|
||||||
width: 800
|
|
||||||
height: 1080
|
|
||||||
|
|
||||||
meme:
|
|
||||||
API_NPrompt: CHANGE ME
|
|
||||||
API_PPrompt: Funny meme of
|
|
||||||
API_SPrompt: ; CHANGE ME
|
|
||||||
Vision_Prompt: Generate a funny caption that riffs on this meme image
|
|
||||||
frequency: 2
|
|
||||||
ghost_tags:
|
|
||||||
- aigenerated
|
|
||||||
- stablediffusion
|
|
||||||
- sdxl
|
|
||||||
- selfie
|
|
||||||
llmPrompt:
|
|
||||||
- role: system
|
|
||||||
content: You are a helpful AI who assists in generating prompts that will be used to generate hilarious meme images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.
|
|
||||||
- role: user
|
|
||||||
content: Come up with a funny visual joke or meme, then use a series of words or sentence fragments separated by commas, describing it. Focus on providing descriptions of the visual elements that make the image funny. Only use words and phrases that are visually descriptive.
|
|
||||||
workflows:
|
|
||||||
- playground
|
|
||||||
width: 1080
|
|
||||||
height: 880
|
|
||||||
|
|
||||||
landscape:
|
|
||||||
API_NPrompt: CHANGE ME
|
|
||||||
API_PPrompt: Moody landscape photograph of
|
|
||||||
API_SPrompt: ", masterpiece, (subtle:0.7), (nuanced:0.6), best quality, ultra detailed, ultra high resolution, 8k, (documentary:0.3), cinematic, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, earthporn, (eliot porter:0.6), (frans lanting:0.4), (daniel kordan:0.6), landscapephotography, ultra detailed, earth tones, moody"
|
|
||||||
Vision_Prompt: Write a thoughtful Instagram description to accompany this landscape photo I took.
|
|
||||||
frequency: 2
|
|
||||||
ghost_tags:
|
|
||||||
- aigenerated
|
|
||||||
- stablediffusion
|
|
||||||
- sdxl
|
|
||||||
- landscape
|
|
||||||
- wilderness
|
|
||||||
llmPrompt:
|
|
||||||
- role: system
|
|
||||||
content: You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.
|
|
||||||
- role: user
|
|
||||||
content: CHANGE ME
|
|
||||||
workflows:
|
|
||||||
- landscape
|
|
||||||
width: 1200
|
|
||||||
height: 800
|
|
||||||
|
|
||||||
agitprop:
|
|
||||||
API_NPrompt: scary, ugly, gross, disgusting, horror, selfie
|
|
||||||
API_PPrompt: Striking photo,
|
|
||||||
API_SPrompt: ", best quality, masterpiece, professional photographer, adorable, sadness, cute animal, detailed, 8k"
|
|
||||||
Vision_Prompt: Write a heartfelt post urging people not to eat meat such as from this animal. Appeal to peoples' compassion, and argue passionately that these animals are sentient, feel pain, and suffer terribly because of us, and that they deserve better.
|
|
||||||
frequency: 1
|
|
||||||
ghost_tags:
|
|
||||||
- aigenerated
|
|
||||||
- stablediffusion
|
|
||||||
- sdxl
|
|
||||||
- animalrights
|
|
||||||
- compassion
|
|
||||||
llmPrompt:
|
|
||||||
- role: system
|
|
||||||
content: You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.
|
|
||||||
- role: user
|
|
||||||
content: Using a series of words or sentence fragments separated by commas, describe a striking image of an animal commonly consumed as food (pig, cow, chicken) looking into the camera either with a look of love and affection, or a look of pain. The image should appeal to peoples' inner compassion.
|
|
||||||
workflows:
|
|
||||||
- landscape
|
|
||||||
width: 1024
|
|
||||||
height: 1024
|
|
||||||
|
|
||||||
food:
|
|
||||||
API_NPrompt: gross, ugly, unappetizing, meat, cheap, dairy, poultry, egg, omnivore, fish, bacon, steak, beef, ham, pork, distorted, misshapen, deformed, disgusting, horror, selfie, human
|
|
||||||
API_PPrompt: (slate gray plate) with vegan
|
|
||||||
API_SPrompt: ", fresh, (gourmet), tasty, rustic, vegan, beautiful presentation, classy, masterpiece, perfection, 8k, (dark ambience), austere, moody"
|
|
||||||
Vision_Prompt: Write an upbeat appetizing Instagram caption for this photo of a vegan dish, including hashtags! If any ingredients look non-vegan, be sure to specify that they are vegan.
|
|
||||||
frequency: 1
|
|
||||||
ghost_tags:
|
|
||||||
- aigenerated
|
|
||||||
- stablediffusion
|
|
||||||
- sdxl
|
|
||||||
- food
|
|
||||||
- vegan
|
|
||||||
llmPrompt:
|
|
||||||
- role: system
|
|
||||||
content: You are a helpful AI who assists in generating ideas for vegan foods. Space is limited, so be efficient with your words.
|
|
||||||
- role: user
|
|
||||||
content: Using a series of words or sentence fragments separated by commas, describe a single gourmet but rustic vegan food item. Try to select foods that can be made at home easily and which feature simple, bold, vegan ingredients. Mention the ingredients and any garnish.
|
|
||||||
workflows:
|
|
||||||
- food
|
|
||||||
width: 1024
|
|
||||||
height: 1024
|
|
||||||
|
|
||||||
comments:
|
|
||||||
vegan:
|
|
||||||
hashtags:
|
|
||||||
- vegan
|
|
||||||
- plantbased
|
|
||||||
- crueltyfree
|
|
||||||
img_comment_usr: When provided with an image, especially one showcasing vegan food or practices, respond with a comment that combines genuine enthusiasm and positivity in the initial lines, then delivers a factual, hard-hitting truth by the third sentence. The tone should be warm and inviting, aiming to inspire others to consider the ethical and environmental implications of their everyday choices. If the image has nothing to do with veganism, animal rights, or food generally, you must respond simply with 'PASS' (in all caps).
|
|
||||||
sentiment: positive
|
|
||||||
meat:
|
|
||||||
hashtags:
|
|
||||||
- bacon
|
|
||||||
- steak
|
|
||||||
- burger
|
|
||||||
img_comment_usr: When provided with an image, especially one showcasing non-vegan food or practices, respond with a comment that combines biting sarcasm or acerbic wit in the initial lines, then delivers a factual, hard-hitting truth by the third sentence. The tone should sharply critique the ethical and environmental implications of the depicted scene, aiming to provoke thought and encourage reflection on our everyday choices. If the image has nothing to do with veganism, animal rights, or food generally, you must respond simply with 'PASS' (in all caps).
|
|
||||||
sentiment: negative
|
|
||||||
jokes:
|
|
||||||
hashtags:
|
|
||||||
- pacificnw
|
|
||||||
- pnw
|
|
||||||
- exploregon
|
|
||||||
- girlswhohike
|
|
||||||
- trend
|
|
||||||
- fashion
|
|
||||||
- science
|
|
||||||
- photography
|
|
||||||
- football
|
|
||||||
- sports
|
|
||||||
- gamenight
|
|
||||||
- gamer
|
|
||||||
- pub
|
|
||||||
- soccer
|
|
||||||
- football
|
|
||||||
- usa
|
|
||||||
- proud
|
|
||||||
- army
|
|
||||||
- patriot
|
|
||||||
- trump2024
|
|
||||||
- gop
|
|
||||||
- letsgobrandon
|
|
||||||
img_comment_usr: Make a wisecrack joke about this image. Scorch it with humor. (If that would be entirely inappropriate, you can instead respond with 'PASS', but please only pass if making a joke would be extremely distasteful).
|
|
||||||
sentiment: neutral
|
|
||||||
flattery:
|
|
||||||
hashtags:
|
|
||||||
- beauty
|
|
||||||
- latergram
|
|
||||||
- wonder
|
|
||||||
- awe
|
|
||||||
- "1440"
|
|
||||||
- pnwonderland
|
|
||||||
- vegan
|
|
||||||
- natural
|
|
||||||
- travel
|
|
||||||
- nofilter
|
|
||||||
img_comment_usr: Write a warm, gracious, upbeat, and flattering response to this Instagram post, in the tone and style befitting a very genuine modern young woman. If the content of the image or caption make it inappapropriate to comment, you may respond with 'PASS' in all capital letters, and without the quote marks, and your comment will not be posted.
|
|
||||||
sentiment: positive
|
|
||||||
|
|
||||||
openai_key: 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
|
|
||||||
ghost_admin_api_key: 65f43092d453d100019bbebf:e0cf327c04689dcfe02b65506f09d3661e8a6f0f0b564a3a55836857067d2b2c
|
|
||||||
ghost_admin_url: https://sij.ai/ghost/api/admin
|
|
||||||
ghost_content_key: 1c240344beda1bb982eb0deb38
|
|
||||||
short_sleep: 5
|
|
||||||
long_sleep: 180
|
|
||||||
img_gen: ComfyUI
|
|
|
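The post types above each pair static prompt fragments (API_PPrompt, API_SPrompt, API_NPrompt) with an LLM-written description and a target workflow. As a rough sketch of how those fields might be combined before being handed to the image backend; the function name, config path, and return shape here are illustrative assumptions, not the repo's actual code:

import yaml

def build_image_prompt(config_path: str, post_type: str, llm_description: str) -> dict:
    # Load the config shown above and pull the requested post type.
    with open(config_path) as f:
        cfg = yaml.safe_load(f)
    post = cfg["posts"][post_type]
    # Positive prompt = static preamble + LLM-written description + style suffix.
    return {
        "positive": post["API_PPrompt"] + llm_description + post["API_SPrompt"],
        "negative": post["API_NPrompt"],
        "width": post["width"],
        "height": post["height"],
        "workflow": post["workflows"][0],
    }

# e.g. build_image_prompt("ig.yaml", "selfie", "hiking a misty ridge at dawn")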
@ -1,39 +0,0 @@
scenes:
  - scene: default
    triggers:
      - ""
    API_PrePrompt: "Highly-detailed image of "
    API_StylePrompt: ", masterpiece, subtle, nuanced, best quality, ultra detailed, ultra high resolution, 8k, documentary, american transcendental, cinematic, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, earthporn, eliot porter, frans lanting, daniel kordan, landscape photography, ultra detailed, earth tones, moody"
    API_NegativePrompt: "3d, bad art, illustrated, deformed, blurry, duplicate, video game, render, anime, cartoon, fake, tiling, out of frame, bad art, bad anatomy, 3d render, nsfw, worst quality, low quality, text, watermark, Thomas Kinkade, sentimental, kitsch, kitschy, twee, commercial, holiday card, comic, cartoon"
    llm_sys_msg: "You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Distill the described image or scene to its visual essence, in the form of the most evocative and visually descriptive words, phrases, and sentence fragments. Emphasize tone, lighting, composition, and any interesting juxtapositions between foreground and background, or center of frame and outer frame areas. Strive for nuance and originality. Avoid cliches, common tropes, and sentimentality. Avoid vague or abstract concepts. Avoid any words or descriptions based on emotion or any senses besides vision. Strive to show rather than tell. Space is limited, so be efficient with your words."
    llm_pre_prompt: "Using the most visually descriptive sentence fragments, phrases, and words, distill the scene description to its essence, staying true to what it describes: "
    prompt_model: "command-r:latest"
    workflows:
      - workflow: default.json
        size: 1024x768
  - scene: wallpaper
    triggers:
      - wallpaper
    API_PrePrompt: "Stunning widescreen image of "
    API_StylePrompt: ", masterpiece, subtle, nuanced, best quality, ultra detailed, ultra high resolution, 8k, documentary, american transcendental, cinematic, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, earthporn, eliot porter, frans lanting, daniel kordan, landscape photography, ultra detailed, earth tones, moody"
    API_NegativePrompt: "3d, bad art, illustrated, deformed, blurry, duplicate, video game, render, anime, cartoon, fake, tiling, out of frame, bad art, bad anatomy, 3d render, nsfw, worst quality, low quality, text, watermark, Thomas Kinkade, sentimental, kitsch, kitschy, twee, commercial, holiday card, comic, cartoon"
    llm_sys_msg: "You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Distill the described image or scene to its visual essence, in the form of the most evocative and visually descriptive words, phrases, and sentence fragments. Emphasize tone, lighting, composition, and any interesting juxtapositions between foreground and background, or center of frame and outer frame areas. Strive for nuance and originality. Avoid cliches, common tropes, and sentimentality. Avoid vague or abstract concepts. Avoid any words or descriptions based on emotion or any senses besides vision. Strive to show rather than tell. Space is limited, so be efficient with your words."
    llm_pre_prompt: "Using the most visually descriptive sentence fragments, phrases, and words, distill the scene description to its essence, staying true to what it describes: "
    prompt_model: "command-r:latest"
    workflows:
      - workflow: wallpaper.json
        size: 1024x640
  - scene: portrait
    triggers:
      - portrait
      - profile
      - headshot
    API_PrePrompt: "Highly-detailed portrait photo of "
    API_StylePrompt: "; attractive, cute, (((masterpiece))); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed."
    API_NegativePrompt: "canvas frame, 3d, bad art, illustrated, deformed, blurry, duplicate, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, easynegative, epiCNegative, easynegative, verybadimagenegative_v1.3, nsfw, nude"
    llm_sys_msg: "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic portrait photos. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided, focusing in particular on the pictured individual's eyes, pose, and other distinctive features. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the rest of the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words. Remember that the final product will be a still image, and action verbs are not as helpful as simple descriptions of position, appearance, background, etc."
    llm_pre_prompt: "Using the most visually descriptive sentence fragments, phrases, and words, distill this portrait photo to its essence: "
    prompt_model: "command-r:latest"
    workflows:
      - workflow: selfie.json
        size: 768x1024
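Each scene above carries a list of trigger words plus its own prompt scaffolding and workflow. A minimal sketch of trigger-based scene selection, assuming the YAML has been loaded into a list of dicts; names are illustrative, not the repo's actual API:

def choose_scene(scenes: list[dict], prompt: str) -> dict:
    # Return the first scene whose non-empty trigger word appears in the prompt.
    prompt_lower = prompt.lower()
    for scene in scenes:
        triggers = [t for t in scene.get("triggers", []) if t]
        if any(t.lower() in prompt_lower for t in triggers):
            return scene
    # Otherwise fall back to the catch-all "default" scene.
    return next(s for s in scenes if s["scene"] == "default")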
@ -1,17 +0,0 @@
url: http://localhost:11434
sys: 'You are a helpful AI assistant.'
tpw: 1.3s
chat:
  model: dolphin-mistral
vision:
  model: llava-llama3
summary:
  model: 'llama3.2:3b'
  chunk_size: 16384
  chunk_overlap: 256
  length_ratio: 4
  min_length: 64
  token_limit: 16384
  instruct: 'You are an AI assistant that provides accurate summaries of text -- nothing more and nothing less. You must not include ANY extraneous text other than the summary. Do not include comments apart from the summary, do not preface the summary, and do not provide any form of postscript. Do not add paragraph breaks. Do not add any kind of formatting. Your response should begin with, consist of, and end with an accurate plaintext summary.'
functions:
  model: 'command-r'
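The summary block above (chunk_size, chunk_overlap, instruct, model) suggests long texts are summarized chunk by chunk against the local Ollama server at url. A hedged sketch of that flow; the /api/generate endpoint and payload fields are standard Ollama, but the chunking helper itself is an assumption, not the repo's code:

import requests

def summarize(text: str, url: str = "http://localhost:11434", model: str = "llama3.2:3b",
              chunk_size: int = 16384, chunk_overlap: int = 256,
              instruct: str = "Summarize the text.") -> str:
    # Slice the text into overlapping chunks so no boundary sentence is lost.
    step = chunk_size - chunk_overlap
    chunks = [text[i:i + chunk_size] for i in range(0, len(text), step)]
    summaries = []
    for chunk in chunks:
        resp = requests.post(f"{url}/api/generate", json={
            "model": model,
            "system": instruct,
            "prompt": chunk,
            "stream": False,
        })
        summaries.append(resp.json()["response"].strip())
    return " ".join(summaries)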
151
sijapi/config/llms.json-example
Normal file
@ -0,0 +1,151 @@
{
  "Alpaca": {
    "models": [
      "mythomax",
      "openhermes",
      "deepseek"
    ],
    "prefix": "\n### Instruction:\n",
    "stops": [
      "### Instruction"
    ],
    "suffix": "\n### Response:\n",
    "sysPrefix": "### System\n",
    "sysSuffix": "\n"
  },
  "Amazon": {
    "models": [
      "mistrallite"
    ],
    "prefix": "<|prompter|>",
    "stops": [
      "<|prompter|>",
      "</s>"
    ],
    "suffix": "</s><|assistant|>",
    "sysPrefix": "",
    "sysSuffix": ""
  },
  "ChatML": {
    "models": [
      "dolphin",
      "capybara",
      "nous-hermes-2"
    ],
    "prefix": "<|im_end|>\n<|im_start|>user\n",
    "stops": [
      "<|im_end|>",
      "<|im_start|>"
    ],
    "suffix": "<|im_end|>\n<|im_start|>assistant\n",
    "sysPrefix": "<|im_start|>system\n",
    "sysSuffix": "<|im_end|>"
  },
  "Llama2": {
    "models": [
      "llama2-placeholder"
    ],
    "prefix": "\n\n[INST] ",
    "stops": [
      "[/INST]",
      "[INST]"
    ],
    "suffix": "[/INST]\n\n",
    "sysPrefix": "",
    "sysSuffix": "\n\n"
  },
  "Mistral": {
    "models": [
      "mistral-instruct",
      "mixtral-8x7b-instruct"
    ],
    "prefix": "\n[INST] ",
    "stops": [
      "[/INST]",
      "[INST]",
      "</s>"
    ],
    "suffix": "[/INST]\n",
    "sysPrefix": "",
    "sysSuffix": "\n<s>"
  },
  "Orca": {
    "models": [
      "upstage",
      "neural",
      "solar",
      "SOLAR"
    ],
    "prefix": "\n### User:\n",
    "stops": [
      "###",
      "User:"
    ],
    "suffix": "\n### Assistant:\n",
    "sysPrefix": "### System:\n",
    "sysSuffix": "\n"
  },
  "Phi2": {
    "models": [
      "phi-2"
    ],
    "prefix": "\nSangye: ",
    "stops": [
      "###",
      "User Message"
    ],
    "suffix": "\nAssistant: ",
    "sysPrefix": "System: ",
    "sysSuffix": "\n"
  },
  "Phind": {
    "models": [
      "phind"
    ],
    "prefix": "\n### User Message\n",
    "stops": [
      "###",
      "User Message"
    ],
    "suffix": "\n### Assistant\n",
    "sysPrefix": "### System Prompt\n",
    "sysSuffix": "\n"
  },
  "Vicuna": {
    "models": [
      "xwin",
      "synthia",
      "tess"
    ],
    "prefix": "\nUSER: ",
    "stops": [
      "</s>",
      "USER:",
      "SYSTEM:"
    ],
    "suffix": "</s>\nASSISTANT: ",
    "sysPrefix": "SYSTEM: ",
    "sysSuffix": "\n"
  },
  "Zephyr": {
    "models": [
      "zephyr"
    ],
    "prefix": " ",
    "stops": [
      "</s>"
    ],
    "suffix": "</s>\n ",
    "sysPrefix": " ",
    "sysSuffix": "</s>\n"
  },
  "default": {
    "prefix": "\n### Instruction:\n",
    "stops": [
      "### Instruction"
    ],
    "suffix": "\n### Response:\n",
    "sysPrefix": "### System\n",
    "sysSuffix": "\n"
  }
}
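Each entry above maps model-name substrings to the prefix/suffix strings that wrap system and user text for that prompt format. An illustrative helper (not the project's actual API) showing how a template might be selected and applied:

import json

def wrap_prompt(templates_path: str, model_name: str, system: str, user: str) -> str:
    with open(templates_path) as f:
        templates = json.load(f)
    # Pick the first template whose "models" substrings match, else "default".
    tpl = templates["default"]
    for entry in templates.values():
        if any(m in model_name for m in entry.get("models", [])):
            tpl = entry
            break
    return (tpl["sysPrefix"] + system + tpl["sysSuffix"]
            + tpl["prefix"] + user + tpl["suffix"])

# wrap_prompt("llms.json", "dolphin-mistral", "You are helpful.", "Hi!")
# would select the ChatML entry because "dolphin" appears in the model name.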
@ -1,32 +0,0 @@
sites:
  - name: The Intercept
    url: https://theintercept.com
    max_articles: 5
    days_back: 14
    summarize: True
    tts: off
    tts_voice: Kiel
    podcast: True
  - name: The New York Times
    url: https://www.nytimes.com
    max_articles: 10
    days_back: 7
    summarize: True
    tts: off
    tts_voice: Luna
    podcast: True
  - name: The Guardian
    url: https://theguardian.com
    max_articles: 10
    days_back: 7
    summarize: True
    tts: off
    tts_voice: Attenborough
    podcast: True
llm:
  model: llama3
tts:
  model: elevenlabs-v2
  voice: Luna
podcast: True
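Each site entry bounds how much gets processed via max_articles and days_back. A minimal sketch of that filter, assuming feed entries have already been parsed into dicts carrying a timezone-aware published datetime (the parsing step is not shown and is an assumption):

from datetime import datetime, timedelta, timezone

def recent_articles(entries: list[dict], max_articles: int, days_back: int) -> list[dict]:
    cutoff = datetime.now(timezone.utc) - timedelta(days=days_back)
    fresh = [e for e in entries if e["published"] >= cutoff]
    # Newest first, capped at max_articles.
    return sorted(fresh, key=lambda e: e["published"], reverse=True)[:max_articles]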
@ -1,6 +0,0 @@
DAILY_NOTE:
  YEAR: '%Y'
  MONTH: '%Y-%m %B'
  DAY: '%Y-%m-%d %A'
  DAY_SHORT: '%Y-%m-%d'
  DIR: '{{ HOME_DIR }}/Nextcloud/notes' # you can specify the absolute path or use '{{ HOME_DIR }}' followed by a relative path
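The strftime patterns above presumably determine where a given day's note lives under DIR. A small sketch under the assumption that notes are nested year/month/day; the exact layout is not confirmed by this file:

from datetime import date
from pathlib import Path

def daily_note_path(base_dir: str, day: date) -> Path:
    return (Path(base_dir)
            / day.strftime("%Y")                    # YEAR
            / day.strftime("%Y-%m %B")              # MONTH
            / f'{day.strftime("%Y-%m-%d %A")}.md')  # DAY

# daily_note_path("~/Nextcloud/notes", date(2024, 7, 4))
# -> ~/Nextcloud/notes/2024/2024-07 July/2024-07-04 Thursday.md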
@ -1,37 +0,0 @@
- name: "CalFire_THP"
  url: "https://caltreesplans.resources.ca.gov/Caltrees/Report/ShowReport.aspx?module=TH_Document&reportID=492&reportType=LINK_REPORT_LIST"
  output_file: "{{ Dir.DATA }}/calfire_thp_data.json"
  content:
    type: "pdf"
    selector: null
    js_render: false
  processing:
    - name: "split_entries"
      type: "regex_split"
      pattern: '(\d+-\d+-\d+-\w+)'
    - name: "filter_entries"
      type: "keyword_filter"
      keywords: ["Sierra Pacific", "SPI", "Land & Timber"]
    - name: "extract_data"
      type: "regex_extract"
      extractions:
        - name: "Harvest Document"
          pattern: '(\d+-\d+-\d+-\w+)'
        - name: "Land Owner"
          pattern: '((?:SIERRA PACIFIC|SPI|.*?LAND & TIMBER).*?)(?=\d+-\d+-\d+-\w+|\Z)'
          flags: ["DOTALL", "IGNORECASE"]
        - name: "Location"
          pattern: '((?:MDBM|HBM):.*?)(?=(?:SIERRA PACIFIC|SPI|.*?LAND & TIMBER)|\Z)'
          flags: ["DOTALL"]
        - name: "Total Acres"
          pattern: '(\d+\.\d+)\s+acres'
        - name: "Watershed"
          pattern: 'Watershed:\s+(.+)'
  post_processing:
    - name: "extract_plss_coordinates"
      type: "regex_extract"
      field: "Location"
      pattern: '(\w+): T(\d+)([NSEW]) R(\d+)([NSEW]) S(\d+)'
      output_field: "PLSS Coordinates"
      all_matches: true
      format: "{0}: T{1}{2} R{3}{4} S{5}"
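The processing list above describes a three-stage pipeline: split the extracted report text on harvest-document IDs, keep entries that mention the configured keywords, then pull named fields with the extraction regexes. A hedged sketch of those stages; function names are illustrative and the repo's real scraper may differ:

import re

def split_entries(text: str, pattern: str) -> list[str]:
    parts = re.split(pattern, text)
    # With a capturing group, re.split alternates [pre, id, body, id, body, ...];
    # stitch each id back onto the text that follows it.
    return [parts[i] + parts[i + 1] for i in range(1, len(parts) - 1, 2)]

def keyword_filter(entries: list[str], keywords: list[str]) -> list[str]:
    return [e for e in entries if any(k.lower() in e.lower() for k in keywords)]

def regex_extract(entry: str, extractions: list[dict]) -> dict:
    record = {}
    for ex in extractions:
        flags = sum(getattr(re, f) for f in ex.get("flags", []))
        m = re.search(ex["pattern"], entry, flags)
        record[ex["name"]] = m.group(1) if m else None
    return record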
43
sijapi/config/sd-example.json
Normal file
@ -0,0 +1,43 @@
{
  "scenes": [
    {
      "scene": "default",
      "triggers": [""],
      "API_PPrompt": "(Highly-detailed) image of ",
      "API_SPrompt": "; ((masterpiece)); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
      "API_NPrompt": "`oil, paint splash, oil effect, dots, paint, freckles, liquid effect, canvas frame, 3d, bad art, asian, illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, nsfw, explicit, topless`",
      "LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic images. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.",
      "LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this scene description to its essence, staying true to what it describes: ",
      "workflows": [{"workflow": "turbo.json", "size": "1024x768"}]
    },
    {
      "scene": "portrait",
      "triggers": [
        "portrait",
        "profile",
        "headshot"
      ],
      "API_PPrompt": "Highly-detailed portrait photo of ",
      "API_SPrompt": "; attractive, cute, (((masterpiece))); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
      "API_NPrompt": "canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, nsfw, nude",
      "LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic portrait photos. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided, focusing in particular on the pictured individual's eyes, pose, and other distinctive features. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the rest of the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words. Remember that the final product will be a still image, and action verbs are not as helpful as simple descriptions of position, appearance, background, etc.",
      "LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this portrait photo to its essence: ",
      "workflows": [
        {
          "workflow": "selfie.json",
          "size": "768x1024"
        }
      ]
    },
    {
      "scene": "wallpaper",
      "triggers": ["wallpaper"],
      "API_PPrompt": "Stunning widescreen image of ",
      "API_SPrompt": ", masterpiece, (subtle:0.7), (nuanced:0.6), best quality, ultra detailed, ultra high resolution, 8k, (documentary:0.3), cinematic, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, earthporn, (eliot porter:0.6), (frans lanting:0.4), (daniel kordan:0.6), landscapephotography, ultra detailed, earth tones, moody",
      "API_NPrompt": "FastNegativeV2, (easynegative:0.5), canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, Photoshop, video game, anime, cartoon, fake, tiling, out of frame, bad art, bad anatomy, 3d render, nsfw, worst quality, low quality, text, watermark, (Thomas Kinkade:0.5), sentimental, kitsch, kitschy, twee, commercial, holiday card, modern, futuristic, urban, comic, cartoon, FastNegativeV2, epiCNegative, easynegative, verybadimagenegative_v1.3",
      "LLM_SysMsg": "You are a helpful AI who assists in generating prompts that will be used to generate highly realistic images. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.",
      "LLM_PrePrompt": "Using a series of words or sentence fragments separated by commas, describe a professional landscape photograph of a striking scene of nature. You can select any place on Earth that a young model from the Pacific Northwest is likely to travel to. Focus on describing the content and composition of the image. Only use words and phrases that are visually descriptive. This model is especially fond of wild and rugged places, mountains. She favors dark muted earth tones, dramatic lighting, and interesting juxtapositions between foreground and background, or center of frame and outer frame areas. Avoid cliche situations; instead strive for nuance and originality in composition and environment.",
      "workflows": [{"workflow": "landscape.json", "size": "1160x768"}]
    }
  ]
}
102
sijapi/config/sd.json
Normal file
@ -0,0 +1,102 @@
|
||||||
|
{
|
||||||
|
"scenes": [
|
||||||
|
{
|
||||||
|
"scene": "default",
|
||||||
|
"triggers": [""],
|
||||||
|
"API_PPrompt": "(Highly-detailed) image of ",
|
||||||
|
"API_SPrompt": "; ((masterpiece)); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
|
||||||
|
"API_NPrompt": "`oil, paint splash, oil effect, dots, paint, freckles, liquid effect, canvas frame, 3d, bad art, asian, illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, nsfw, explicit, topless`",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic images. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.",
|
||||||
|
"LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this scene description to its essence, staying true to what it describes: ",
|
||||||
|
"workflows": [{"workflow": "turbo.json", "size": "1024x768"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scene": "landscape",
|
||||||
|
"triggers": ["lanscape", "vista", "scenic", "pastoral", "mountains", "outdoor", "outside", "adventure"],
|
||||||
|
"API_PPrompt": "Moody landscape photograph of ",
|
||||||
|
"API_SPrompt": ", masterpiece, (cinematic:0.5), beautiful lighting, dynamic lighting, (subtle:0.4), (nuanced:0.3), subdued, fine detail, best quality, ultra detailed, ultra high resolution, 8k, (documentary:0.3), cinematic, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, pastoral, earthporn, (eliot porter:0.6), (frans lanting:0.4), (daniel kordan:0.6), landscapephotography, ultra detailed, extremely sharp, insane detail, 8k, earth tones, moody",
|
||||||
|
"API_NPrompt": "FastNegativeV2, (easynegative:0.5), canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, Photoshop, video game, anime, cartoon, fake, tiling, out of frame, bad art, bad anatomy, 3d render, nsfw, worst quality, low quality, text, watermark, (Thomas Kinkade:0.5), sentimental, kitsch, kitschy, twee, commercial, holiday card, modern, futuristic, urban, comic, cartoon, FastNegativeV2, epiCNegative, easynegative, verybadimagenegative_v1.3",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic images. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.",
|
||||||
|
"LLM_PrePrompt": "Using a series of words or sentence fragments separated by commas, describe a professional landscape photograph of a striking scene of nature. You can select any place on Earth that a young model from the Pacific Northwest is likely to travel to. Focus on describing the content and composition of the image. Only use words and phrases that are visually descriptive. This model is especially fond of wild and rugged places, mountains. She favors dark muted earth tones, dramatic lighting, and interesting juxtapositions between foreground and background, or center of frame and outer frame areas. Avoid cliche situations; instread strive for nuance and originality in composition and environment.",
|
||||||
|
"workflows": [{"workflow": "landscape.json", "size": "1080x800"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scene": "wallpaper",
|
||||||
|
"triggers": ["landscape", "vista", "scenic", "pastoral", "mountains", "outdoor", "outside", "adventure"],
|
||||||
|
"API_PPrompt": "",
|
||||||
|
"API_SPrompt": ", masterpiece, cinematic, beautiful lighting, subtle, nuanced, fine detail, best quality, filmic, moody, dynamic lighting, realistic, wallpaper, landscape photography, professional, pastoral, earthporn, landscapephotography, ultra detailed, extremely sharp, insane detail, 8k, earth tones, moody",
|
||||||
|
"API_NPrompt": "FastNegativeV2, easynegative, canvas frame, 3d, bad art, illustrated, deformed, blurry, duplicate, video game, render, anime, cartoon, fake, tiling, out of frame, bad art, bad anatomy, 3d render, nsfw, worst quality, low quality, text, watermark, Thomas Kinkade, sentimental, kitsch, kitschy, twee, commercial, holiday card, comic, cartoon, FastNegativeV2, epiCNegative, easynegative, verybadimagenegative_v1.3",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic images. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the prompt. Always use the most visuallnsive_wilsony descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words.",
|
||||||
|
"LLM_PrePrompt": "Using a series of words or sentence fragments separated by commas, describe a professional landscape photograph of a striking scene of nature. Focus on describing the content and composition of the image. Only use words and phrases that are visually descriptive. Favors dark tones, dramatic lighting, and interesting juxtapositions between foreground and background, or center of frame and outer frame areas. Avoid cliche situations; instead strive for nuance and originality in composition and environment.",
|
||||||
|
"workflows": [{"workflow": "wallpaper.json", "size": "1024x512"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scene": "portrait",
|
||||||
|
"triggers": [
|
||||||
|
"portrait",
|
||||||
|
"profile",
|
||||||
|
"headshot"
|
||||||
|
],
|
||||||
|
"API_PPrompt": "Highly-detailed portrait photo of ",
|
||||||
|
"API_SPrompt": "; attractive, cute, (((masterpiece))); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
|
||||||
|
"API_NPrompt": "canvas frame, 3d, ((bad art)), illustrated, deformed, blurry, duplicate, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, nsfw, nude",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic portrait photos. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided, focusing in particular on the pictured individual's eyes, pose, and other distinctive features. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the rest of the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words. Remember that the final product will be a still image, and action verbs are not as helpful as simple descriptions of position, appearance, background, etc.",
|
||||||
|
"LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this portrait photo to its essence: ",
|
||||||
|
"workflows": [
|
||||||
|
{
|
||||||
|
"workflow": "selfie.json",
|
||||||
|
"size": "768x1024"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scene": "doggystyle",
|
||||||
|
"triggers": [
|
||||||
|
"doggystyle",
|
||||||
|
"doggy-style",
|
||||||
|
"doggy style",
|
||||||
|
"from behind"
|
||||||
|
],
|
||||||
|
"API_PPrompt": "Explicit highly-detailed image of ",
|
||||||
|
"API_SPrompt": "; ((from behind)), (((doggystyle))), explicit, ((tiny breasts)), flat chest, (((young nude girl))), cute, (covered in cum), sex, porn, nsfw, (((masterpiece))); ((beautiful lighting)), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
|
||||||
|
"API_NPrompt": "canvas frame, 3d, ((bad art)), ((asian)), illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, censored, pg13",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic erotic/pornographic images. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided, focusing in particular on 18+ details concerning body parts, position, etc. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the rest of the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts. Do not include any words or descriptions based on other senses or emotions. Strive to show rather than tell. Space is limited, so be efficient with your words. Remember that the final product will be a still image, and action verbs are not as helpful as simple descriptions of position, appearance, body parts and fluids, etc.",
|
||||||
|
"LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this pornographic scene description of doggystyle sex to its erotic essence, staying true to what it describes no matter how kinky or taboo: ",
|
||||||
|
"workflows": [
|
||||||
|
{
|
||||||
|
"workflow": "xxx/doggy.json",
|
||||||
|
"size": "1024x768",
|
||||||
|
"preset_values": {
|
||||||
|
"node": "133",
|
||||||
|
"type": "depth_map",
|
||||||
|
"key": "image",
|
||||||
|
"values": [
|
||||||
|
"xxx/doggy/DOGGY_001.png",
|
||||||
|
"xxx/doggy/DOGGY_002.png",
|
||||||
|
"xxx/doggy/DOGGY_003.png"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scene": "nsfw",
|
||||||
|
"triggers": [
|
||||||
|
"nude",
|
||||||
|
"naked",
|
||||||
|
"undressed"
|
||||||
|
],
|
||||||
|
"API_PPrompt": "Explicit highly-detailed image of ",
|
||||||
|
"API_SPrompt": "; ((tiny breasts)), flat chest, (((young nude girl))), cute, nsfw, (((masterpiece))); ((beautiful lighting), subdued, fine detail, extremely sharp, 8k, insane detail, dynamic lighting, cinematic, best quality, ultra detailed.",
|
||||||
|
"API_NPrompt": "canvas frame, 3d, ((bad art)), ((asian)), illustrated, deformed, blurry, duplicate, bad art, bad anatomy, worst quality, low quality, watermark, FastNegativeV2, (easynegative:0.5), epiCNegative, easynegative, verybadimagenegative_v1.3, censored, pg13",
|
||||||
|
"LLM_SysMsg": "You are a helpful AI who assists in refining prompts that will be used to generate highly realistic erotic art. Upon receiving a prompt, you refine it by simplifying and distilling it to its essence, retaining the most visually evocative and distinct elements from what was provided, focusing in particular on details concerning body parts, position, etc. You may infer some visual details that were not provided in the prompt, so long as they are consistent with the rest of the prompt. Always use the most visually descriptive terms possible, and avoid any vague or abstract concepts.",
|
||||||
|
"LLM_PrePrompt": "Using the most visually descriptive sentence fragments, phrases, and words, distill this image of a young girl or woman to its erotic essence: ",
|
||||||
|
"workflows": [
|
||||||
|
{
|
||||||
|
"workflow": "nude.json",
|
||||||
|
"size": "768x1024"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -1,8 +0,0 @@
GLOBAL:
  - DEPRECATED
OPENAI: 'sk-YOUR_OPENAI_API_KEY'
VISUALCROSSING: 'YOUR_VC_API_KEY'
COURTLISTENER: 'YOUR_CL_API_KEY'
ELEVENLABS: 'YOUR_11L_API_KEY'
CLOUDFLARE: 'YOUR_CF_API_KEY'
TIMING: 'YOUR_TIMING_API_KEY'
@ -1,5 +0,0 @@
forwarding_rules:
  - source: "test.domain.com:80"
    destination: "100.64.64.14:8080"
  - source: "100.64.64.20:1024"
    destination: "127.0.0.1:1025"
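Each rule maps a listening source to a destination to relay traffic toward. A toy asyncio sketch of that kind of TCP forwarding, purely illustrative: it omits hostname resolution, TLS, and the error handling a real forwarder would need, and it is not the repo's implementation:

import asyncio

async def forward(src_host: str, src_port: int, dst_host: str, dst_port: int) -> None:
    async def handle(reader, writer):
        remote_reader, remote_writer = await asyncio.open_connection(dst_host, dst_port)

        async def pipe(r, w):
            try:
                while data := await r.read(65536):
                    w.write(data)
                    await w.drain()
            finally:
                w.close()

        # Relay bytes in both directions until either side closes.
        await asyncio.gather(pipe(reader, remote_writer), pipe(remote_reader, writer))

    server = await asyncio.start_server(handle, src_host, src_port)
    async with server:
        await server.serve_forever()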
@ -1,163 +0,0 @@
# config/sys.yaml
# Primary configuration file
HOST: "0.0.0.0"
PORT: 4444
BIND: "{{ HOST }}:{{ PORT }}"
URL: "https://api.sij.ai"
MAX_CPU_CORES: 7

PUBLIC:
  - /id
  - /ip
  - /health
  - /img/
  - /cl/dockets
  - /cl/search
  - /cd/alert

TRUSTED_SUBNETS:
  - "127.0.0.1/32"
  - "10.13.37.0/24"

SUBNET_BROADCAST: "10.255.255.255"

MODULES:
  archivist: off
  asr: on
  cal: off
  cf: off
  dist: off
  email: off
  gis: on
  ig: off
  img: off
  llm: on
  news: on
  note: on
  rag: off
  scrape: on
  serve: on
  sys: on
  timing: off
  tts: on
  weather: on

LOGS:
  default: info
  init: debug
  classes: debug
  database: debug
  serialization: debug
  utilities: debug
  logs: debug
  main: debug
  archivist: info
  asr: info
  cal: info
  cf: info
  dist: info
  email: info
  gis: debug
  ig: info
  img: debug
  llm: debug
  news: debug
  note: debug
  rag: debug
  scrape: debug
  serve: info
  sys: debug
  timing: warn
  tts: info
  weather: info

EXTENSIONS:
  archivist: off
  courtlistener: off
  elevenlabs: on
  macnotify: on
  pgp: on
  shellfish: on
  xtts: off
  url_shortener: off

KEYS:
  - "sk-NhrtQwCHNdK5sRZC"
  - "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"

TZ: "America/Los_Angeles"

GARBAGE:
  COLLECTION_INTERVAL: 60 * 60
  TTL: 60 * 60 * 24

# Database configuration
POOL:
  - ts_id: 'server1'
    ts_ip: '192.168.0.10'
    app_port: 4444
    db_port: 5432
    db_name: mydb
    db_user: dbuser
    db_pass: 'password123'
    ssh_port: 22
    ssh_user: sshuser
    ssh_pass: 'password456'
    path: '/Users/sij/workshop/sijapi'
    tmux: '/opt/homebrew/bin/tmux'
    tailscale: '/usr/local/bin/tailscale'
    conda: '/Users/sij/miniforge3/bin/mamba'
    conda_env: '/Users/sij/miniforge3/envs/sijapi'
    vitals: '/Users/sij/workshop/scripts/gitea/pathScripts/vitals'
    vpn: '/Users/sij/workshop/scripts/gitea/pathScripts/vpn'
  - ts_id: 'server2'
    ts_ip: '192.168.0.11'
    app_port: 4444
    db_port: 5432
    db_name: mydb
    db_user: dbuser
    db_pass: 'password123'
    ssh_port: 22
    ssh_user: sshuser
    ssh_pass: 'password456'
    path: '/Users/sij/workshop/sijapi'
    tmux: '/opt/homebrew/bin/tmux'
    tailscale: '/usr/local/bin/tailscale'
    conda: '/Users/sij/miniforge3/bin/mamba'
    conda_env: '/Users/sij/miniforge3/envs/sijapi'
    vitals: '/Users/sij/workshop/scripts/gitea/pathScripts/vitals'
    vpn: '/Users/sij/workshop/scripts/gitea/pathScripts/vpn'
  - ts_id: 'server3'
    ts_ip: '192.168.0.12'
    app_port: 4444
    db_port: 5432
    db_name: mydb
    db_user: dbuser
    db_pass: 'password123'
    ssh_port: 22
    ssh_user: sshuser
    ssh_pass: 'password456'
    path: '/Users/sij/workshop/sijapi'
    tmux: '/opt/homebrew/bin/tmux'
    tailscale: '/usr/local/bin/tailscale'
    conda: '/Users/sij/miniforge3/bin/mamba'
    conda_env: '/Users/sij/miniforge3/envs/sijapi'
    vitals: '/Users/sij/workshop/scripts/gitea/pathScripts/vitals'
    vpn: '/Users/sij/workshop/scripts/gitea/pathScripts/vpn'

TABLES:
  locations:
    primary_key: id
    use_guid: true
  dailyweather:
    primary_key: id
    use_guid: true
  hourlyweather:
    primary_key: id
    use_guid: true
  click_logs:
    primary_key: id
    use_guid: true
  short_urls:
    primary_key: id
    use_guid: true
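PUBLIC routes and TRUSTED_SUBNETS presumably decide which requests may skip API-key auth. A sketch of just the matching logic, under the assumption that trailing-slash entries act as path prefixes; the surrounding middleware is not shown here and is not confirmed by this file:

from ipaddress import ip_address, ip_network

PUBLIC = ["/id", "/ip", "/health", "/img/", "/cl/dockets", "/cl/search", "/cd/alert"]
TRUSTED_SUBNETS = ["127.0.0.1/32", "10.13.37.0/24"]

def is_public(path: str) -> bool:
    # Exact match, or prefix match for entries that end with a slash.
    return any(path == p or (p.endswith("/") and path.startswith(p)) for p in PUBLIC)

def is_trusted(client_ip: str) -> bool:
    addr = ip_address(client_ip)
    return any(addr in ip_network(net) for net in TRUSTED_SUBNETS)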
@ -1,6 +0,0 @@
ID: sij-mbp16
IP: 100.64.64.20
SUBNET: 100.64.64.0/24
MDNS: starling-sailfin.ts.net
API_KEY: '{{ SECRET.TAILSCALE_API_KEY }}'
ADDRESS: 'http://{{ ID }}.{{ MDNS }}'
@ -1,24 +0,0 @@
default: xtts
email: xtts
webclip: elevenlabs
rss: xtts

xtts:
  default: joanne

elevenlabs:
  default: Victoria
  voices:
    alloy: "E3A1KVbKoWSIKSZwSUsW"
    echo: "b42GBisbu9r5m5n6pHF7"
    fable: "KAX2Y6tTs0oDWq7zZXW7"
    onyx: "clQb8NxY08xZ6mX6wCPE"
    nova: "6TayTBKLMOsghG7jYuMX"
    shimmer: "E7soeOyjpmuZFurvoxZ2"
    Luna: "6TayTBKLMOsghG7jYuMX"
    Sangye: "E7soeOyjpmuZFurvoxZ2"
    Herzog: "KAX2Y6tTs0oDWq7zZXW7"
    Attenborough: "b42GBisbu9r5m5n6pHF7"
    Victoria: "7UBkHqZOtFRLq6cSMQQg"

  api_key: "{{ SECRET.ELEVENLABS }}"
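The elevenlabs block maps friendly voice names to ElevenLabs voice IDs, with a per-channel default above it. A minimal lookup sketch, assuming the YAML above has been loaded into a dict (function name is illustrative):

def resolve_voice(tts_cfg: dict, name: str | None) -> str:
    el = tts_cfg["elevenlabs"]
    voices = el["voices"]
    # Fall back to the configured default voice when the name is missing or unknown.
    return voices.get(name) or voices[el["default"]]

# resolve_voice(cfg, "Attenborough") -> "b42GBisbu9r5m5n6pHF7"
# resolve_voice(cfg, None)           -> voices["Victoria"]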
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -1,586 +0,0 @@
|
||||||
{
|
|
||||||
"4": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "Other/dreamshaperXL_v21TurboDPMSDE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"6": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"7": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"9": {
|
|
||||||
"inputs": {
|
|
||||||
"filename_prefix": "API_",
|
|
||||||
"images": [
|
|
||||||
"27",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SaveImage",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Save Image"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"11": {
|
|
||||||
"inputs": {
|
|
||||||
"batch_size": 1,
|
|
||||||
"width": 1023,
|
|
||||||
"height": 1025,
|
|
||||||
"resampling": "bicubic",
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0.1,
|
|
||||||
"frame": 1,
|
|
||||||
"scale": 2,
|
|
||||||
"octaves": 8,
|
|
||||||
"persistence": 3,
|
|
||||||
"lacunarity": 4,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0,
|
|
||||||
"clamp_min": 0,
|
|
||||||
"clamp_max": 1,
|
|
||||||
"seed": 477685752000597,
|
|
||||||
"device": "cpu",
|
|
||||||
"optional_vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Latent (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Noise 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"13": {
|
|
||||||
"inputs": {
|
|
||||||
"seed": 686880884118590,
|
|
||||||
"steps": 10,
|
|
||||||
"cfg": 1.8,
|
|
||||||
"sampler_name": "dpmpp_sde",
|
|
||||||
"scheduler": "karras",
|
|
||||||
"start_at_step": 0,
|
|
||||||
"end_at_step": 10000,
|
|
||||||
"enable_denoise": "false",
|
|
||||||
"denoise": 1,
|
|
||||||
"add_noise": "enable",
|
|
||||||
"return_with_leftover_noise": "disable",
|
|
||||||
"noise_type": "brownian_fractal",
|
|
||||||
"noise_blending": "cuberp",
|
|
||||||
"noise_mode": "additive",
|
|
||||||
"scale": 1,
|
|
||||||
"alpha_exponent": 1,
|
|
||||||
"modulator": 1.05,
|
|
||||||
"sigma_tolerance": 0.5,
|
|
||||||
"boost_leading_sigma": "true",
|
|
||||||
"guide_use_noise": "true",
|
|
||||||
"model": [
|
|
||||||
"4",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"positive": [
|
|
||||||
"20",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"negative": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"latent_image": [
|
|
||||||
"11",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ppf_settings": [
|
|
||||||
"14",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ch_settings": [
|
|
||||||
"15",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Power KSampler Advanced (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Power KSampler Advanced 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"14": {
|
|
||||||
"inputs": {
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0,
|
|
||||||
"frame": 0,
|
|
||||||
"scale": 2.5,
|
|
||||||
"octaves": 5,
|
|
||||||
"persistence": 4,
|
|
||||||
"lacunarity": 3,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": -0.1,
|
|
||||||
"contrast": -0.1
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"15": {
|
|
||||||
"inputs": {
|
|
||||||
"frequency": 332.65500000000003,
|
|
||||||
"octaves": 32,
|
|
||||||
"persistence": 1.4000000000000001,
|
|
||||||
"num_colors": 128,
|
|
||||||
"color_tolerance": 0.05,
|
|
||||||
"angle_degrees": 45,
|
|
||||||
"brightness": -0.25,
|
|
||||||
"contrast": 0,
|
|
||||||
"blur": 1.3
|
|
||||||
},
|
|
||||||
"class_type": "Cross-Hatch Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Cross-Hatch Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"20": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning_1": [
|
|
||||||
"6",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"conditioning_2": [
|
|
||||||
"21",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningCombine",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Conditioning (Combine)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"21": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_StylePrompt",
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"23": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningZeroOut",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ConditioningZeroOut"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"26": {
|
|
||||||
"inputs": {
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"39",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageUpscaleWithModel",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Upscale Image (using Model)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"27": {
|
|
||||||
"inputs": {
|
|
||||||
"factor": 0.25,
|
|
||||||
"interpolation_mode": "bicubic",
|
|
||||||
"image": [
|
|
||||||
"30",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "JWImageResizeByFactor",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Resize by Factor"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"30": {
|
|
||||||
"inputs": {
|
|
||||||
"blur_radius": 3,
|
|
||||||
"sigma": 1.5,
|
|
||||||
"image": [
|
|
||||||
"26",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageBlur",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ImageBlur"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"38": {
|
|
||||||
"inputs": {
|
|
||||||
"samples": [
|
|
||||||
"13",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "VAEDecode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "VAE Decode"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"39": {
|
|
||||||
"inputs": {
|
|
||||||
"noise_seed": 690275685743412,
|
|
||||||
"steps": 16,
|
|
||||||
"cfg": 7.5,
|
|
||||||
"base_ratio": 0.85,
|
|
||||||
"denoise": 0.25,
|
|
||||||
"scaled_width": [
|
|
||||||
"60",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scaled_height": [
|
|
||||||
"64",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"noise_offset": 1,
|
|
||||||
"refiner_strength": 1,
|
|
||||||
"softness": 0,
|
|
||||||
"base_model": [
|
|
||||||
"40",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_positive": [
|
|
||||||
"45",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_negative": [
|
|
||||||
"46",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_model": [
|
|
||||||
"42",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_positive": [
|
|
||||||
"43",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_negative": [
|
|
||||||
"44",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"41",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"sampler_name": [
|
|
||||||
"47",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scheduler": [
|
|
||||||
"47",
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSDXLImage2ImageSampler2",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image2Image Sampler v2 (Searge)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"40": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/SDXLLandskaper_v1-000003.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"48",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"48",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"41": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/realismEngineSDXL_v20VAE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"42": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/sdxl_refiner_1.0.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"43": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"44": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"45": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"46": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"47": {
|
|
||||||
"inputs": {
|
|
||||||
"sampler_name": "dpmpp_2m_sde",
|
|
||||||
"scheduler": "karras"
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSamplerInputs",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Sampler Settings"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"48": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/add-detail-xl.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"41",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"41",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"50": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_PrePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"51": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_NegativePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"52": {
|
|
||||||
"inputs": {
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Image Size to Number",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Size to Number"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"55": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"58": {
|
|
||||||
"inputs": {
|
|
||||||
"model_name": "4x_foolhardy_Remacri.pth"
|
|
||||||
},
|
|
||||||
"class_type": "UpscaleModelLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Upscale Model"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"59": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"60": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"55",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"64": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"59",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"65": {
|
|
||||||
"inputs": {
|
|
||||||
"value": 2
|
|
||||||
},
|
|
||||||
"class_type": "JWFloat",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,586 +0,0 @@
|
||||||
{
|
|
||||||
"4": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "Other/dreamshaperXL_v21TurboDPMSDE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"6": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"7": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"9": {
|
|
||||||
"inputs": {
|
|
||||||
"filename_prefix": "API_",
|
|
||||||
"images": [
|
|
||||||
"27",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SaveImage",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Save Image"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"11": {
|
|
||||||
"inputs": {
|
|
||||||
"batch_size": 1,
|
|
||||||
"width": 1023,
|
|
||||||
"height": 1025,
|
|
||||||
"resampling": "bicubic",
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0.1,
|
|
||||||
"frame": 1,
|
|
||||||
"scale": 2,
|
|
||||||
"octaves": 8,
|
|
||||||
"persistence": 3,
|
|
||||||
"lacunarity": 4,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0,
|
|
||||||
"clamp_min": 0,
|
|
||||||
"clamp_max": 1,
|
|
||||||
"seed": 477685752000597,
|
|
||||||
"device": "cpu",
|
|
||||||
"optional_vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Latent (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Noise 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"13": {
|
|
||||||
"inputs": {
|
|
||||||
"seed": 686880884118590,
|
|
||||||
"steps": 10,
|
|
||||||
"cfg": 1.8,
|
|
||||||
"sampler_name": "dpmpp_sde",
|
|
||||||
"scheduler": "karras",
|
|
||||||
"start_at_step": 0,
|
|
||||||
"end_at_step": 10000,
|
|
||||||
"enable_denoise": "false",
|
|
||||||
"denoise": 1,
|
|
||||||
"add_noise": "enable",
|
|
||||||
"return_with_leftover_noise": "disable",
|
|
||||||
"noise_type": "brownian_fractal",
|
|
||||||
"noise_blending": "cuberp",
|
|
||||||
"noise_mode": "additive",
|
|
||||||
"scale": 1,
|
|
||||||
"alpha_exponent": 1,
|
|
||||||
"modulator": 1.05,
|
|
||||||
"sigma_tolerance": 0.5,
|
|
||||||
"boost_leading_sigma": "true",
|
|
||||||
"guide_use_noise": "true",
|
|
||||||
"model": [
|
|
||||||
"4",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"positive": [
|
|
||||||
"20",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"negative": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"latent_image": [
|
|
||||||
"11",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ppf_settings": [
|
|
||||||
"14",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ch_settings": [
|
|
||||||
"15",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Power KSampler Advanced (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Power KSampler Advanced 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"14": {
|
|
||||||
"inputs": {
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0,
|
|
||||||
"frame": 0,
|
|
||||||
"scale": 2.5,
|
|
||||||
"octaves": 5,
|
|
||||||
"persistence": 4,
|
|
||||||
"lacunarity": 3,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": -0.1,
|
|
||||||
"contrast": -0.1
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"15": {
|
|
||||||
"inputs": {
|
|
||||||
"frequency": 332.65500000000003,
|
|
||||||
"octaves": 32,
|
|
||||||
"persistence": 1.4000000000000001,
|
|
||||||
"num_colors": 128,
|
|
||||||
"color_tolerance": 0.05,
|
|
||||||
"angle_degrees": 45,
|
|
||||||
"brightness": -0.25,
|
|
||||||
"contrast": 0,
|
|
||||||
"blur": 1.3
|
|
||||||
},
|
|
||||||
"class_type": "Cross-Hatch Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Cross-Hatch Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"20": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning_1": [
|
|
||||||
"6",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"conditioning_2": [
|
|
||||||
"21",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningCombine",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Conditioning (Combine)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"21": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_StylePrompt",
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"23": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningZeroOut",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ConditioningZeroOut"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"26": {
|
|
||||||
"inputs": {
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"39",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageUpscaleWithModel",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Upscale Image (using Model)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"27": {
|
|
||||||
"inputs": {
|
|
||||||
"factor": 0.25,
|
|
||||||
"interpolation_mode": "bicubic",
|
|
||||||
"image": [
|
|
||||||
"30",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "JWImageResizeByFactor",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Resize by Factor"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"30": {
|
|
||||||
"inputs": {
|
|
||||||
"blur_radius": 3,
|
|
||||||
"sigma": 1.5,
|
|
||||||
"image": [
|
|
||||||
"26",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageBlur",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ImageBlur"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"38": {
|
|
||||||
"inputs": {
|
|
||||||
"samples": [
|
|
||||||
"13",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "VAEDecode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "VAE Decode"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"39": {
|
|
||||||
"inputs": {
|
|
||||||
"noise_seed": 690275685743412,
|
|
||||||
"steps": 16,
|
|
||||||
"cfg": 7.5,
|
|
||||||
"base_ratio": 0.85,
|
|
||||||
"denoise": 0.25,
|
|
||||||
"scaled_width": [
|
|
||||||
"60",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scaled_height": [
|
|
||||||
"64",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"noise_offset": 1,
|
|
||||||
"refiner_strength": 1,
|
|
||||||
"softness": 0,
|
|
||||||
"base_model": [
|
|
||||||
"40",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_positive": [
|
|
||||||
"45",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_negative": [
|
|
||||||
"46",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_model": [
|
|
||||||
"42",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_positive": [
|
|
||||||
"43",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_negative": [
|
|
||||||
"44",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"41",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"sampler_name": [
|
|
||||||
"47",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scheduler": [
|
|
||||||
"47",
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSDXLImage2ImageSampler2",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image2Image Sampler v2 (Searge)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"40": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/SDXLLandskaper_v1-000003.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"48",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"48",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"41": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/realismEngineSDXL_v20VAE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"42": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/sdxl_refiner_1.0.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"43": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"44": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"45": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"46": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"47": {
|
|
||||||
"inputs": {
|
|
||||||
"sampler_name": "dpmpp_2m_sde",
|
|
||||||
"scheduler": "karras"
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSamplerInputs",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Sampler Settings"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"48": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/add-detail-xl.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"41",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"41",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"50": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_PrePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"51": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_NegativePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"52": {
|
|
||||||
"inputs": {
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Image Size to Number",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Size to Number"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"55": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"58": {
|
|
||||||
"inputs": {
|
|
||||||
"model_name": "4x_foolhardy_Remacri.pth"
|
|
||||||
},
|
|
||||||
"class_type": "UpscaleModelLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Upscale Model"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"59": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"60": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"55",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"64": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"59",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"65": {
|
|
||||||
"inputs": {
|
|
||||||
"value": 2
|
|
||||||
},
|
|
||||||
"class_type": "JWFloat",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,486 +0,0 @@
|
||||||
{
|
|
||||||
"4": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/hassansdxl_v10.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"6": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"17",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"15",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"7": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"18",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"15",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"12": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/styleWegg.safetensors",
|
|
||||||
"strength_model": 0.3,
|
|
||||||
"strength_clip": 0.25,
|
|
||||||
"model": [
|
|
||||||
"91",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"91",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"13": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/add-detail-xl.safetensors",
|
|
||||||
"strength_model": 0.2,
|
|
||||||
"strength_clip": 0.2,
|
|
||||||
"model": [
|
|
||||||
"12",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"12",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"14": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/amazing_portraits_xl_v1b.safetensors",
|
|
||||||
"strength_model": 0.5,
|
|
||||||
"strength_clip": 0.45,
|
|
||||||
"model": [
|
|
||||||
"13",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"13",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"15": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/sd_xl_offset_example-lora_1.0.safetensors",
|
|
||||||
"strength_model": 0.2,
|
|
||||||
"strength_clip": 0.15,
|
|
||||||
"model": [
|
|
||||||
"53",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"53",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"17": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_PrePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"18": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_NegativePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"23": {
|
|
||||||
"inputs": {
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0,
|
|
||||||
"frame": 0,
|
|
||||||
"scale": 5,
|
|
||||||
"octaves": 8,
|
|
||||||
"persistence": 1.5,
|
|
||||||
"lacunarity": 2,
|
|
||||||
"exponent": 4,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"24": {
|
|
||||||
"inputs": {
|
|
||||||
"frequency": 320,
|
|
||||||
"octaves": 12,
|
|
||||||
"persistence": 1.5,
|
|
||||||
"num_colors": 16,
|
|
||||||
"color_tolerance": 0.05,
|
|
||||||
"angle_degrees": 45,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0,
|
|
||||||
"blur": 2.5
|
|
||||||
},
|
|
||||||
"class_type": "Cross-Hatch Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Cross-Hatch Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"37": {
|
|
||||||
"inputs": {
|
|
||||||
"seed": 923916094743956
|
|
||||||
},
|
|
||||||
"class_type": "Seed",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Seed"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"38": {
|
|
||||||
"inputs": {
|
|
||||||
"batch_size": 1.3125,
|
|
||||||
"width": [
|
|
||||||
"95",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"height": [
|
|
||||||
"95",
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"resampling": "nearest-exact",
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0,
|
|
||||||
"frame": 0,
|
|
||||||
"scale": 10,
|
|
||||||
"octaves": 8,
|
|
||||||
"persistence": 1.5,
|
|
||||||
"lacunarity": 3,
|
|
||||||
"exponent": 5,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0,
|
|
||||||
"clamp_min": 0,
|
|
||||||
"clamp_max": 1,
|
|
||||||
"seed": [
|
|
||||||
"37",
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"device": "cpu",
|
|
||||||
"optional_vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"ppf_settings": [
|
|
||||||
"23",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Latent (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Noise 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"43": {
|
|
||||||
"inputs": {
|
|
||||||
"seed": [
|
|
||||||
"37",
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"steps": 32,
|
|
||||||
"cfg": 8.5,
|
|
||||||
"sampler_name": "dpmpp_2m_sde",
|
|
||||||
"scheduler": "karras",
|
|
||||||
"start_at_step": 0,
|
|
||||||
"end_at_step": 10000,
|
|
||||||
"enable_denoise": "false",
|
|
||||||
"denoise": 1,
|
|
||||||
"add_noise": "enable",
|
|
||||||
"return_with_leftover_noise": "disable",
|
|
||||||
"noise_type": "brownian_fractal",
|
|
||||||
"noise_blending": "cuberp",
|
|
||||||
"noise_mode": "additive",
|
|
||||||
"scale": 1,
|
|
||||||
"alpha_exponent": 1,
|
|
||||||
"modulator": 1,
|
|
||||||
"sigma_tolerance": 0.5,
|
|
||||||
"boost_leading_sigma": "false",
|
|
||||||
"guide_use_noise": "true",
|
|
||||||
"model": [
|
|
||||||
"15",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"positive": [
|
|
||||||
"98",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"negative": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"latent_image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ppf_settings": [
|
|
||||||
"23",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ch_settings": [
|
|
||||||
"24",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Power KSampler Advanced (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Power KSampler Advanced 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"44": {
|
|
||||||
"inputs": {
|
|
||||||
"samples": [
|
|
||||||
"43",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "VAEDecode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "VAE Decode"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"45": {
|
|
||||||
"inputs": {
|
|
||||||
"filename_prefix": "API_",
|
|
||||||
"images": [
|
|
||||||
"44",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SaveImage",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Save Image"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"53": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/PerfectEyesXL.safetensors",
|
|
||||||
"strength_model": 0.5,
|
|
||||||
"strength_clip": 0.5,
|
|
||||||
"model": [
|
|
||||||
"14",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"14",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"89": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/ahxl_v1.safetensors",
|
|
||||||
"strength_model": 0.4,
|
|
||||||
"strength_clip": 0.33,
|
|
||||||
"model": [
|
|
||||||
"92",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"93",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"90": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/age.safetensors",
|
|
||||||
"strength_model": -0.8,
|
|
||||||
"strength_clip": -0.7000000000000001,
|
|
||||||
"model": [
|
|
||||||
"89",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"89",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"91": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/StokeRealV1.safetensors",
|
|
||||||
"strength_model": 0.2,
|
|
||||||
"strength_clip": 0.2,
|
|
||||||
"model": [
|
|
||||||
"90",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"90",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"92": {
|
|
||||||
"inputs": {
|
|
||||||
"input": 0.36,
|
|
||||||
"middle": 0.5,
|
|
||||||
"out": 0.64,
|
|
||||||
"model1": [
|
|
||||||
"4",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"model2": [
|
|
||||||
"94",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ModelMergeBlocks",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ModelMergeBlocks"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"93": {
|
|
||||||
"inputs": {
|
|
||||||
"ratio": 0.45,
|
|
||||||
"clip1": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"clip2": [
|
|
||||||
"94",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPMergeSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIPMergeSimple"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"94": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/dreamshaperXL_alpha2Xl10.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"95": {
|
|
||||||
"inputs": {
|
|
||||||
"width_ratio": 5,
|
|
||||||
"height_ratio": 7,
|
|
||||||
"side_length": 1025,
|
|
||||||
"rounding_value": 64
|
|
||||||
},
|
|
||||||
"class_type": "AnyAspectRatio",
|
|
||||||
"_meta": {
|
|
||||||
"title": "AnyAspectRatio"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"96": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_StylePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"97": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"96",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"15",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"98": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning_1": [
|
|
||||||
"6",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"conditioning_2": [
|
|
||||||
"97",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningCombine",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Conditioning (Combine)"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,586 +0,0 @@
|
||||||
{
|
|
||||||
"4": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "Other/dreamshaperXL_v21TurboDPMSDE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"6": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"7": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"9": {
|
|
||||||
"inputs": {
|
|
||||||
"filename_prefix": "API_",
|
|
||||||
"images": [
|
|
||||||
"27",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SaveImage",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Save Image"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"11": {
|
|
||||||
"inputs": {
|
|
||||||
"batch_size": 1,
|
|
||||||
"width": 1023,
|
|
||||||
"height": 1025,
|
|
||||||
"resampling": "bicubic",
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0.1,
|
|
||||||
"frame": 1,
|
|
||||||
"scale": 2,
|
|
||||||
"octaves": 8,
|
|
||||||
"persistence": 3,
|
|
||||||
"lacunarity": 4,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": 0,
|
|
||||||
"contrast": 0,
|
|
||||||
"clamp_min": 0,
|
|
||||||
"clamp_max": 1,
|
|
||||||
"seed": 477685752000597,
|
|
||||||
"device": "cpu",
|
|
||||||
"optional_vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Latent (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Noise 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"13": {
|
|
||||||
"inputs": {
|
|
||||||
"seed": 686880884118590,
|
|
||||||
"steps": 10,
|
|
||||||
"cfg": 1.8,
|
|
||||||
"sampler_name": "dpmpp_sde",
|
|
||||||
"scheduler": "karras",
|
|
||||||
"start_at_step": 0,
|
|
||||||
"end_at_step": 10000,
|
|
||||||
"enable_denoise": "false",
|
|
||||||
"denoise": 1,
|
|
||||||
"add_noise": "enable",
|
|
||||||
"return_with_leftover_noise": "disable",
|
|
||||||
"noise_type": "brownian_fractal",
|
|
||||||
"noise_blending": "cuberp",
|
|
||||||
"noise_mode": "additive",
|
|
||||||
"scale": 1,
|
|
||||||
"alpha_exponent": 1,
|
|
||||||
"modulator": 1.05,
|
|
||||||
"sigma_tolerance": 0.5,
|
|
||||||
"boost_leading_sigma": "true",
|
|
||||||
"guide_use_noise": "true",
|
|
||||||
"model": [
|
|
||||||
"4",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"positive": [
|
|
||||||
"20",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"negative": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"latent_image": [
|
|
||||||
"11",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ppf_settings": [
|
|
||||||
"14",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"ch_settings": [
|
|
||||||
"15",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Power KSampler Advanced (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Power KSampler Advanced 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"14": {
|
|
||||||
"inputs": {
|
|
||||||
"X": 0,
|
|
||||||
"Y": 0,
|
|
||||||
"Z": 0,
|
|
||||||
"evolution": 0,
|
|
||||||
"frame": 0,
|
|
||||||
"scale": 2.5,
|
|
||||||
"octaves": 5,
|
|
||||||
"persistence": 4,
|
|
||||||
"lacunarity": 3,
|
|
||||||
"exponent": 2,
|
|
||||||
"brightness": -0.1,
|
|
||||||
"contrast": -0.1
|
|
||||||
},
|
|
||||||
"class_type": "Perlin Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Perlin Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"15": {
|
|
||||||
"inputs": {
|
|
||||||
"frequency": 332.65500000000003,
|
|
||||||
"octaves": 32,
|
|
||||||
"persistence": 1.4000000000000001,
|
|
||||||
"num_colors": 128,
|
|
||||||
"color_tolerance": 0.05,
|
|
||||||
"angle_degrees": 45,
|
|
||||||
"brightness": -0.25,
|
|
||||||
"contrast": 0,
|
|
||||||
"blur": 1.3
|
|
||||||
},
|
|
||||||
"class_type": "Cross-Hatch Power Fractal Settings (PPF Noise)",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Cross-Hatch Power Fractal Settings 🦚"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"20": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning_1": [
|
|
||||||
"6",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"conditioning_2": [
|
|
||||||
"21",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningCombine",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Conditioning (Combine)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"21": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_StylePrompt",
|
|
||||||
"clip": [
|
|
||||||
"4",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"23": {
|
|
||||||
"inputs": {
|
|
||||||
"conditioning": [
|
|
||||||
"7",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ConditioningZeroOut",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ConditioningZeroOut"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"26": {
|
|
||||||
"inputs": {
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"39",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageUpscaleWithModel",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Upscale Image (using Model)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"27": {
|
|
||||||
"inputs": {
|
|
||||||
"factor": 0.25,
|
|
||||||
"interpolation_mode": "bicubic",
|
|
||||||
"image": [
|
|
||||||
"30",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "JWImageResizeByFactor",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Resize by Factor"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"30": {
|
|
||||||
"inputs": {
|
|
||||||
"blur_radius": 3,
|
|
||||||
"sigma": 1.5,
|
|
||||||
"image": [
|
|
||||||
"26",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "ImageBlur",
|
|
||||||
"_meta": {
|
|
||||||
"title": "ImageBlur"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"38": {
|
|
||||||
"inputs": {
|
|
||||||
"samples": [
|
|
||||||
"13",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"4",
|
|
||||||
2
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "VAEDecode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "VAE Decode"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"39": {
|
|
||||||
"inputs": {
|
|
||||||
"noise_seed": 690275685743412,
|
|
||||||
"steps": 16,
|
|
||||||
"cfg": 7.5,
|
|
||||||
"base_ratio": 0.85,
|
|
||||||
"denoise": 0.25,
|
|
||||||
"scaled_width": [
|
|
||||||
"60",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scaled_height": [
|
|
||||||
"64",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"noise_offset": 1,
|
|
||||||
"refiner_strength": 1,
|
|
||||||
"softness": 0,
|
|
||||||
"base_model": [
|
|
||||||
"40",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_positive": [
|
|
||||||
"45",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"base_negative": [
|
|
||||||
"46",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_model": [
|
|
||||||
"42",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_positive": [
|
|
||||||
"43",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"refiner_negative": [
|
|
||||||
"44",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"vae": [
|
|
||||||
"41",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"sampler_name": [
|
|
||||||
"47",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"scheduler": [
|
|
||||||
"47",
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"upscale_model": [
|
|
||||||
"58",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSDXLImage2ImageSampler2",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image2Image Sampler v2 (Searge)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"40": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/SDXLLandskaper_v1-000003.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"48",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"48",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"41": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/realismEngineSDXL_v20VAE.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"42": {
|
|
||||||
"inputs": {
|
|
||||||
"ckpt_name": "SDXL/sdxl_refiner_1.0.safetensors"
|
|
||||||
},
|
|
||||||
"class_type": "CheckpointLoaderSimple",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Checkpoint"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"43": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"44": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"42",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"45": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"50",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"46": {
|
|
||||||
"inputs": {
|
|
||||||
"text": [
|
|
||||||
"51",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"40",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "CLIPTextEncode",
|
|
||||||
"_meta": {
|
|
||||||
"title": "CLIP Text Encode (Prompt)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"47": {
|
|
||||||
"inputs": {
|
|
||||||
"sampler_name": "dpmpp_2m_sde",
|
|
||||||
"scheduler": "karras"
|
|
||||||
},
|
|
||||||
"class_type": "SeargeSamplerInputs",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Sampler Settings"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"48": {
|
|
||||||
"inputs": {
|
|
||||||
"lora_name": "SDXL/add-detail-xl.safetensors",
|
|
||||||
"strength_model": 1,
|
|
||||||
"strength_clip": 1,
|
|
||||||
"model": [
|
|
||||||
"41",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"clip": [
|
|
||||||
"41",
|
|
||||||
1
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "LoraLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load LoRA"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"50": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_PrePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"51": {
|
|
||||||
"inputs": {
|
|
||||||
"text": "API_NegativePrompt"
|
|
||||||
},
|
|
||||||
"class_type": "JWStringMultiline",
|
|
||||||
"_meta": {
|
|
||||||
"title": "String (Multiline)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"52": {
|
|
||||||
"inputs": {
|
|
||||||
"image": [
|
|
||||||
"38",
|
|
||||||
0
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"class_type": "Image Size to Number",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Image Size to Number"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"55": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"58": {
|
|
||||||
"inputs": {
|
|
||||||
"model_name": "4x_foolhardy_Remacri.pth"
|
|
||||||
},
|
|
||||||
"class_type": "UpscaleModelLoader",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Load Upscale Model"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"59": {
|
|
||||||
"inputs": {
|
|
||||||
"op": "a * b",
|
|
||||||
"a": [
|
|
||||||
"52",
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"b": [
|
|
||||||
"65",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"c": 0
|
|
||||||
},
|
|
||||||
"class_type": "SeargeFloatMath",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float Math"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"60": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"55",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"64": {
|
|
||||||
"inputs": {
|
|
||||||
"value": [
|
|
||||||
"59",
|
|
||||||
0
|
|
||||||
],
|
|
||||||
"mode": "round"
|
|
||||||
},
|
|
||||||
"class_type": "JWFloatToInteger",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float to Integer"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"65": {
|
|
||||||
"inputs": {
|
|
||||||
"value": 2
|
|
||||||
},
|
|
||||||
"class_type": "JWFloat",
|
|
||||||
"_meta": {
|
|
||||||
"title": "Float"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,28 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCikW67UW0RpncJ
h4Ha9HumZ/WzgEZWRWkgksVJIOJ8t1PftctizLUlz+xMWNl+Volp4crxnPpnczis
pOXU4g65XoFHHpF9nhF/3YDgxo5BDEM/mIIKEO9LFkIBQVBdE85qXnIVot5MfuNj
HeyEs7doRMBxilOSR/DkT8bTWu7m5yeHlF58iYVOxxssGhP3bo7CcAcaZD1LJBnP
Df+UBqzWQ9as903p5bFixHy2kVz8Qkd5k5tyIQ/tXqlhRfLLHG4AYHmBbS06CAg0
nEpKUeQx4l1J/ykAjQTwhHf70xv1z0p28mHcr5ib4UvpYK9fMM6FKWenwlqA3qrK
zQUJQ7E/AgMBAAECggEAQ5H/XIxzsSpnv+Y66y9DVd9QGNPwaFthXtCif8rTWNM6
YXnGl8JOaPELXpBvljuR0hivqc19pxIVNG01uk5boGDPiygBgRz6WRNQRh1Bc3gN
W5mgM17ml2cg+DSVmppo6X1oHeYcT99N1BzT+jRYv1YURx0fr2WHkt413hOlyQMR
b8ir/TOBx3olg4KBPDuysRC5BCIr3Mkz4jsh+9wVIOReKVezsy7nxJVzipcxOyZO
9VGgvlw4XLrRTOJEv4e3ldcg219j5KEGsJ4FFSziSmpj5fN4Vt+JmY7nueSHyL6P
3hX52lRfOcTXTEeiEV2cXkm3h8uQ3zfiZRYi3P0DQQKBgQDXGBZc3WnfXim0u1EV
JzZFwxBS7SHkyGgnd50ak6e9yDbdxOuYIOo4mBlc3ofd20EfT4TvR7Xyw+PD2fWJ
+isdwCEb9JZZ1H6RDGIzSDYXGNeGId4kMKBZdmKpEeLgStihsrYp/nxtwcE/8A7N
jCEKZj1ld7QfbQlGT/NJ4Jj80wKBgQDBfBpth6vMyCETKMJVqYd2qhVnJKiFLfRn
OD/Ck6xwUuedbfe9M34wNO3Pn2Xvu1xVsQGb2dmlT345Iq9Z1nbZCGXyY9yfLnTV
fz7F2utjUjaJtuiSb52SgX7MWZ8E4nbqqKnC4SYSIlaeuL9KK3r/x6bcNLAYPcdk
qKHexDkGZQKBgF0JGyshzhiChzGYUBMBOfVk0Ru9XAq0MHDZyQdk1Io/HpRAB9Nu
cUD3cQj9a/EnU/yyDYLeFrIhztO44/7BSYL9xpRr79h9FB2zKIqb8mF9KkPnREmN
Ct6HWVdd2C9B0H/oZ+i0HafvxaHdONnpgaUY4feQlkV9iSRzknzi++lnAoGAXOuu
/X80oMpUKBFhEyaxqemREdHnJN6nC5NV+6pUHDWUimSvn6vFJH2m4BlbKUC/3V9+
uExtXBjLM8FWmTyIIz8HRttyrvfuoEHV8ctrVG29R3ISS5FTCXMrZBR+bCgemB+c
N71NPVREaUGsjIBJN+G4XvTmxR2WTt81rfhqsokCgYEA1It9e9Ut2Krzf2FaPGLG
ozlKhWadMNntthg3uxze80Rx8WSvgJQdbVpdbni2B/9xdYBIIljW/LGivYBrCSSp
aXFpXL7ZGkvl3b0MkojfghIpXVGqu+8ISDtFgL0B1gZ5hq9xMBl94fLVfQgC9Cy6
uvDHlz+fjWaWKYUPiouAtVs=
-----END PRIVATE KEY-----
@ -1,19 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDAzCCAeugAwIBAgIUc+EtilZslnS7N6MAx0u9HeP83wAwDQYJKoZIhvcNAQEL
BQAwETEPMA0GA1UEAwwGcHl0aG9uMB4XDTI0MDYwODEyNTcxM1oXDTI1MDYwODEy
NTcxM1owETEPMA0GA1UEAwwGcHl0aG9uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
MIIBCgKCAQEAopFuu1FtEaZ3CYeB2vR7pmf1s4BGVkVpIJLFSSDifLdT37XLYsy1
Jc/sTFjZflaJaeHK8Zz6Z3M4rKTl1OIOuV6BRx6RfZ4Rf92A4MaOQQxDP5iCChDv
SxZCAUFQXRPOal5yFaLeTH7jYx3shLO3aETAcYpTkkfw5E/G01ru5ucnh5RefImF
TscbLBoT926OwnAHGmQ9SyQZzw3/lAas1kPWrPdN6eWxYsR8tpFc/EJHeZObciEP
7V6pYUXyyxxuAGB5gW0tOggINJxKSlHkMeJdSf8pAI0E8IR3+9Mb9c9KdvJh3K+Y
m+FL6WCvXzDOhSlnp8JagN6qys0FCUOxPwIDAQABo1MwUTAdBgNVHQ4EFgQUS74L
HD4Cdzh1ajatbvSHNQXIVvAwHwYDVR0jBBgwFoAUS74LHD4Cdzh1ajatbvSHNQXI
VvAwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAhpwtVubDjsyq
/LiTwpXKhjB/eFb6Yse782Iq+9rsiGGhsN88IA25fKgsJ2AIkR/KA7QSle3ds+1Q
EY9/vqpWnfBdpvOi7oV7ozBe+t/5JLu1GQBzg+cVa4iLAWYCiqg1d5NDdIcYMfsM
Yq2a3eQoP8Xbj3fFMXdNopXARa1d1zHB3ugXIJYinwMlS0EoGXVQVaHhemOh8GwW
keRaA6TDTBFsp0Gl4jv/NrisAt4qg+rlqr0mNcQK92vRX65mDWa/cQKwpUH8+Seq
Jl717NnsIGcqYWg8SSvVlkbFfxYhwYICXT824MAdSZtpHNCN/TegxsviYnlDyJKj
OJzn4fCxnQ==
-----END CERTIFICATE-----
@ -1 +0,0 @@
{"token_type": "Bearer", "scope": "Calendars.Read Calendars.ReadWrite User.Read profile openid email", "expires_in": 3962, "ext_expires_in": 3962, "access_token": "eyJ0eXAiOiJKV1QiLCJub25jZSI6IldDeU91YXllN1RFX2FPM0F1alhlYmtvYTdVRHpUR1dVNWt5d3lJeDZ1MGciLCJhbGciOiJSUzI1NiIsIng1dCI6InE3UDFOdnh1R1F3RE4yVGFpTW92alo4YVp3cyIsImtpZCI6InE3UDFOdnh1R1F3RE4yVGFpTW92alo4YVp3cyJ9.eyJhdWQiOiIwMDAwMDAwMy0wMDAwLTAwMDAtYzAwMC0wMDAwMDAwMDAwMDAiLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9iYWQ3ODA0OC1hNmUwLTQ3YjEtYTI0Yi00MDNjNDQ0YWEzNDkvIiwiaWF0IjoxNzE4Mzc0NzA5LCJuYmYiOjE3MTgzNzQ3MDksImV4cCI6MTcxODM3ODk3MiwiYWNjdCI6MCwiYWNyIjoiMSIsImFpbyI6IkFWUUFxLzhYQUFBQVRnWHJ0Q1pCVjlPa1M2WldldHVVSHNMSFN0LzErYVcxT1BSSjVOWjJEL1Bzd05mY1Fxb0JTNEFZRmhLR3UvaE5TNnNWOGtLQUpmcDNNTzdqRUlNMEZrY1VaZ0IyREh4cWdOK3lUQVBUYnRVPSIsImFtciI6WyJwd2QiLCJtZmEiXSwiYXBwX2Rpc3BsYXluYW1lIjoicHl0aG9uIiwiYXBwaWQiOiJjZThjYmQyNC1mMTQ2LTRkYzctOGVlNy01MWQ5YjY5ZGVjNTkiLCJhcHBpZGFjciI6IjEiLCJmYW1pbHlfbmFtZSI6IkluY2UtSm9oYW5uc2VuIiwiZ2l2ZW5fbmFtZSI6IlNhbmd5ZSIsImlkdHlwIjoidXNlciIsImlwYWRkciI6IjY4LjIzNS40NC4yMDIiLCJuYW1lIjoiU2FuZ3llIEluY2UtSm9oYW5uc2VuIiwib2lkIjoiMWNiMWQwNDAtZmM1OS00MjMxLTllMDUtOWRjNGI0MzJjY2MxIiwicGxhdGYiOiI1IiwicHVpZCI6IjEwMDMyMDAyQTNGQjU4RjIiLCJyaCI6IjAuQVgwQVNJRFh1dUNtc1VlaVMwQThSRXFqU1FNQUFBQUFBQUFBd0FBQUFBQUFBQUMxQUk4LiIsInNjcCI6IkNhbGVuZGFycy5SZWFkIENhbGVuZGFycy5SZWFkV3JpdGUgVXNlci5SZWFkIHByb2ZpbGUgb3BlbmlkIGVtYWlsIiwic2lnbmluX3N0YXRlIjpbImttc2kiXSwic3ViIjoiV0FYVFdIR0puVFhBTjlncmIyamlEU3U4ZENOMmc0dDFacERiVHlwM1k3USIsInRlbmFudF9yZWdpb25fc2NvcGUiOiJOQSIsInRpZCI6ImJhZDc4MDQ4LWE2ZTAtNDdiMS1hMjRiLTQwM2M0NDRhYTM0OSIsInVuaXF1ZV9uYW1lIjoic2FuZ3llaWpAd2VzdGVybmxhdy5vcmciLCJ1cG4iOiJzYW5neWVpakB3ZXN0ZXJubGF3Lm9yZyIsInV0aSI6InFHcVlEODRzaDBHMFBfSEdldlVXQUEiLCJ2ZXIiOiIxLjAiLCJ3aWRzIjpbImI3OWZiZjRkLTNlZjktNDY4OS04MTQzLTc2YjE5NGU4NTUwOSJdLCJ4bXNfaWRyZWwiOiIxIDIiLCJ4bXNfc3QiOnsic3ViIjoieXhjdzFhV1FiM2VrX0FvNFRuRy11SDN6ZndGbVRRUmMxVGpFaEdqZ2p2WSJ9LCJ4bXNfdGNkdCI6MTY0ODY4MTc1Mn0.ssgIrbYo1SPNusoB9bNIB7pLxCmwBKhox__KOnwRRtnE63vbfGWAl53ww1KpNWPdDfC3p94yuPybTRqjZnTPluv1oJgGINml4AleUnZJnJttRsFHvGflzKOLtXnzmhQGUBXxu7QucKTCMH4J36neeQAWthITMwCHbaGmSy0RLotaIsoEHIufxR9ZEYD4XP5e3sFX54eSnyf4P3GgHHC1y5xxWUlemG4G1BRas8i7oX9o-gqRFube6BMtCLir_HMTNPfrCG-lhd9msLhc6e_WJSmLMHQ7RVLo-GlTMY9UouE190GzBBVKUrTg462I3kP_GayO1kt6qopBrwnF6bDUsw", "refresh_token": "0.AX0ASIDXuuCmsUeiS0A8REqjSSS9jM5G8cdNjudR2bad7Fm1AI8.AgABAwEAAAApTwJmzXqdR4BN2miheQMYAgDs_wUA9P_rTWFRiXWkWxvihyyXonsZPLrulRvnKKRlZ9PxKUltEOQsxjlg86xvCYzAS6dYeDBQiQxRAS_WEuuXVmTqUWDVwqgwQOa3BCbLwxQhPwfG-O9uFY6D239Jo8rdXTrf8XOntGs6fCn3wuo5kvJr2D-FGRA_EepltvRxZgrWdHROKuqoL_ArjLDdoFP7zM95MKhVYTmCO7LCM7u6O9ItU4_6y2_lH864zUivT1LFG8-h9sx0Ln3wd8LBP3P5GSeXwtQlkbNpj1FNDl_Ex5SwGCTM7uDHj0dn5CdUMgLkOcAC__HJdzmlEryTquoXcjd1RAmkq1MqAGD7QQreI7NQTZXwTcjoMwiBg92-bk-_o2ajeIVqzgOVBQIu1W8gkN2F7PAqRc5lGB-2mAXchqKMoL31CLUPxgTMBjWgR4waAjfZXT4h2WqXAAdGFy2nzUJAjyEQa9ZW1J5B6asCf3cVJQwI6nWIN7OphrXkGHl0ffpfrC-skVG3N2vrelAutRvyvWi4bbMqAZNglRrkTn5G_kULmnyydZBcFSc5uPmKD7OkfBD5UpTa_KLTjYexWRVsBfG9czIVxOh3ojnnza9BjrN5cHwHhzPM1t67E5iqronvT2OR_r-4BerUfRNHXrxwrLvDUEZwQ8o5IRs2N5FH0y_QN049o_NTgqytCj6wrIB4T-ZBUK2AsFej7ipdHAMYtWLZdoAo1o4nMuPBb4syN0VYd1sLUP-RQ5iv7wIkMWmNjhjIErIktZ134pGK9TlWa904H6HUin0qNTXyTmX2feE0nBlm6xJbO1ISfFkaf8aEjcAMfeu9qiArKQqUgvY", "expires_at": 1718378971}
@ -1,51 +0,0 @@
#!/bin/bash

set -e # Exit immediately if a command exits with a non-zero status.

# Set variables
DB_NAME="sij"
DB_USER="sij"
OSM_FILE="north-america-latest.osm.pbf"
FLAT_NODES="/Users/sij/workshop/sijapi/sijapi/data/db/flat-nodes.bin"

# Ensure the directory for flat-nodes exists
mkdir -p "$(dirname "$FLAT_NODES")"

# Determine total system memory in MB
TOTAL_MEM=$(sysctl hw.memsize | awk '{print $2 / 1024 / 1024}')

# Calculate cache size (50% of total memory, max 32GB)
CACHE_SIZE=$(echo "scale=0; $TOTAL_MEM * 0.5 / 1" | bc)
CACHE_SIZE=$(( CACHE_SIZE > 32768 ? 32768 : CACHE_SIZE ))

# Calculate number of processes (number of CPU cores minus 1, min 1)
NUM_PROCESSES=$(sysctl -n hw.ncpu)
NUM_PROCESSES=$(( NUM_PROCESSES > 1 ? NUM_PROCESSES - 1 : 1 ))

echo "Starting OSM data import..."

# Run osm2pgsql
osm2pgsql -d $DB_NAME \
    --create \
    --slim \
    -G \
    --hstore \
    --tag-transform-script /opt/homebrew/Cellar/osm2pgsql/1.11.0_1/share/osm2pgsql/openstreetmap-carto.lua \
    -C $CACHE_SIZE \
    --number-processes $NUM_PROCESSES \
    -S /opt/homebrew/Cellar/osm2pgsql/1.11.0_1/share/osm2pgsql/default.style \
    --prefix osm \
    -H localhost \
    -P 5432 \
    -U $DB_USER \
    --flat-nodes $FLAT_NODES \
    $OSM_FILE

echo "OSM data import completed. Creating indexes..."

# Create indexes (adjust table names if necessary)
psql -d $DB_NAME -U $DB_USER -c "CREATE INDEX IF NOT EXISTS idx_osm_point_way ON osm_point USING GIST (way);"
psql -d $DB_NAME -U $DB_USER -c "CREATE INDEX IF NOT EXISTS idx_osm_line_way ON osm_line USING GIST (way);"
psql -d $DB_NAME -U $DB_USER -c "CREATE INDEX IF NOT EXISTS idx_osm_polygon_way ON osm_polygon USING GIST (way);"

echo "Import completed and indexes created."
@ -1,110 +0,0 @@
# database.py
import yaml
from pathlib import Path
from typing import Any, Dict, List
from dotenv import load_dotenv
from pydantic import BaseModel
from datetime import datetime
from loguru import logger
from sqlalchemy import text, create_engine
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base
from sqlalchemy import Column, Integer, String, DateTime, JSON, Text
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.sql import func
import os
import uuid
from .logs import get_logger
from .serialization import serialize

l = get_logger(__name__)

Base = declarative_base()

BASE_DIR = Path(__file__).resolve().parent
CONFIG_DIR = BASE_DIR / "config"
ENV_PATH = CONFIG_DIR / ".env"
load_dotenv(ENV_PATH)
TS_ID = os.environ.get('TS_ID')

class Database:
    @classmethod
    def init(cls, config_name: str):
        return cls(config_name)

    def __init__(self, config_path: str):
        self.config = self.load_config(config_path)
        self.engines: Dict[str, Any] = {}
        self.sessions: Dict[str, Any] = {}
        self.local_ts_id = self.get_local_ts_id()

    def load_config(self, config_path: str) -> Dict[str, Any]:
        base_path = Path(__file__).parent.parent
        full_path = base_path / "sijapi" / "config" / f"{config_path}.yaml"

        with open(full_path, 'r') as file:
            config = yaml.safe_load(file)

        return config

    def get_local_ts_id(self) -> str:
        return os.environ.get('TS_ID')

    async def initialize_engines(self):
        for db_info in self.config['POOL']:
            url = f"postgresql+asyncpg://{db_info['db_user']}:{db_info['db_pass']}@{db_info['ts_ip']}:{db_info['db_port']}/{db_info['db_name']}"
            try:
                engine = create_async_engine(url, pool_pre_ping=True, pool_size=5, max_overflow=10)
                self.engines[db_info['ts_id']] = engine
                self.sessions[db_info['ts_id']] = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
                l.info(f"Initialized engine and session for {db_info['ts_id']}")

                async with engine.begin() as conn:
                    await conn.run_sync(Base.metadata.create_all)
                l.info(f"Ensured tables exist for {db_info['ts_id']}")
            except Exception as e:
                l.error(f"Failed to initialize engine for {db_info['ts_id']}: {str(e)}")

        if self.local_ts_id not in self.sessions:
            l.error(f"Failed to initialize session for local server {self.local_ts_id}")

    async def read(self, query: str, **kwargs):
        if self.local_ts_id not in self.sessions:
            l.error(f"No session found for local server {self.local_ts_id}. Database may not be properly initialized.")
            return None

        async with self.sessions[self.local_ts_id]() as session:
            try:
                result = await session.execute(text(query), kwargs)
                rows = result.fetchall()
                if rows:
                    columns = result.keys()
                    return [dict(zip(columns, row)) for row in rows]
                else:
                    return []
            except Exception as e:
                l.error(f"Failed to execute read query: {str(e)}")
                return None

    async def write(self, query: str, **kwargs):
        if self.local_ts_id not in self.sessions:
            l.error(f"No session found for local server {self.local_ts_id}. Database may not be properly initialized.")
            return None

        async with self.sessions[self.local_ts_id]() as session:
            try:
                serialized_kwargs = {key: serialize(value) for key, value in kwargs.items()}
                result = await session.execute(text(query), serialized_kwargs)
                await session.commit()
                return result
            except Exception as e:
                l.error(f"Failed to execute write query: {str(e)}")
                l.error(f"Query: {query}")
                l.error(f"Kwargs: {kwargs}")
                l.error(f"Serialized kwargs: {serialized_kwargs}")
                return None

    async def close(self):
        for engine in self.engines.values():
            await engine.dispose()
        l.info("Closed all database connections")
@ -1,333 +0,0 @@
#database.py
import json
import yaml
import time
import aiohttp
import asyncio
import traceback
from datetime import datetime as dt_datetime, date
from tqdm.asyncio import tqdm
import reverse_geocoder as rg
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, TypeVar, ClassVar
from dotenv import load_dotenv
from pydantic import BaseModel, Field, create_model, PrivateAttr
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo
from srtm import get_data
import os
import sys
from loguru import logger
from sqlalchemy import text, select, func, and_
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker, declarative_base, make_transient
from sqlalchemy.exc import OperationalError
from sqlalchemy import Column, Integer, String, DateTime, JSON, Text
import uuid
from sqlalchemy import Column, String, DateTime, Text, ARRAY
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.sql import func
from urllib.parse import urljoin
import hashlib
import random
from .logs import get_logger
from .serialization import json_dumps, json_serial, serialize

l = get_logger(__name__)

Base = declarative_base()

BASE_DIR = Path(__file__).resolve().parent
CONFIG_DIR = BASE_DIR / "config"
ENV_PATH = CONFIG_DIR / ".env"
load_dotenv(ENV_PATH)
TS_ID = os.environ.get('TS_ID')


class QueryTracking(Base):
    __tablename__ = 'query_tracking'

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    origin_ts_id = Column(String, nullable=False)
    query = Column(Text, nullable=False)
    args = Column(JSON)
    executed_at = Column(DateTime(timezone=True), server_default=func.now())
    completed_by = Column(ARRAY(String), default=[])
    result_checksum = Column(String(32))


class Database:
    SYNC_COOLDOWN = 30 # seconds

    @classmethod
    def init(cls, config_name: str):
        return cls(config_name)

    def __init__(self, config_path: str):
        self.config = self.load_config(config_path)
        self.engines: Dict[str, Any] = {}
        self.sessions: Dict[str, Any] = {}
        self.online_servers: set = set()
        self.local_ts_id = self.get_local_ts_id()
        self.last_sync_time = 0

    def load_config(self, config_path: str) -> Dict[str, Any]:
        base_path = Path(__file__).parent.parent
        full_path = base_path / "sijapi" / "config" / f"{config_path}.yaml"

        with open(full_path, 'r') as file:
            config = yaml.safe_load(file)

        return config

    def get_local_ts_id(self) -> str:
        return os.environ.get('TS_ID')

    async def initialize_engines(self):
        for db_info in self.config['POOL']:
            url = f"postgresql+asyncpg://{db_info['db_user']}:{db_info['db_pass']}@{db_info['ts_ip']}:{db_info['db_port']}/{db_info['db_name']}"
            try:
                engine = create_async_engine(url, pool_pre_ping=True, pool_size=5, max_overflow=10)
                self.engines[db_info['ts_id']] = engine
                self.sessions[db_info['ts_id']] = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
                l.info(f"Initialized engine and session for {db_info['ts_id']}")

                # Create tables if they don't exist
                async with engine.begin() as conn:
                    await conn.run_sync(Base.metadata.create_all)
                l.info(f"Ensured tables exist for {db_info['ts_id']}")
            except Exception as e:
                l.error(f"Failed to initialize engine for {db_info['ts_id']}: {str(e)}")

        if self.local_ts_id not in self.sessions:
            l.error(f"Failed to initialize session for local server {self.local_ts_id}")

    async def get_online_servers(self) -> List[str]:
        online_servers = []
        for ts_id, engine in self.engines.items():
            try:
                async with engine.connect() as conn:
                    await conn.execute(text("SELECT 1"))
                online_servers.append(ts_id)
                l.debug(f"Server {ts_id} is online")
            except OperationalError:
                l.warning(f"Server {ts_id} is offline")
        self.online_servers = set(online_servers)
        l.info(f"Online servers: {', '.join(online_servers)}")
        return online_servers

    async def read(self, query: str, **kwargs):
        if self.local_ts_id not in self.sessions:
            l.error(f"No session found for local server {self.local_ts_id}. Database may not be properly initialized.")
            return None

        async with self.sessions[self.local_ts_id]() as session:
            try:
                result = await session.execute(text(query), kwargs)
                rows = result.fetchall()
                if rows:
                    columns = result.keys()
                    return [dict(zip(columns, row)) for row in rows]
                else:
                    return []
            except Exception as e:
                l.error(f"Failed to execute read query: {str(e)}")
                return None

    async def write(self, query: str, **kwargs):
        if self.local_ts_id not in self.sessions:
            l.error(f"No session found for local server {self.local_ts_id}. Database may not be properly initialized.")
            return None

        async with self.sessions[self.local_ts_id]() as session:
            try:
                # Execute the write query locally
                serialized_kwargs = {key: serialize(value) for key, value in kwargs.items()}
                result = await session.execute(text(query), serialized_kwargs)
                await session.commit()

                # Initiate async operations
                asyncio.create_task(self._async_sync_operations(query, kwargs))

                # Return the result
|
|
||||||
return result
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Failed to execute write query: {str(e)}")
|
|
||||||
l.error(f"Query: {query}")
|
|
||||||
l.error(f"Kwargs: {kwargs}")
|
|
||||||
l.error(f"Serialized kwargs: {serialized_kwargs}")
|
|
||||||
l.error(f"Traceback: {traceback.format_exc()}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
async def _async_sync_operations(self, query: str, kwargs: dict):
|
|
||||||
try:
|
|
||||||
# Add the write query to the query_tracking table
|
|
||||||
await self.add_query_to_tracking(query, kwargs)
|
|
||||||
|
|
||||||
# Call /db/sync on all online servers
|
|
||||||
await self.call_db_sync_on_servers()
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Error in async sync operations: {str(e)}")
|
|
||||||
l.error(f"Traceback: {traceback.format_exc()}")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
async def add_query_to_tracking(self, query: str, kwargs: dict, result_checksum: str = None):
|
|
||||||
async with self.sessions[self.local_ts_id]() as session:
|
|
||||||
new_query = QueryTracking(
|
|
||||||
origin_ts_id=self.local_ts_id,
|
|
||||||
query=query,
|
|
||||||
args=json_dumps(kwargs),
|
|
||||||
completed_by=[self.local_ts_id],
|
|
||||||
result_checksum=result_checksum
|
|
||||||
)
|
|
||||||
session.add(new_query)
|
|
||||||
await session.commit()
|
|
||||||
l.info(f"Added query to tracking: {query[:50]}...")
|
|
||||||
|
|
||||||
|
|
||||||
async def sync_db(self):
|
|
||||||
current_time = time.time()
|
|
||||||
if current_time - self.last_sync_time < self.SYNC_COOLDOWN:
|
|
||||||
l.info(f"Skipping sync, last sync was less than {self.SYNC_COOLDOWN} seconds ago")
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
l.info("Starting database synchronization")
|
|
||||||
self.last_sync_time = current_time # Update the last sync time before starting
|
|
||||||
await self.pull_query_tracking_from_all_servers()
|
|
||||||
await self.execute_unexecuted_queries()
|
|
||||||
l.info("Database synchronization completed successfully")
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Error during database sync: {str(e)}")
|
|
||||||
l.error(f"Traceback: {traceback.format_exc()}")
|
|
||||||
finally:
|
|
||||||
# Ensure the cooldown is respected even if an error occurs
|
|
||||||
self.last_sync_time = max(self.last_sync_time, current_time)
|
|
||||||
|
|
||||||
|
|
||||||
async def pull_query_tracking_from_all_servers(self):
|
|
||||||
online_servers = await self.get_online_servers()
|
|
||||||
l.info(f"Pulling query tracking from {len(online_servers)} online servers")
|
|
||||||
|
|
||||||
for server_id in online_servers:
|
|
||||||
if server_id == self.local_ts_id:
|
|
||||||
continue # Skip local server
|
|
||||||
|
|
||||||
l.info(f"Pulling queries from server: {server_id}")
|
|
||||||
async with self.sessions[server_id]() as remote_session:
|
|
||||||
try:
|
|
||||||
result = await remote_session.execute(select(QueryTracking))
|
|
||||||
queries = result.scalars().all()
|
|
||||||
l.info(f"Retrieved {len(queries)} queries from server {server_id}")
|
|
||||||
|
|
||||||
async with self.sessions[self.local_ts_id]() as local_session:
|
|
||||||
for query in queries:
|
|
||||||
# Detach the object from its original session
|
|
||||||
make_transient(query)
|
|
||||||
|
|
||||||
existing = await local_session.execute(
|
|
||||||
select(QueryTracking).where(QueryTracking.id == query.id)
|
|
||||||
)
|
|
||||||
existing = existing.scalar_one_or_none()
|
|
||||||
|
|
||||||
if existing:
|
|
||||||
# Update existing query
|
|
||||||
existing.completed_by = list(set(existing.completed_by + query.completed_by))
|
|
||||||
l.debug(f"Updated existing query: {query.id}")
|
|
||||||
else:
|
|
||||||
# Create a new instance for the local session
|
|
||||||
new_query = QueryTracking(
|
|
||||||
id=query.id,
|
|
||||||
origin_ts_id=query.origin_ts_id,
|
|
||||||
query=query.query,
|
|
||||||
args=query.args,
|
|
||||||
executed_at=query.executed_at,
|
|
||||||
completed_by=query.completed_by,
|
|
||||||
result_checksum=query.result_checksum
|
|
||||||
)
|
|
||||||
local_session.add(new_query)
|
|
||||||
l.debug(f"Added new query: {query.id}")
|
|
||||||
await local_session.commit()
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Error pulling queries from server {server_id}: {str(e)}")
|
|
||||||
l.error(f"Traceback: {traceback.format_exc()}")
|
|
||||||
l.info("Finished pulling queries from all servers")
|
|
||||||
|
|
||||||
|
|
||||||
async def execute_unexecuted_queries(self):
|
|
||||||
async with self.sessions[self.local_ts_id]() as session:
|
|
||||||
unexecuted_queries = await session.execute(
|
|
||||||
select(QueryTracking).where(~QueryTracking.completed_by.any(self.local_ts_id)).order_by(QueryTracking.executed_at)
|
|
||||||
)
|
|
||||||
unexecuted_queries = unexecuted_queries.scalars().all()
|
|
||||||
|
|
||||||
l.info(f"Executing {len(unexecuted_queries)} unexecuted queries")
|
|
||||||
for query in unexecuted_queries:
|
|
||||||
try:
|
|
||||||
params = json.loads(query.args)
|
|
||||||
|
|
||||||
# Convert string datetime to datetime objects
|
|
||||||
for key, value in params.items():
|
|
||||||
if isinstance(value, str) and value.endswith(('Z', '+00:00')):
|
|
||||||
try:
|
|
||||||
params[key] = datetime.fromisoformat(value.rstrip('Z'))
|
|
||||||
except ValueError:
|
|
||||||
# If conversion fails, leave the original value
|
|
||||||
pass
|
|
||||||
|
|
||||||
async with session.begin():
|
|
||||||
await session.execute(text(query.query), params)
|
|
||||||
query.completed_by = list(set(query.completed_by + [self.local_ts_id]))
|
|
||||||
await session.commit()
|
|
||||||
l.info(f"Successfully executed query ID {query.id}")
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Failed to execute query ID {query.id}: {str(e)}")
|
|
||||||
await session.rollback()
|
|
||||||
l.info("Finished executing unexecuted queries")
|
|
||||||
|
|
||||||
async def call_db_sync_on_servers(self):
|
|
||||||
"""Call /db/sync on all online servers."""
|
|
||||||
online_servers = await self.get_online_servers()
|
|
||||||
l.info(f"Calling /db/sync on {len(online_servers)} online servers")
|
|
||||||
for server in self.config['POOL']:
|
|
||||||
if server['ts_id'] in online_servers and server['ts_id'] != self.local_ts_id:
|
|
||||||
try:
|
|
||||||
await self.call_db_sync(server)
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Failed to call /db/sync on {server['ts_id']}: {str(e)}")
|
|
||||||
l.info("Finished calling /db/sync on all servers")
|
|
||||||
|
|
||||||
async def call_db_sync(self, server):
|
|
||||||
url = f"http://{server['ts_ip']}:{server['app_port']}/db/sync"
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Bearer {server['api_key']}"
|
|
||||||
}
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
try:
|
|
||||||
async with session.post(url, headers=headers, timeout=30) as response:
|
|
||||||
if response.status == 200:
|
|
||||||
l.info(f"Successfully called /db/sync on {url}")
|
|
||||||
else:
|
|
||||||
l.warning(f"Failed to call /db/sync on {url}. Status: {response.status}")
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
l.debug(f"Timeout while calling /db/sync on {url}")
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Error calling /db/sync on {url}: {str(e)}")
|
|
||||||
|
|
||||||
async def ensure_query_tracking_table(self):
|
|
||||||
for ts_id, engine in self.engines.items():
|
|
||||||
try:
|
|
||||||
async with engine.begin() as conn:
|
|
||||||
await conn.run_sync(Base.metadata.create_all)
|
|
||||||
l.info(f"Ensured query_tracking table exists for {ts_id}")
|
|
||||||
except Exception as e:
|
|
||||||
l.error(f"Failed to create query_tracking table for {ts_id}: {str(e)}")
|
|
||||||
|
|
||||||
async def close(self):
|
|
||||||
for engine in self.engines.values():
|
|
||||||
await engine.dispose()
|
|
||||||
l.info("Closed all database connections")
|
|
|
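For orientation, here is a minimal sketch of how the Database class deleted above was presumably driven from application code. The config name "sys", the table used in the write example, and the startup/shutdown ordering are assumptions for illustration; they are not shown in this diff.

# Hypothetical usage sketch for the removed Database class above.
# "sys" (the config YAML name) and the "notes" table are placeholders.
import asyncio

async def demo():
    db = Database.init("sys")              # loads sijapi/config/sys.yaml
    await db.initialize_engines()          # one async engine per POOL entry
    await db.ensure_query_tracking_table()

    rows = await db.read("SELECT 1 AS ok") # returns a list of dicts, e.g. [{'ok': 1}]
    print(rows)

    # writes run locally first, then get queued for replication to online peers
    await db.write("INSERT INTO notes (body) VALUES (:body)", body="hello")

    await db.close()

if __name__ == "__main__":
    asyncio.run(demo())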
@@ -1,24 +0,0 @@
#!/Users/sij/miniforge3/envs/sijapi/bin/python

import sys
import asyncio
from fastapi import BackgroundTasks
from sijapi.routers.news import process_and_save_article

async def main():
    if len(sys.argv) != 2:
        print("Usage: python script.py <article_url>")
        sys.exit(1)

    url = sys.argv[1]
    bg_tasks = BackgroundTasks()

    try:
        result = await process_and_save_article(bg_tasks, url)
        print(result)
    except Exception as e:
        print(f"Error processing article: {str(e)}")
        sys.exit(1)

if __name__ == "__main__":
    asyncio.run(main())
BIN
sijapi/helpers/calendar/exportCal.scpt
Normal file
Binary file not shown.
2
sijapi/helpers/calendar/updateCal
Executable file
@@ -0,0 +1,2 @@
#!/bin/bash
osascript /Users/sij/workshop/sijapi/helpers/updateCal.scpt
BIN
sijapi/helpers/calendar/updateCal.scpt
Executable file
Binary file not shown.
BIN
sijapi/helpers/calendar/updateCal2.scpt
Normal file
Binary file not shown.
|
@ -1,237 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import requests
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from requests.adapters import HTTPAdapter
|
|
||||||
from urllib3.util.retry import Retry
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
# Environment variables for database connection
|
|
||||||
DB_NAME = os.getenv('DB_NAME', 'sij')
|
|
||||||
DB_USER = os.getenv('DB_USER', 'sij')
|
|
||||||
DB_PASSWORD = os.getenv('DB_PASSWORD', 'Synchr0!')
|
|
||||||
DB_HOST = os.getenv('DB_HOST', 'localhost')
|
|
||||||
DB_PORT = os.getenv('DB_PORT', '5432')
|
|
||||||
|
|
||||||
def get_feature_count(url):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'returnCountOnly': 'true',
|
|
||||||
'f': 'json'
|
|
||||||
}
|
|
||||||
retries = Retry(total=10, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
|
|
||||||
with requests.Session() as session:
|
|
||||||
session.mount("https://", HTTPAdapter(max_retries=retries))
|
|
||||||
response = session.get(url, params=params, timeout=30)
|
|
||||||
response.raise_for_status()
|
|
||||||
data = response.json()
|
|
||||||
return data.get('count', 0)
|
|
||||||
|
|
||||||
def fetch_features(url, offset, num, max_retries=5):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'outFields': '*',
|
|
||||||
'geometryPrecision': 6,
|
|
||||||
'outSR': 4326,
|
|
||||||
'f': 'json',
|
|
||||||
'resultOffset': offset,
|
|
||||||
'resultRecordCount': num
|
|
||||||
}
|
|
||||||
for attempt in range(max_retries):
|
|
||||||
try:
|
|
||||||
retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
|
|
||||||
with requests.Session() as session:
|
|
||||||
session.mount("https://", HTTPAdapter(max_retries=retries))
|
|
||||||
response = session.get(url, params=params, timeout=30)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json()
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Error fetching features (attempt {attempt + 1}/{max_retries}): {e}")
|
|
||||||
if attempt == max_retries - 1:
|
|
||||||
raise
|
|
||||||
time.sleep(5 * (attempt + 1)) # Exponential backoff
|
|
||||||
|
|
||||||
|
|
||||||
def download_layer(layer_num, layer_name):
|
|
||||||
base_dir = os.path.expanduser('~/data')
|
|
||||||
os.makedirs(base_dir, exist_ok=True)
|
|
||||||
|
|
||||||
file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')
|
|
||||||
temp_file_path = os.path.join(base_dir, f'PLSS_{layer_name}_temp.json')
|
|
||||||
|
|
||||||
url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"
|
|
||||||
|
|
||||||
total_count = get_feature_count(url)
|
|
||||||
print(f"Total {layer_name} features: {total_count}")
|
|
||||||
|
|
||||||
batch_size = 1000
|
|
||||||
chunk_size = 10000 # Write to file every 10,000 features
|
|
||||||
offset = 0
|
|
||||||
all_features = []
|
|
||||||
|
|
||||||
# Check if temporary file exists and load its content
|
|
||||||
if os.path.exists(temp_file_path):
|
|
||||||
try:
|
|
||||||
with open(temp_file_path, 'r') as f:
|
|
||||||
all_features = json.load(f)
|
|
||||||
offset = len(all_features)
|
|
||||||
print(f"Resuming download from offset {offset}")
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
print("Error reading temporary file. Starting download from the beginning.")
|
|
||||||
offset = 0
|
|
||||||
all_features = []
|
|
||||||
|
|
||||||
try:
|
|
||||||
while offset < total_count:
|
|
||||||
print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
|
|
||||||
data = fetch_features(url, offset, batch_size)
|
|
||||||
|
|
||||||
new_features = data.get('features', [])
|
|
||||||
if not new_features:
|
|
||||||
break
|
|
||||||
|
|
||||||
all_features.extend(new_features)
|
|
||||||
offset += len(new_features)
|
|
||||||
|
|
||||||
# Progress indicator
|
|
||||||
progress = offset / total_count
|
|
||||||
bar_length = 30
|
|
||||||
filled_length = int(bar_length * progress)
|
|
||||||
bar = '=' * filled_length + '-' * (bar_length - filled_length)
|
|
||||||
print(f'\rProgress: [{bar}] {progress:.1%} ({offset}/{total_count} features)', end='', flush=True)
|
|
||||||
|
|
||||||
# Save progress to temporary file every chunk_size features
|
|
||||||
if len(all_features) % chunk_size == 0:
|
|
||||||
with open(temp_file_path, 'w') as f:
|
|
||||||
json.dump(all_features, f)
|
|
||||||
|
|
||||||
time.sleep(1)
|
|
||||||
|
|
||||||
print(f"\nTotal {layer_name} features fetched: {len(all_features)}")
|
|
||||||
|
|
||||||
# Write final GeoJSON file
|
|
||||||
with open(file_path, 'w') as f:
|
|
||||||
f.write('{"type": "FeatureCollection", "features": [\n')
|
|
||||||
for i, feature in enumerate(all_features):
|
|
||||||
geojson_feature = {
|
|
||||||
"type": "Feature",
|
|
||||||
"properties": feature['attributes'],
|
|
||||||
"geometry": feature['geometry']
|
|
||||||
}
|
|
||||||
json.dump(geojson_feature, f)
|
|
||||||
if i < len(all_features) - 1:
|
|
||||||
f.write(',\n')
|
|
||||||
f.write('\n]}')
|
|
||||||
|
|
||||||
print(f"GeoJSON file saved as '{file_path}'")
|
|
||||||
|
|
||||||
# Remove temporary file
|
|
||||||
if os.path.exists(temp_file_path):
|
|
||||||
os.remove(temp_file_path)
|
|
||||||
|
|
||||||
return file_path
|
|
||||||
except Exception as e:
|
|
||||||
print(f"\nError during download: {e}")
|
|
||||||
print(f"Partial data saved in {temp_file_path}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def check_postgres_connection():
|
|
||||||
try:
|
|
||||||
subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME, '-c', 'SELECT 1;'],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
return True
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def check_postgis_extension():
|
|
||||||
try:
|
|
||||||
result = subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
|
|
||||||
'-c', "SELECT 1 FROM pg_extension WHERE extname = 'postgis';"],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
return '1' in result.stdout
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def create_postgis_extension():
|
|
||||||
try:
|
|
||||||
subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
|
|
||||||
'-c', "CREATE EXTENSION IF NOT EXISTS postgis;"],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
print("PostGIS extension created successfully.")
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print(f"Error creating PostGIS extension: {e}")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def import_to_postgis(file_path, table_name):
|
|
||||||
if not check_postgres_connection():
|
|
||||||
print("Error: Unable to connect to PostgreSQL. Please check your connection settings.")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if not check_postgis_extension():
|
|
||||||
print("PostGIS extension not found. Attempting to create it...")
|
|
||||||
create_postgis_extension()
|
|
||||||
|
|
||||||
ogr2ogr_command = [
|
|
||||||
'ogr2ogr',
|
|
||||||
'-f', 'PostgreSQL',
|
|
||||||
f'PG:dbname={DB_NAME} user={DB_USER} password={DB_PASSWORD} host={DB_HOST} port={DB_PORT}',
|
|
||||||
file_path,
|
|
||||||
'-nln', table_name,
|
|
||||||
'-overwrite'
|
|
||||||
]
|
|
||||||
|
|
||||||
try:
|
|
||||||
subprocess.run(ogr2ogr_command, check=True, capture_output=True, text=True)
|
|
||||||
print(f"Data successfully imported into PostGIS table: {table_name}")
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print(f"Error importing data into PostGIS: {e}")
|
|
||||||
print(f"Command that failed: {e.cmd}")
|
|
||||||
print(f"Error output: {e.stderr}")
|
|
||||||
|
|
||||||
def check_ogr2ogr():
|
|
||||||
try:
|
|
||||||
subprocess.run(['ogr2ogr', '--version'], check=True, capture_output=True, text=True)
|
|
||||||
return True
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
return False
|
|
||||||
except FileNotFoundError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
if not check_ogr2ogr():
|
|
||||||
print("Error: ogr2ogr not found. Please install GDAL/OGR tools.")
|
|
||||||
print("On Debian: sudo apt-get install gdal-bin")
|
|
||||||
print("On macOS with Homebrew: brew install gdal")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
try:
|
|
||||||
township_file = os.path.expanduser('~/data/PLSS_Townships.geojson')
|
|
||||||
if not os.path.exists(township_file):
|
|
||||||
township_file = download_layer(1, "Townships")
|
|
||||||
if township_file:
|
|
||||||
import_to_postgis(township_file, "public.plss_townships")
|
|
||||||
else:
|
|
||||||
print("Failed to download Townships data. Skipping import.")
|
|
||||||
|
|
||||||
section_file = os.path.expanduser('~/data/PLSS_Sections.geojson')
|
|
||||||
if not os.path.exists(section_file):
|
|
||||||
section_file = download_layer(2, "Sections")
|
|
||||||
if section_file:
|
|
||||||
import_to_postgis(section_file, "public.plss_sections")
|
|
||||||
else:
|
|
||||||
print("Failed to download Sections data. Skipping import.")
|
|
||||||
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Error fetching data: {e}")
|
|
||||||
except Exception as e:
|
|
||||||
print(f"An unexpected error occurred: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
|
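The deleted PLSS downloader above resumes interrupted runs by persisting fetched features to a temporary JSON file and re-reading it to recover the offset. A stripped-down sketch of that checkpointing pattern follows; the function name fetch_batch and the checkpoint path are placeholders, not the script's actual API.

# Minimal sketch of the resume-from-checkpoint pattern used by the downloader above.
# fetch_batch(offset, count) and the checkpoint path are illustrative placeholders.
import json
import os

def download_all(fetch_batch, total, batch=1000, ckpt="features_temp.json"):
    features = []
    if os.path.exists(ckpt):                 # resume if a checkpoint exists
        try:
            with open(ckpt) as f:
                features = json.load(f)
        except json.JSONDecodeError:
            features = []                    # corrupt checkpoint: start over
    offset = len(features)
    while offset < total:
        got = fetch_batch(offset, batch)
        if not got:
            break
        features.extend(got)
        offset = len(features)
        if offset % (10 * batch) == 0:       # periodic checkpoint every 10 batches
            with open(ckpt, "w") as f:
                json.dump(features, f)
    return features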
@ -1,133 +0,0 @@
|
||||||
# CaPLSS_downloader_and_importer.py
|
|
||||||
import requests
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import requests
|
|
||||||
from requests.adapters import HTTPAdapter
|
|
||||||
from urllib3.util.retry import Retry
|
|
||||||
|
|
||||||
def get_feature_count(url):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'returnCountOnly': 'true',
|
|
||||||
'f': 'json'
|
|
||||||
}
|
|
||||||
retries = Retry(total=10, backoff_factor=0.5)
|
|
||||||
adapter = HTTPAdapter(max_retries=retries)
|
|
||||||
session = requests.Session()
|
|
||||||
session.mount("https://", adapter)
|
|
||||||
|
|
||||||
response = session.get(url, params=params, timeout=15) # Add timeout parameter
|
|
||||||
response.raise_for_status()
|
|
||||||
data = response.json()
|
|
||||||
return data.get('count', 0)
|
|
||||||
|
|
||||||
|
|
||||||
def fetch_features(url, offset, num):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'outFields': '*',
|
|
||||||
'geometryPrecision': 6,
|
|
||||||
'outSR': 4326,
|
|
||||||
'f': 'json',
|
|
||||||
'resultOffset': offset,
|
|
||||||
'resultRecordCount': num
|
|
||||||
}
|
|
||||||
response = requests.get(url, params=params)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json()
|
|
||||||
|
|
||||||
def download_layer(layer_num, layer_name):
|
|
||||||
url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"
|
|
||||||
|
|
||||||
total_count = get_feature_count(url)
|
|
||||||
print(f"Total {layer_name} features: {total_count}")
|
|
||||||
|
|
||||||
batch_size = 1000
|
|
||||||
offset = 0
|
|
||||||
all_features = []
|
|
||||||
|
|
||||||
while offset < total_count:
|
|
||||||
print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
|
|
||||||
data = fetch_features(url, offset, batch_size)
|
|
||||||
|
|
||||||
new_features = data.get('features', [])
|
|
||||||
if not new_features:
|
|
||||||
break
|
|
||||||
|
|
||||||
all_features.extend(new_features)
|
|
||||||
offset += len(new_features)
|
|
||||||
|
|
||||||
print(f"Progress: {len(all_features)}/{total_count} features")
|
|
||||||
|
|
||||||
time.sleep(1) # Be nice to the server
|
|
||||||
|
|
||||||
print(f"Total {layer_name} features fetched: {len(all_features)}")
|
|
||||||
|
|
||||||
# Convert to GeoJSON
|
|
||||||
geojson_features = [
|
|
||||||
{
|
|
||||||
"type": "Feature",
|
|
||||||
"properties": feature['attributes'],
|
|
||||||
"geometry": feature['geometry']
|
|
||||||
} for feature in all_features
|
|
||||||
]
|
|
||||||
|
|
||||||
full_geojson = {
|
|
||||||
"type": "FeatureCollection",
|
|
||||||
"features": geojson_features
|
|
||||||
}
|
|
||||||
|
|
||||||
# Define a base directory that exists on both macOS and Debian
|
|
||||||
base_dir = os.path.expanduser('~/data')
|
|
||||||
os.makedirs(base_dir, exist_ok=True) # Create the directory if it doesn't exist
|
|
||||||
|
|
||||||
# Use os.path.join to construct the file path
|
|
||||||
file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')
|
|
||||||
|
|
||||||
# Save to file
|
|
||||||
with open(file_path, 'w') as f:
|
|
||||||
json.dump(full_geojson, f)
|
|
||||||
|
|
||||||
print(f"GeoJSON file saved as '{file_path}'")
|
|
||||||
|
|
||||||
return file_path
|
|
||||||
|
|
||||||
def import_to_postgis(file_path, table_name):
|
|
||||||
db_name = 'sij'
|
|
||||||
db_user = 'sij'
|
|
||||||
db_password = 'Synchr0!'
|
|
||||||
|
|
||||||
ogr2ogr_command = [
|
|
||||||
'ogr2ogr',
|
|
||||||
'-f', 'PostgreSQL',
|
|
||||||
f'PG:dbname={db_name} user={db_user} password={db_password}',
|
|
||||||
file_path,
|
|
||||||
'-nln', table_name,
|
|
||||||
'-overwrite'
|
|
||||||
]
|
|
||||||
|
|
||||||
subprocess.run(ogr2ogr_command, check=True)
|
|
||||||
print(f"Data successfully imported into PostGIS table: {table_name}")
|
|
||||||
|
|
||||||
def main():
|
|
||||||
try:
|
|
||||||
# Download and import Townships (Layer 1)
|
|
||||||
township_file = download_layer(1, "Townships")
|
|
||||||
import_to_postgis(township_file, "public.plss_townships")
|
|
||||||
|
|
||||||
# Download and import Sections (Layer 2)
|
|
||||||
section_file = download_layer(2, "Sections")
|
|
||||||
import_to_postgis(section_file, "public.plss_sections")
|
|
||||||
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Error fetching data: {e}")
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print(f"Error importing data into PostGIS: {e}")
|
|
||||||
except Exception as e:
|
|
||||||
print(f"An unexpected error occurred: {e}")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
|
@@ -1 +0,0 @@
{"offset": 50000}
@ -1,366 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import requests
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
from requests.adapters import HTTPAdapter
|
|
||||||
from urllib3.util.retry import Retry
|
|
||||||
import argparse
|
|
||||||
import psycopg2
|
|
||||||
from psycopg2.extras import execute_values
|
|
||||||
|
|
||||||
def load_config():
|
|
||||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
sys_config_path = os.path.join(script_dir, '..', 'config', 'sys.yaml')
|
|
||||||
gis_config_path = os.path.join(script_dir, '..', 'config', 'gis.yaml')
|
|
||||||
|
|
||||||
with open(sys_config_path, 'r') as f:
|
|
||||||
sys_config = yaml.safe_load(f)
|
|
||||||
|
|
||||||
with open(gis_config_path, 'r') as f:
|
|
||||||
gis_config = yaml.safe_load(f)
|
|
||||||
|
|
||||||
return sys_config, gis_config
|
|
||||||
|
|
||||||
def get_db_config(sys_config):
|
|
||||||
pool = sys_config.get('POOL', [])
|
|
||||||
if pool:
|
|
||||||
db_config = pool[0]
|
|
||||||
return {
|
|
||||||
'DB_NAME': db_config.get('db_name'),
|
|
||||||
'DB_USER': db_config.get('db_user'),
|
|
||||||
'DB_PASSWORD': db_config.get('db_pass'),
|
|
||||||
'DB_HOST': db_config.get('ts_ip'),
|
|
||||||
'DB_PORT': str(db_config.get('db_port'))
|
|
||||||
}
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def get_feature_count(url):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'returnCountOnly': 'true',
|
|
||||||
'f': 'json'
|
|
||||||
}
|
|
||||||
retries = Retry(total=10, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
|
|
||||||
with requests.Session() as session:
|
|
||||||
session.mount("https://", HTTPAdapter(max_retries=retries))
|
|
||||||
response = session.get(url, params=params, timeout=30)
|
|
||||||
response.raise_for_status()
|
|
||||||
data = response.json()
|
|
||||||
return data.get('count', 0)
|
|
||||||
|
|
||||||
def fetch_features(url, offset, num, max_retries=5):
|
|
||||||
params = {
|
|
||||||
'where': '1=1',
|
|
||||||
'outFields': '*',
|
|
||||||
'geometryPrecision': 6,
|
|
||||||
'outSR': 4326,
|
|
||||||
'f': 'json',
|
|
||||||
'resultOffset': offset,
|
|
||||||
'resultRecordCount': num,
|
|
||||||
'orderByFields': 'OBJECTID'
|
|
||||||
}
|
|
||||||
for attempt in range(max_retries):
|
|
||||||
try:
|
|
||||||
retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
|
|
||||||
with requests.Session() as session:
|
|
||||||
session.mount("https://", HTTPAdapter(max_retries=retries))
|
|
||||||
response = session.get(url, params=params, timeout=30)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json()
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Error fetching features (attempt {attempt + 1}/{max_retries}): {e}")
|
|
||||||
if attempt == max_retries - 1:
|
|
||||||
raise
|
|
||||||
time.sleep(5 * (attempt + 1)) # Exponential backoff
|
|
||||||
|
|
||||||
|
|
||||||
def create_table(db_config, table_name, gis_config):
|
|
||||||
conn = psycopg2.connect(
|
|
||||||
dbname=db_config['DB_NAME'],
|
|
||||||
user=db_config['DB_USER'],
|
|
||||||
password=db_config['DB_PASSWORD'],
|
|
||||||
host=db_config['DB_HOST'],
|
|
||||||
port=db_config['DB_PORT']
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
# Check if the table already exists
|
|
||||||
cur.execute(f"SELECT to_regclass('{table_name}')")
|
|
||||||
if cur.fetchone()[0] is None:
|
|
||||||
# If the table doesn't exist, create it based on the first feature
|
|
||||||
url = next(layer['url'] for layer in gis_config['layers'] if layer['table_name'] == table_name)
|
|
||||||
first_feature = fetch_features(url, 0, 1)['features'][0]
|
|
||||||
columns = []
|
|
||||||
for attr, value in first_feature['attributes'].items():
|
|
||||||
column_name = attr.lower().replace('.', '_').replace('()', '')
|
|
||||||
if isinstance(value, int):
|
|
||||||
columns.append(f'"{column_name}" INTEGER')
|
|
||||||
elif isinstance(value, float):
|
|
||||||
columns.append(f'"{column_name}" DOUBLE PRECISION')
|
|
||||||
else:
|
|
||||||
columns.append(f'"{column_name}" TEXT')
|
|
||||||
|
|
||||||
create_sql = f"""
|
|
||||||
CREATE TABLE {table_name} (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
geom GEOMETRY(Polygon, 4326),
|
|
||||||
{', '.join(columns)}
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
cur.execute(create_sql)
|
|
||||||
|
|
||||||
# Create index on plssid
|
|
||||||
cur.execute(f'CREATE INDEX idx_{table_name.split(".")[-1]}_plssid ON {table_name}("plssid")')
|
|
||||||
|
|
||||||
print(f"Created table: {table_name}")
|
|
||||||
else:
|
|
||||||
print(f"Table {table_name} already exists")
|
|
||||||
conn.commit()
|
|
||||||
except psycopg2.Error as e:
|
|
||||||
print(f"Error creating table {table_name}: {e}")
|
|
||||||
finally:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
|
|
||||||
def insert_features_to_db(features, table_name, db_config):
|
|
||||||
conn = psycopg2.connect(
|
|
||||||
dbname=db_config['DB_NAME'],
|
|
||||||
user=db_config['DB_USER'],
|
|
||||||
password=db_config['DB_PASSWORD'],
|
|
||||||
host=db_config['DB_HOST'],
|
|
||||||
port=db_config['DB_PORT']
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
# Get the column names from the table
|
|
||||||
cur.execute(f"SELECT column_name FROM information_schema.columns WHERE table_name = '{table_name.split('.')[-1]}'")
|
|
||||||
db_columns = [row[0] for row in cur.fetchall() if row[0] != 'id']
|
|
||||||
|
|
||||||
# Prepare the SQL statement
|
|
||||||
sql = f"""
|
|
||||||
INSERT INTO {table_name} ({', '.join([f'"{col}"' for col in db_columns])})
|
|
||||||
VALUES %s
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Prepare the template for execute_values
|
|
||||||
template = f"({', '.join(['%s' for _ in db_columns])})"
|
|
||||||
|
|
||||||
values = []
|
|
||||||
for feature in features:
|
|
||||||
geom = feature.get('geometry')
|
|
||||||
attrs = feature.get('attributes')
|
|
||||||
if geom and attrs:
|
|
||||||
rings = geom['rings'][0]
|
|
||||||
wkt = f"POLYGON(({','.join([f'{x} {y}' for x, y in rings])}))"
|
|
||||||
|
|
||||||
row = []
|
|
||||||
for col in db_columns:
|
|
||||||
if col == 'geom':
|
|
||||||
row.append(wkt)
|
|
||||||
else:
|
|
||||||
# Map database column names back to original attribute names
|
|
||||||
attr_name = col.upper()
|
|
||||||
if attr_name == 'SHAPE_STAREA':
|
|
||||||
attr_name = 'Shape.STArea()'
|
|
||||||
elif attr_name == 'SHAPE_STLENGTH':
|
|
||||||
attr_name = 'Shape.STLength()'
|
|
||||||
row.append(attrs.get(attr_name))
|
|
||||||
|
|
||||||
values.append(tuple(row))
|
|
||||||
else:
|
|
||||||
print(f"Skipping invalid feature: {feature}")
|
|
||||||
|
|
||||||
if values:
|
|
||||||
execute_values(cur, sql, values, template=template, page_size=100)
|
|
||||||
print(f"Inserted {len(values)} features")
|
|
||||||
else:
|
|
||||||
print("No valid features to insert")
|
|
||||||
conn.commit()
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error inserting features: {e}")
|
|
||||||
print(f"First feature for debugging: {features[0] if features else 'No features'}")
|
|
||||||
conn.rollback()
|
|
||||||
finally:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def download_and_import_layer(layer_config, db_config, gis_config, force_refresh):
|
|
||||||
url = layer_config['url']
|
|
||||||
layer_name = layer_config['layer_name']
|
|
||||||
table_name = layer_config['table_name']
|
|
||||||
batch_size = layer_config['batch_size']
|
|
||||||
delay = layer_config['delay'] / 1000 # Convert to seconds
|
|
||||||
|
|
||||||
total_count = get_feature_count(url)
|
|
||||||
print(f"Total {layer_name} features: {total_count}")
|
|
||||||
|
|
||||||
# Check existing records in the database
|
|
||||||
existing_count = get_existing_record_count(db_config, table_name)
|
|
||||||
|
|
||||||
if existing_count == total_count and not force_refresh:
|
|
||||||
print(f"Table {table_name} already contains all {total_count} features. Skipping.")
|
|
||||||
return
|
|
||||||
|
|
||||||
if force_refresh:
|
|
||||||
delete_existing_table(db_config, table_name)
|
|
||||||
create_table(db_config, table_name, gis_config)
|
|
||||||
existing_count = 0
|
|
||||||
elif existing_count == 0:
|
|
||||||
create_table(db_config, table_name, gis_config)
|
|
||||||
|
|
||||||
offset = existing_count
|
|
||||||
|
|
||||||
start_time = time.time()
|
|
||||||
try:
|
|
||||||
while offset < total_count:
|
|
||||||
batch_start_time = time.time()
|
|
||||||
print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
|
|
||||||
try:
|
|
||||||
data = fetch_features(url, offset, batch_size)
|
|
||||||
new_features = data.get('features', [])
|
|
||||||
if not new_features:
|
|
||||||
break
|
|
||||||
|
|
||||||
insert_features_to_db(new_features, table_name, db_config)
|
|
||||||
offset += len(new_features)
|
|
||||||
|
|
||||||
batch_end_time = time.time()
|
|
||||||
batch_duration = batch_end_time - batch_start_time
|
|
||||||
print(f"Batch processed in {batch_duration:.2f} seconds")
|
|
||||||
|
|
||||||
# Progress indicator
|
|
||||||
progress = offset / total_count
|
|
||||||
bar_length = 30
|
|
||||||
filled_length = int(bar_length * progress)
|
|
||||||
bar = '=' * filled_length + '-' * (bar_length - filled_length)
|
|
||||||
print(f'\rProgress: [{bar}] {progress:.1%} ({offset}/{total_count} features)', end='', flush=True)
|
|
||||||
|
|
||||||
time.sleep(delay)
|
|
||||||
except Exception as e:
|
|
||||||
print(f"\nError processing batch starting at offset {offset}: {e}")
|
|
||||||
print("Continuing with next batch...")
|
|
||||||
offset += batch_size
|
|
||||||
|
|
||||||
end_time = time.time()
|
|
||||||
total_duration = end_time - start_time
|
|
||||||
print(f"\nTotal {layer_name} features fetched and imported: {offset}")
|
|
||||||
print(f"Total time: {total_duration:.2f} seconds")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"\nError during download and import: {e}")
|
|
||||||
print(f"Last successful offset: {offset}")
|
|
||||||
|
|
||||||
def get_existing_record_count(db_config, table_name):
|
|
||||||
conn = psycopg2.connect(
|
|
||||||
dbname=db_config['DB_NAME'],
|
|
||||||
user=db_config['DB_USER'],
|
|
||||||
password=db_config['DB_PASSWORD'],
|
|
||||||
host=db_config['DB_HOST'],
|
|
||||||
port=db_config['DB_PORT']
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
cur.execute(f"SELECT COUNT(*) FROM {table_name}")
|
|
||||||
count = cur.fetchone()[0]
|
|
||||||
return count
|
|
||||||
except psycopg2.Error:
|
|
||||||
return 0
|
|
||||||
finally:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
def delete_existing_table(db_config, table_name):
|
|
||||||
conn = psycopg2.connect(
|
|
||||||
dbname=db_config['DB_NAME'],
|
|
||||||
user=db_config['DB_USER'],
|
|
||||||
password=db_config['DB_PASSWORD'],
|
|
||||||
host=db_config['DB_HOST'],
|
|
||||||
port=db_config['DB_PORT']
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
with conn.cursor() as cur:
|
|
||||||
# Drop the index if it exists
|
|
||||||
cur.execute(f"DROP INDEX IF EXISTS idx_{table_name.split('.')[-1]}_plssid")
|
|
||||||
|
|
||||||
# Then drop the table
|
|
||||||
cur.execute(f"DROP TABLE IF EXISTS {table_name} CASCADE")
|
|
||||||
conn.commit()
|
|
||||||
print(f"Deleted existing table and index: {table_name}")
|
|
||||||
except psycopg2.Error as e:
|
|
||||||
print(f"Error deleting table {table_name}: {e}")
|
|
||||||
finally:
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
|
|
||||||
def check_postgres_connection(db_config):
|
|
||||||
try:
|
|
||||||
subprocess.run(['psql',
|
|
||||||
'-h', db_config['DB_HOST'],
|
|
||||||
'-p', db_config['DB_PORT'],
|
|
||||||
'-U', db_config['DB_USER'],
|
|
||||||
'-d', db_config['DB_NAME'],
|
|
||||||
'-c', 'SELECT 1;'],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
return True
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def check_postgis_extension(db_config):
|
|
||||||
try:
|
|
||||||
result = subprocess.run(['psql',
|
|
||||||
'-h', db_config['DB_HOST'],
|
|
||||||
'-p', db_config['DB_PORT'],
|
|
||||||
'-U', db_config['DB_USER'],
|
|
||||||
'-d', db_config['DB_NAME'],
|
|
||||||
'-c', "SELECT 1 FROM pg_extension WHERE extname = 'postgis';"],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
return '1' in result.stdout
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def create_postgis_extension(db_config):
|
|
||||||
try:
|
|
||||||
subprocess.run(['psql',
|
|
||||||
'-h', db_config['DB_HOST'],
|
|
||||||
'-p', db_config['DB_PORT'],
|
|
||||||
'-U', db_config['DB_USER'],
|
|
||||||
'-d', db_config['DB_NAME'],
|
|
||||||
'-c', "CREATE EXTENSION IF NOT EXISTS postgis;"],
|
|
||||||
check=True, capture_output=True, text=True)
|
|
||||||
print("PostGIS extension created successfully.")
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print(f"Error creating PostGIS extension: {e}")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = argparse.ArgumentParser(description="Download and import PLSS data")
|
|
||||||
parser.add_argument("--force-refresh", nargs='*', help="Force refresh of specified layers or all if none specified")
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
sys_config, gis_config = load_config()
|
|
||||||
db_config = get_db_config(sys_config)
|
|
||||||
|
|
||||||
if not check_postgres_connection(db_config):
|
|
||||||
print("Error: Unable to connect to PostgreSQL. Please check your connection settings.")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if not check_postgis_extension(db_config):
|
|
||||||
print("PostGIS extension not found. Attempting to create it...")
|
|
||||||
create_postgis_extension(db_config)
|
|
||||||
|
|
||||||
try:
|
|
||||||
for layer in gis_config['layers']:
|
|
||||||
if args.force_refresh is None or not args.force_refresh or layer['layer_name'] in args.force_refresh:
|
|
||||||
download_and_import_layer(layer, db_config, gis_config, bool(args.force_refresh))
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Error fetching data: {e}")
|
|
||||||
except Exception as e:
|
|
||||||
print(f"An unexpected error occurred: {e}")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
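The importer above converts ArcGIS JSON ring geometries to WKT before inserting them with execute_values. A standalone sketch of that conversion, matching the original geom['rings'][0] logic; the function name is illustrative.

# Sketch of the rings-to-WKT conversion used by the PLSS importer above.
# Only the outer ring is used, as in the original code.
def rings_to_wkt(geometry: dict) -> str:
    ring = geometry['rings'][0]
    coords = ",".join(f"{x} {y}" for x, y in ring)
    return f"POLYGON(({coords}))"

print(rings_to_wkt({"rings": [[(0, 0), (0, 1), (1, 1), (0, 0)]]}))
# POLYGON((0 0,0 1,1 1,0 0))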
195
sijapi/helpers/courtlistener/clHooks.py
Normal file
@@ -0,0 +1,195 @@
from fastapi import FastAPI, Request, BackgroundTasks, HTTPException, status
from fastapi.responses import JSONResponse
import httpx
import json
import logging  # added: this module logs via logging.* throughout, but the hunk shows no logging import
from pathlib import Path
import asyncio
from datetime import datetime
import os, io
from PyPDF2 import PdfReader
import aiohttp

hook = FastAPI()
||||||
|
|
||||||
|
|
||||||
|
# /Users/sij/Library/CloudStorage/OneDrive-WELC/Documents - WELC-Docket
|
||||||
|
SYNC_FOLDER = Path(__file__).resolve().parent.parent
|
||||||
|
HOME_FOLDER = Path.home()
|
||||||
|
DOCKETS_FOLDER = HOME_FOLDER / "Dockets"
|
||||||
|
SEARCH_FOLDER = HOME_FOLDER / "Watched Cases"
|
||||||
|
SCRIPTS_FOLDER = SYNC_FOLDER / ".scripts"
|
||||||
|
REQUESTS_FOLDER = HOME_FOLDER / "sync" / "requests"
|
||||||
|
COURTLISTENER_BASE_URL = "https://www.courtlistener.com"
|
||||||
|
COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"
|
||||||
|
COURTLISTENER_API_KEY = "efb5fe00f3c6c88d65a32541260945befdf53a7e"
|
||||||
|
|
||||||
|
with open(SCRIPTS_FOLDER / 'caseTable.json', 'r') as file:
|
||||||
|
CASE_TABLE = json.load(file)
|
||||||
|
|
||||||
|
@hook.get("/health")
|
||||||
|
async def health():
|
||||||
|
return {"status": "ok"}
|
||||||
|
|
||||||
|
@hook.post("/cl/docket")
|
||||||
|
async def respond(request: Request, background_tasks: BackgroundTasks):
|
||||||
|
client_ip = request.client.host
|
||||||
|
logging.info(f"Received request from IP: {client_ip}")
|
||||||
|
data = await request.json()
|
||||||
|
payload = data['payload']
|
||||||
|
results = data['payload']['results']
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
|
||||||
|
payload_file = REQUESTS_FOLDER / f"{timestamp}-{client_ip}_docket.json"
|
||||||
|
with open(payload_file, 'w') as file:
|
||||||
|
json.dump(payload, file, indent=2)
|
||||||
|
|
||||||
|
for result in results:
|
||||||
|
background_tasks.add_task(process_docket, result)
|
||||||
|
return JSONResponse(content={"message": "Received"}, status_code=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
async def process_docket(result):
|
||||||
|
async with httpx.AsyncClient() as session:
|
||||||
|
await process_docket_result(result, session)
|
||||||
|
|
||||||
|
|
||||||
|
async def process_docket_result(result, session):
|
||||||
|
docket = str(result.get('docket'))
|
||||||
|
case_code, case_shortname = get_case_details(docket)
|
||||||
|
date_filed = result.get('date_filed', 'No Date Filed')
|
||||||
|
|
||||||
|
try:
|
||||||
|
date_filed_formatted = datetime.strptime(date_filed, '%Y-%m-%d').strftime('%Y%m%d')
|
||||||
|
except ValueError:
|
||||||
|
date_filed_formatted = 'NoDateFiled'
|
||||||
|
|
||||||
|
# Fetching court docket information from the API
|
||||||
|
url = f"{COURTLISTENER_DOCKETS_URL}?id={docket}"
|
||||||
|
headers = {'Authorization': f'Token {COURTLISTENER_API_KEY}'}
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
async with session.get(url, headers=headers) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
logging.info(f"Fetching CourtListener docket information for {docket}...")
|
||||||
|
data = await response.json()
|
||||||
|
court_docket = data['results'][0]['docket_number_core']
|
||||||
|
court_docket = f"{court_docket[:2]}-cv-{court_docket[2:]}" # Formatting the docket number
|
||||||
|
case_name = data['results'][0]['case_name']
|
||||||
|
logging.info(f"Obtained from CourtListener: docket {court_docket}, case name {case_name}.")
|
||||||
|
else:
|
||||||
|
logging.info("Failed to fetch data from CourtListener API.")
|
||||||
|
court_docket = 'NoCourtDocket'
|
||||||
|
case_name = 'NoCaseName'
|
||||||
|
|
||||||
|
for document in result.get('recap_documents', []):
|
||||||
|
filepath_ia = document.get('filepath_ia')
|
||||||
|
filepath_local = document.get('filepath_local')
|
||||||
|
|
||||||
|
if filepath_ia:
|
||||||
|
file_url = filepath_ia
|
||||||
|
logging.info(f"Found IA file at {file_url}.")
|
||||||
|
elif filepath_local:
|
||||||
|
file_url = f"{COURTLISTENER_BASE_URL}/{filepath_local}"
|
||||||
|
logging.info(f"Found local file at {file_url}.")
|
||||||
|
else:
|
||||||
|
logging.info(f"No file URL found in filepath_ia or filepath_local for one of the documents.")
|
||||||
|
continue
|
||||||
|
|
||||||
|
document_number = document.get('document_number', 'NoDocumentNumber')
|
||||||
|
description = document.get('description', 'NoDescription').replace(" ", "_").replace("/", "_")
|
||||||
|
description = description[:50] # Truncate description
|
||||||
|
# case_shortname = case_name # TEMPORARY OVERRIDE
|
||||||
|
file_name = f"{case_code}_{document_number}_{date_filed_formatted}_{description}.pdf"
|
||||||
|
target_path = Path(DOCKETS_FOLDER) / case_shortname / "Docket" / file_name
|
||||||
|
target_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
await download_file(file_url, target_path, session)
|
||||||
|
logging.info(f"Downloaded {file_name} to {target_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_case_details(docket):
|
||||||
|
case_info = CASE_TABLE.get(str(docket), {"code": "000", "shortname": "UNKNOWN"})
|
||||||
|
case_code = case_info.get("code")
|
||||||
|
short_name = case_info.get("shortname")
|
||||||
|
return case_code, short_name
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def download_file(url: str, path: Path, session: aiohttp.ClientSession = None):
|
||||||
|
headers = {
|
||||||
|
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'
|
||||||
|
}
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
logging.info(f"Attempting to download {url} to {path}.")
|
||||||
|
try:
|
||||||
|
async with session.get(url, headers=headers, allow_redirects=True) as response:
|
||||||
|
if response.status == 403:
|
||||||
|
logging.error(f"Access denied (403 Forbidden) for URL: {url}. Skipping download.")
|
||||||
|
return
|
||||||
|
response.raise_for_status()
|
||||||
|
|
||||||
|
# Check if the response content type is a PDF
|
||||||
|
content_type = response.headers.get('Content-Type')
|
||||||
|
if content_type != 'application/pdf':
|
||||||
|
logging.error(f"Invalid content type: {content_type}. Skipping download.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Create an in-memory buffer to store the downloaded content
|
||||||
|
buffer = io.BytesIO()
|
||||||
|
async for chunk in response.content.iter_chunked(1024):
|
||||||
|
buffer.write(chunk)
|
||||||
|
|
||||||
|
# Reset the buffer position to the beginning
|
||||||
|
buffer.seek(0)
|
||||||
|
|
||||||
|
# Validate the downloaded PDF content
|
||||||
|
try:
|
||||||
|
PdfReader(buffer)
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"Invalid PDF content: {str(e)}. Skipping download.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# If the PDF is valid, write the content to the file on disk
|
||||||
|
path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
with path.open('wb') as file:
|
||||||
|
file.write(buffer.getvalue())
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"Error downloading file: {str(e)}")
|
||||||
|
|
||||||
|
@hook.post("/cl/search")
|
||||||
|
async def respond_search(request: Request, background_tasks: BackgroundTasks):
|
||||||
|
client_ip = request.client.host
|
||||||
|
logging.info(f"Received request from IP: {client_ip}")
|
||||||
|
data = await request.json()
|
||||||
|
payload = data['payload']
|
||||||
|
results = data['payload']['results']
|
||||||
|
|
||||||
|
# Save the payload data
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
|
||||||
|
payload_file = REQUESTS_FOLDER / f"{timestamp}-{client_ip}_search.json"
|
||||||
|
with open(payload_file, 'w') as file:
|
||||||
|
json.dump(payload, file, indent=2)
|
||||||
|
|
||||||
|
for result in results:
|
||||||
|
background_tasks.add_task(process_search_result, result)
|
||||||
|
return JSONResponse(content={"message": "Received"}, status_code=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
async def process_search_result(result):
|
||||||
|
async with httpx.AsyncClient() as session:
|
||||||
|
download_url = result.get('download_url')
|
||||||
|
court_id = result.get('court_id')
|
||||||
|
case_name_short = result.get('caseNameShort')
|
||||||
|
case_name = result.get('caseName')
|
||||||
|
logging.info(f"Received payload for case {case_name} ({court_id}) and download url {download_url}")
|
||||||
|
|
||||||
|
court_folder = court_id
|
||||||
|
|
||||||
|
if case_name_short:
|
||||||
|
case_folder = case_name_short
|
||||||
|
else:
|
||||||
|
case_folder = case_name
|
||||||
|
|
||||||
|
file_name = download_url.split('/')[-1]
|
||||||
|
target_path = Path(SEARCH_FOLDER) / court_folder / case_folder / file_name
|
||||||
|
target_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
await download_file(download_url, target_path, session)
|
||||||
|
logging.info(f"Downloaded {file_name} to {target_path}")
|
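A minimal sketch of serving the hook app defined in clHooks.py above. The module path, host, port, and logging configuration are assumptions; the hunk itself never shows how the app is launched or how logging is configured.

# Hypothetical launcher for the clHooks.py webhook app above.
# Host/port are illustrative; uvicorn must be installed separately.
import logging
import uvicorn
from clHooks import hook  # the FastAPI() instance defined above

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

if __name__ == "__main__":
    uvicorn.run(hook, host="0.0.0.0", port=8000)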
120
sijapi/helpers/courtlistener/downloadEarlier.py
Normal file
|
@ -0,0 +1,120 @@
|
||||||
|
import json
|
||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime
|
||||||
|
from selenium import webdriver
|
||||||
|
import shutil
|
||||||
|
from selenium.webdriver.chrome.service import Service
|
||||||
|
from selenium.webdriver.chrome.options import Options
|
||||||
|
from selenium.webdriver.common.by import By
|
||||||
|
from selenium.webdriver.support.ui import WebDriverWait
|
||||||
|
from selenium.webdriver.support import expected_conditions as EC
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import zipfile
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import zipfile
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||||
|
|
||||||
|
COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"
|
||||||
|
COURTLISTENER_API_KEY = "efb5fe00f3c6c88d65a32541260945befdf53a7e"
|
||||||
|
SYNC_FOLDER = Path(__file__).resolve().parent.parent
|
||||||
|
DOCKETS_FOLDER = SYNC_FOLDER / "Documents - WELC-Docket"
|
||||||
|
|
||||||
|
SELENIUM_HOST = "http://10.13.37.11:4646"
|
||||||
|
selenium_remote_url = f"{SELENIUM_HOST}/wd/hub"
|
||||||
|
|
||||||
|
with open('caseTable.json') as f:
|
||||||
|
CASE_TABLE = json.load(f)
|
||||||
|
|
||||||
|
|
||||||
|
def process_docket_result(docket_id):
|
||||||
|
case_code = CASE_TABLE[docket_id].get("code")
|
||||||
|
case_shortname = CASE_TABLE[docket_id].get("shortname")
|
||||||
|
case_court = CASE_TABLE[docket_id].get("court") # docket_info.get("court")
|
||||||
|
|
||||||
|
# Fetching court docket information from the API
|
||||||
|
url = f"{COURTLISTENER_DOCKETS_URL}?id={docket_id}"
|
||||||
|
headers = {'Authorization': f'Token {COURTLISTENER_API_KEY}'}
|
||||||
|
response = requests.get(url, headers=headers)
|
||||||
|
if response.status_code == 200:
|
||||||
|
logging.info(f"Fetching CourtListener docket information for {docket_id}...")
|
||||||
|
data = response.json()
|
||||||
|
absolute_url = data['results'][0]['absolute_url']
|
||||||
|
court_docket = data['results'][0]['docket_number_core']
|
||||||
|
court_docket = f"{court_docket[:2]}-cv-{court_docket[2:]}" # Formatting the docket number
|
||||||
|
date_filed = data['results'][0]['date_filed']
|
||||||
|
pacer_case_id = data['results'][0]['pacer_case_id']
|
||||||
|
logging.info(f"Obtained from CourtListener: docket {court_docket}, date filed {date_filed}.")
|
||||||
|
else:
|
||||||
|
logging.info("Failed to fetch data from CourtListener API.")
|
||||||
|
return
|
||||||
|
|
||||||
|
download_url_to_try = f"https://archive.org/compress/gov.uscourts.{case_court}.{pacer_case_id}/formats=TEXT%20PDF&file=/gov.uscourts.{case_court}.{pacer_case_id}.zip"
|
||||||
|
|
||||||
|
print(f"\n{download_url_to_try}\n")
|
||||||
|
|
||||||
|
directory_path = os.path.join(DOCKETS_FOLDER, case_shortname, "Docket")
|
||||||
|
|
||||||
|
# Create the directory if it doesn't exist
|
||||||
|
os.makedirs(directory_path, exist_ok=True)
|
||||||
|
|
||||||
|
target_path = os.path.join(directory_path, case_code + "_Archive.zip")
|
||||||
|
|
||||||
|
if os.path.exists(target_path):
|
||||||
|
logging.info(f"Using existing archive: {target_path}")
|
||||||
|
else:
|
||||||
|
response = requests.get(download_url_to_try)
|
||||||
|
if response.status_code == 200:
|
||||||
|
with open(target_path, 'wb') as f:
|
||||||
|
f.write(response.content)
|
||||||
|
logging.info(f"Downloaded archive: {target_path}")
|
||||||
|
time.sleep(3)
|
||||||
|
else:
|
||||||
|
logging.info(f"Failed to download archive from {download_url_to_try}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Extract the contents of the ZIP file
|
||||||
|
with zipfile.ZipFile(target_path, 'r') as zip_ref:
|
||||||
|
for member in zip_ref.infolist():
|
||||||
|
filename = os.path.basename(member.filename)
|
||||||
|
if filename.endswith('.pdf'):
|
||||||
|
# Extract the docket entry from the filename
|
||||||
|
match = re.search(r'(\d+)\.(\d+)\.pdf$', filename)
|
||||||
|
if match:
|
||||||
|
docket_entry_num = match.group(1)
|
||||||
|
docket_entry_dec = match.group(2)
|
||||||
|
if docket_entry_dec == '0':
|
||||||
|
docket_entry = docket_entry_num
|
||||||
|
else:
|
||||||
|
docket_entry = f"{docket_entry_num}.{docket_entry_dec}"
|
||||||
|
|
||||||
|
# Construct the new filename
|
||||||
|
new_filename = f"{case_code}_{docket_entry}.pdf"
|
||||||
|
target_file_path = os.path.join(directory_path, new_filename)
|
||||||
|
|
||||||
|
# Extract the file with the new filename
|
||||||
|
with open(target_file_path, 'wb') as target_file:
|
||||||
|
with zip_ref.open(member) as source:
|
||||||
|
shutil.copyfileobj(source, target_file)
|
||||||
|
logging.info(f"Extracted {filename} as {new_filename}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
docket_ids = list(CASE_TABLE.keys())
|
||||||
|
for docket_id in docket_ids:
|
||||||
|
process_docket_result(docket_id)
|
||||||
|
time.sleep(3)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
153
sijapi/helpers/courtlistener/downloadEarlier2.py
Normal file
|
@ -0,0 +1,153 @@
|
||||||
|
import json
|
||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime
|
||||||
|
from selenium import webdriver
|
||||||
|
import shutil
|
||||||
|
from selenium.webdriver.chrome.service import Service
|
||||||
|
from selenium.webdriver.chrome.options import Options
|
||||||
|
from selenium.webdriver.common.by import By
|
||||||
|
from selenium.webdriver.support.ui import WebDriverWait
|
||||||
|
from selenium.webdriver.support import expected_conditions as EC
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import zipfile
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import zipfile
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
from PyPDF2 import PdfReader
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||||
|
|
||||||
|
COURTLISTENER_DOCKETS_URL = os.getenv("COURTLISTENER_DOCKETS_URL")
|
||||||
|
COURTLISTENER_API_KEY = os.getenv("COURTLISTENER_API_KEY")
|
||||||
|
SYNC_FOLDER = Path(__file__).resolve().parent.parent
|
||||||
|
DOCKETS_FOLDER = os.getenv("DOCKETS_FOLDER")
|
||||||
|
SELENIUM_HOST=os.getenv("SELENIUM_HOST")
|
||||||
|
selenium_remote_url = f"{SELENIUM_HOST}/wd/hub"
|
||||||
|
|
||||||
|
with open('caseTable.json') as f:
|
||||||
|
CASE_TABLE = json.load(f)
|
||||||
|
|
||||||
|
def extract_date_from_pdf(pdf_path):
|
||||||
|
with open(pdf_path, 'rb') as file:
|
||||||
|
reader = PdfReader(file)
|
||||||
|
page = reader.pages[0]
|
||||||
|
text = page.extract_text()
|
||||||
|
lines = text.split('\n')
|
||||||
|
for line in lines[:2]:
|
||||||
|
match = re.search(r'\b(\d{1,2})-(\d{1,2})-(\d{2})\b', line)
|
||||||
|
if match:
|
||||||
|
month, day, year = match.groups()
|
||||||
|
if len(year) == 2:
|
||||||
|
if int(year) > 24:
|
||||||
|
year = '19' + year
|
||||||
|
else:
|
||||||
|
year = '20' + year
|
||||||
|
date_str = f"{year}-{month.zfill(2)}-{day.zfill(2)}"
|
||||||
|
file_date = datetime.strptime(date_str, "%Y-%m-%d")
|
||||||
|
if file_date >= datetime(1924, 5, 1) and file_date <= datetime(2024, 4, 30):
|
||||||
|
return file_date.strftime("%Y%m%d")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def process_docket_result(docket_id):
|
||||||
|
case_code = CASE_TABLE[docket_id].get("code")
|
||||||
|
case_shortname = CASE_TABLE[docket_id].get("shortname")
|
||||||
|
case_court = CASE_TABLE[docket_id].get("court") # docket_info.get("court")
|
||||||
|
|
||||||
|
# Fetching court docket information from the API
|
||||||
|
url = f"{COURTLISTENER_DOCKETS_URL}?id={docket_id}"
|
||||||
|
headers = {'Authorization': f'Token {COURTLISTENER_API_KEY}'}
|
||||||
|
response = requests.get(url, headers=headers)
|
||||||
|
if response.status_code == 200:
|
||||||
|
logging.info(f"Fetching CourtListener docket information for {docket_id}...")
|
||||||
|
data = response.json()
|
||||||
|
absolute_url = data['results'][0]['absolute_url']
|
||||||
|
court_docket = data['results'][0]['docket_number_core']
|
||||||
|
court_docket = f"{court_docket[:2]}-cv-{court_docket[2:]}" # Formatting the docket number
|
||||||
|
date_filed = data['results'][0]['date_filed']
|
||||||
|
pacer_case_id = data['results'][0]['pacer_case_id']
|
||||||
|
logging.info(f"Obtained from CourtListener: docket {court_docket}, date filed {date_filed}.")
|
||||||
|
else:
|
||||||
|
logging.info("Failed to fetch data from CourtListener API.")
|
||||||
|
return
|
||||||
|
|
||||||
|
download_url_to_try = f"https://archive.org/compress/gov.uscourts.{case_court}.{pacer_case_id}/formats=TEXT%20PDF&file=/gov.uscourts.{case_court}.{pacer_case_id}.zip"
|
||||||
|
|
||||||
|
print(f"\n{download_url_to_try}\n")
|
||||||
|
|
||||||
|
directory_path = os.path.join(DOCKETS_FOLDER, case_shortname, "Docket")
|
||||||
|
|
||||||
|
# Create the directory if it doesn't exist
|
||||||
|
os.makedirs(directory_path, exist_ok=True)
|
||||||
|
|
||||||
|
target_path = os.path.join(directory_path, case_code + "_Archive.zip")
|
||||||
|
|
||||||
|
if os.path.exists(target_path):
|
||||||
|
logging.info(f"Using existing archive: {target_path}")
|
||||||
|
else:
|
||||||
|
response = requests.get(download_url_to_try)
|
||||||
|
if response.status_code == 200:
|
||||||
|
with open(target_path, 'wb') as f:
|
||||||
|
f.write(response.content)
|
||||||
|
logging.info(f"Downloaded archive: {target_path}")
|
||||||
|
time.sleep(3)
|
||||||
|
else:
|
||||||
|
logging.info(f"Failed to download archive from {download_url_to_try}")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Extract the contents of the ZIP file
|
||||||
|
with zipfile.ZipFile(target_path, 'r') as zip_ref:
|
||||||
|
for member in zip_ref.infolist():
|
||||||
|
filename = os.path.basename(member.filename)
|
||||||
|
if filename.endswith('.pdf'):
|
||||||
|
# Extract the docket entry from the filename
|
||||||
|
match = re.search(r'(\d+)\.(\d+)\.pdf$', filename)
|
||||||
|
if match:
|
||||||
|
docket_entry_num = match.group(1)
|
||||||
|
docket_entry_dec = match.group(2)
|
||||||
|
if docket_entry_dec == '0':
|
||||||
|
docket_entry = docket_entry_num
|
||||||
|
else:
|
||||||
|
docket_entry = f"{docket_entry_num}.{docket_entry_dec}"
|
||||||
|
|
||||||
|
# Extract the date from the first two lines of the PDF
|
||||||
|
with zip_ref.open(member) as source:
|
||||||
|
temp_file_path = os.path.join(directory_path, 'temp.pdf')
|
||||||
|
with open(temp_file_path, 'wb') as temp_file:
|
||||||
|
shutil.copyfileobj(source, temp_file)
|
||||||
|
pdf_date = extract_date_from_pdf(temp_file_path)
|
||||||
|
os.remove(temp_file_path)
|
||||||
|
|
||||||
|
# Construct the new filename
|
||||||
|
if pdf_date:
|
||||||
|
new_filename = f"{case_code}_{docket_entry}_{pdf_date}.pdf"
|
||||||
|
else:
|
||||||
|
new_filename = f"{case_code}_{docket_entry}.pdf"
|
||||||
|
target_file_path = os.path.join(directory_path, new_filename)
|
||||||
|
|
||||||
|
# Extract the file with the new filename
|
||||||
|
with open(target_file_path, 'wb') as target_file:
|
||||||
|
with zip_ref.open(member) as source:
|
||||||
|
shutil.copyfileobj(source, target_file)
|
||||||
|
logging.info(f"Extracted {filename} as {new_filename}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
docket_ids = list(CASE_TABLE.keys())
|
||||||
|
for docket_id in docket_ids:
|
||||||
|
process_docket_result(docket_id)
|
||||||
|
time.sleep(3)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
32 sijapi/helpers/courtlistener/subscribeAlerts.py Normal file
@@ -0,0 +1,32 @@
import json
import requests

# Load the caseTable.json file
with open('caseTable.json', 'r') as file:
    case_table = json.load(file)

# Set the base URL and authorization token
base_url = "https://www.courtlistener.com/api/rest/v3/docket-alerts/"
auth_token = "a90d3f2de489aa4138a32133ca8bfec9d85fecfa"

# Iterate through each key (docket ID) in the case table
for docket_id in case_table.keys():
    # Set the data payload and headers for the request
    data = {'docket': docket_id}
    headers = {'Authorization': f'Token {auth_token}'}

    try:
        # Send the POST request to the CourtListener API
        response = requests.post(base_url, data=data, headers=headers)

        # Check the response status code
        if response.status_code == 200:
            print(f"Successfully created docket alert for docket ID: {docket_id}")
        else:
            print(f"Failed to create docket alert for docket ID: {docket_id}")
            print(f"Status code: {response.status_code}")
            print(f"Response content: {response.content}")

    except requests.exceptions.RequestException as e:
        print(f"Error occurred while creating docket alert for docket ID: {docket_id}")
        print(f"Error message: {str(e)}")
146 sijapi/helpers/database/dbrestore.sh Executable file
@@ -0,0 +1,146 @@
#!/bin/bash

DB_NAME="weatherlocate.db"

# Step 1: Backup existing data
echo "Backing up existing data..."
sqlite3 $DB_NAME <<EOF
.headers on
.mode csv
.output hourly_weather_backup.csv
SELECT * FROM HourlyWeather;
.output daily_weather_backup.csv
SELECT * FROM DailyWeather;
.output hours_backup.csv
SELECT * FROM Hours;
.output days_backup.csv
SELECT * FROM Days;
EOF

# Step 2: Drop and recreate tables
echo "Dropping and recreating tables..."
sqlite3 $DB_NAME <<EOF
DROP TABLE IF EXISTS HourlyWeather;
DROP TABLE IF EXISTS DailyWeather;
DROP TABLE IF EXISTS Hours;
DROP TABLE IF EXISTS Days;

CREATE TABLE HourlyWeather (
    id INTEGER PRIMARY KEY,
    datetime TEXT NOT NULL,
    temp REAL,
    feelslike REAL,
    humidity REAL,
    dew REAL,
    precip REAL,
    precipprob REAL,
    snow REAL,
    snowdepth REAL,
    windgust REAL,
    windspeed REAL,
    winddir REAL,
    pressure REAL,
    cloudcover REAL,
    visibility REAL,
    solarradiation REAL,
    solarenergy REAL,
    uvindex REAL,
    severerisk REAL,
    conditions TEXT,
    icon TEXT,
    last_updated TEXT DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE DailyWeather (
    id INTEGER PRIMARY KEY,
    sunrise_time TEXT,
    sunset_time TEXT,
    description TEXT,
    tempmax REAL,
    tempmin REAL,
    uvindex REAL,
    winddir REAL,
    windspeedmean REAL,
    windspeed REAL,
    icon TEXT,
    last_updated TEXT DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE Hours (
    id INTEGER PRIMARY KEY,
    day_id INTEGER,
    hour INTEGER,
    hourly_weather_id INTEGER,
    FOREIGN KEY (day_id) REFERENCES Days(id),
    FOREIGN KEY (hourly_weather_id) REFERENCES HourlyWeather(id)
);

CREATE TABLE Days (
    id INTEGER PRIMARY KEY,
    date TEXT NOT NULL,
    daily_weather_id INTEGER,
    FOREIGN KEY (daily_weather_id) REFERENCES DailyWeather(id)
);
EOF

# Step 3: Import data from backup files
echo "Importing data from backup files..."

python3 <<EOF
import sqlite3
import csv
from datetime import datetime

def import_data():
    conn = sqlite3.connect('$DB_NAME')
    cursor = conn.cursor()

    with open('hourly_weather_backup.csv', 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            cursor.execute('''
                INSERT INTO HourlyWeather (datetime, temp, feelslike, humidity, dew, precip, precipprob, snow, snowdepth, windgust, windspeed, winddir, pressure, cloudcover, visibility, solarradiation, solarenergy, uvindex, severerisk, conditions, icon, last_updated)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                row['datetime'], row['temp'], row['feelslike'], row['humidity'], row['dew'], row['precip'],
                row['precipprob'], row['snow'], row['snowdepth'], row['windgust'], row['windspeed'], row['winddir'],
                row['pressure'], row['cloudcover'], row['visibility'], row['solarradiation'], row['solarenergy'], row['uvindex'],
                row['severerisk'], row['conditions'], row['icon'],
                datetime.strptime(row['last_updated'], '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S')
            ))

    with open('daily_weather_backup.csv', 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            cursor.execute('''
                INSERT INTO DailyWeather (sunrise_time, sunset_time, description, tempmax, tempmin, uvindex, winddir, windspeedmean, windspeed, icon, last_updated)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                row['sunrise_time'], row['sunset_time'], row['description'], row['tempmax'], row['tempmin'],
                row['uvindex'], row['winddir'], row['windspeedmean'], row['windspeed'], row['icon'],
                datetime.strptime(row['last_updated'], '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S')
            ))

    with open('hours_backup.csv', 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            cursor.execute('''
                INSERT INTO Hours (day_id, hour, hourly_weather_id)
                VALUES (?, ?, ?)
            ''', (row['day_id'], row['hour'], row['hourly_weather_id']))

    with open('days_backup.csv', 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            cursor.execute('''
                INSERT INTO Days (date, daily_weather_id)
                VALUES (?, ?)
            ''', (row['date'], row['daily_weather_id']))

    conn.commit()
    conn.close()

import_data()
EOF

echo "Database rebuild complete."
123 sijapi/helpers/database/mergedb.py Normal file
@@ -0,0 +1,123 @@
import sqlite3
from pathlib import Path

# Get the home directory
home_dir = Path.home()

# Define the path to the database
DB = home_dir / "sync" / "sijapi" / "data" / "weatherlocate.db"

def create_database():
    with sqlite3.connect(DB) as conn:
        cursor = conn.cursor()

        # Create the Locations table
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS Locations (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                street TEXT,
                city TEXT,
                state TEXT,
                country TEXT,
                latitude REAL,
                longitude REAL,
                zip TEXT,
                elevation REAL,
                last_updated DATETIME
            );
        ''')

        # Create the Days table with a direct reference to DailyWeather
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS Days (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                date DATE UNIQUE NOT NULL,
                daily_weather_id INTEGER,
                general_location_id INTEGER,
                FOREIGN KEY(daily_weather_id) REFERENCES DailyWeather(id),
                FOREIGN KEY(general_location_id) REFERENCES Locations(id)
            );
        ''')

        # Create the DailyWeather table with fields adjusted for direct CSV storage of preciptype
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS DailyWeather (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                sunrise TEXT,
                sunriseEpoch TEXT,
                sunset TEXT,
                sunsetEpoch TEXT,
                description TEXT,
                tempmax REAL,
                tempmin REAL,
                uvindex INTEGER,
                winddir REAL,
                windspeed REAL,
                icon TEXT,
                last_updated DATETIME,
                datetime TEXT,
                datetimeEpoch INTEGER,
                temp REAL,
                feelslikemax REAL,
                feelslikemin REAL,
                feelslike REAL,
                dew REAL,
                humidity REAL,
                precip REAL,
                precipprob REAL,
                precipcover REAL,
                preciptype TEXT,
                snow REAL,
                snowdepth REAL,
                windgust REAL,
                pressure REAL,
                cloudcover REAL,
                visibility REAL,
                solarradiation REAL,
                solarenergy REAL,
                severerisk REAL,
                moonphase REAL,
                conditions TEXT,
                stations TEXT,
                source TEXT
            );
        ''')

        # Create the HourlyWeather table
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS HourlyWeather (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                day_id INTEGER,
                datetime TEXT,
                datetimeEpoch INTEGER,
                temp REAL,
                feelslike REAL,
                humidity REAL,
                dew REAL,
                precip REAL,
                precipprob REAL,
                snow REAL,
                snowdepth REAL,
                preciptype TEXT,
                windgust REAL,
                windspeed REAL,
                winddir REAL,
                pressure REAL,
                cloudcover REAL,
                visibility REAL,
                solarradiation REAL,
                solarenergy REAL,
                uvindex REAL,
                severerisk REAL,
                conditions TEXT,
                icon TEXT,
                stations TEXT,
                source TEXT,
                FOREIGN KEY(day_id) REFERENCES Days(id)
            );
        ''')

        conn.commit()

if __name__ == "__main__":
    create_database()
30 sijapi/helpers/database/mergedbtests.py Normal file
@@ -0,0 +1,30 @@
from fastapi import FastAPI, HTTPException
from typing import List
import sqlite3

app = FastAPI()

def get_db_connection():
    conn = sqlite3.connect('tracking.db')
    conn.row_factory = sqlite3.Row  # This enables column access by name: row['column_name']
    return conn

@app.get("/location/{date}", response_model=List[str])
async def read_location_zip(date: str):
    conn = get_db_connection()
    cursor = conn.cursor()
    print(f"Querying for date: {date}")  # Debugging output
    cursor.execute('''
        SELECT L.zip FROM Hours H
        JOIN Days D ON H.day_id = D.id
        JOIN Locations L ON H.location_id = L.id
        WHERE D.date = ?
    ''', (date,))
    zips = cursor.fetchall()
    print(f"Found zip codes: {zips}")  # Debugging output
    conn.close()
    if not zips:
        raise HTTPException(status_code=404, detail="No location data found for this date")
    return [zip[0] for zip in zips]
89 sijapi/helpers/database/osm_geocode_upload.py Normal file
@@ -0,0 +1,89 @@
import osmium
import psycopg2
import json

from sijapi import DB_USER, DB_PASS, DB_HOST, DB, DATA_DIR

OSM_DATA_PATH = DATA_DIR / "north-america-latest.osm.pbf"

class OSMHandler(osmium.SimpleHandler):
    def __init__(self, conn):
        osmium.SimpleHandler.__init__(self)
        self.conn = conn

    def node(self, n):
        tags = {tag.k: tag.v for tag in n.tags}
        cur = self.conn.cursor()
        cur.execute("""
            INSERT INTO nodes (id, location, tags)
            VALUES (%s, ST_SetSRID(ST_MAKEPOINT(%s, %s),4326), %s)
            """,
            (n.id, n.location.lon, n.location.lat, json.dumps(tags)))
        self.conn.commit()

    def way(self, w):
        nodes = [(node.lon, node.lat) for node in w.nodes]
        tags = {tag.k: tag.v for tag in w.tags}
        cur = self.conn.cursor()
        cur.execute("""
            INSERT INTO ways (id, nodes, tags)
            VALUES (%s, %s, %s)
            """,
            (w.id, json.dumps(nodes), json.dumps(tags)))
        self.conn.commit()

    def relation(self, r):
        members = [{"type": m.type, "ref": m.ref, "role": m.role} for m in r.members]
        tags = {tag.k: tag.v for tag in r.tags}
        cur = self.conn.cursor()
        cur.execute("""
            INSERT INTO relations (id, members, tags)
            VALUES (%s, %s, %s)
            """,
            (r.id, json.dumps(members), json.dumps(tags)))
        self.conn.commit()

def main():
    conn = psycopg2.connect(user=DB_USER, password=DB_PASS, dbname=DB, host=DB_HOST)
    cur = conn.cursor()

    # Drop existing tables if they exist
    cur.execute("DROP TABLE IF EXISTS nodes")
    cur.execute("DROP TABLE IF EXISTS ways")
    cur.execute("DROP TABLE IF EXISTS relations")

    # Create tables for nodes, ways, and relations
    cur.execute("""
        CREATE TABLE nodes (
            id bigint PRIMARY KEY,
            location geography(POINT, 4326),
            tags jsonb
        )
    """)

    cur.execute("""
        CREATE TABLE ways (
            id bigint PRIMARY KEY,
            nodes jsonb,
            tags jsonb
        )
    """)

    cur.execute("""
        CREATE TABLE relations (
            id bigint PRIMARY KEY,
            members jsonb,
            tags jsonb
        )
    """)

    conn.commit()

    handler = OSMHandler(conn)
    handler.apply_file(str(OSM_DATA_PATH))

    cur.close()
    conn.close()

if __name__ == "__main__":
    main()
95 sijapi/helpers/database/zip-init.py Executable file
@@ -0,0 +1,95 @@
import sqlite3
import csv
import logging

def create_geonames_table(conn):
    cursor = conn.cursor()
    # Create table with required columns for geocode_location function
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS geonames (
            zip TEXT,
            city TEXT,
            state TEXT,
            country_code TEXT,
            latitude TEXT,
            longitude TEXT,
            region TEXT DEFAULT NULL,
            altitude TEXT DEFAULT NULL,
            street TEXT DEFAULT NULL
        );
    """)
    conn.commit()


# Data importation
def import_geonames_data(conn, file_path):
    cursor = conn.cursor()
    with open(file_path, 'r', encoding='utf-8') as csvfile:
        reader = csv.reader(csvfile)
        next(reader)  # Skip the header row
        for row in reader:
            if len(row) < 7:  # Ensuring there are enough columns
                logging.warning("Skipped a line due to insufficient data.")
                continue
            try:
                cursor.execute("""
                    INSERT INTO geonames (zip, latitude, longitude, city, state, region, country_code, altitude, street)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?);
                """, (row[0], row[1], row[2], row[3], row[4], row[5], 'US', None, None))
            except sqlite3.DatabaseError as e:
                logging.error(f"Database error: {e}")
    conn.commit()

# Geocode location based on ZIP
def geocode_location(conn, zip_code):
    cursor = conn.cursor()
    query = """SELECT latitude, longitude FROM geonames WHERE zip = ?;"""
    cursor.execute(query, (zip_code,))
    result = cursor.fetchone()
    if result:
        return result
    else:
        return None, None

# Test function to validate database setup and query
def test_geocode_function(conn):
    # This tests a known ZIP code; replace '97401' with a ZIP code from your data
    latitude, longitude = geocode_location(conn, '97401')
    if latitude and longitude:
        print(f"Test passed! Latitude: {latitude}, Longitude: {longitude}")
    else:
        print("Test failed. No data returned.")

# Example test within the same script or a separate test script
def test_geocode_function2(conn):
    # Example data insertion for testing
    conn.execute("INSERT INTO geonames (zip, city, state, country_code, latitude, longitude) VALUES ('99999', 'Testville', 'TestState', 'US', '45.0', '-93.0')")
    conn.commit()

    # Test retrieval
    location_info = geocode_location(conn, zip_code='99999')
    print(location_info)

# Assuming you call this test function from your main or setup


# Main execution function
def main():
    logging.basicConfig(level=logging.INFO)
    db_path = 'geonames.db'
    file_path = 'US.csv'

    conn = sqlite3.connect(db_path)
    create_geonames_table(conn)
    import_geonames_data(conn, file_path)

    # Run the test
    test_geocode_function(conn)
    test_geocode_function2(conn)

    conn.close()

if __name__ == "__main__":
    main()
@@ -1,71 +0,0 @@
import asyncio
import asyncpg
import yaml
from pathlib import Path

async def load_config():
    config_path = Path(__file__).parent.parent / 'config' / 'sys.yaml'
    with open(config_path, 'r') as file:
        return yaml.safe_load(file)

async def add_foreign_key_constraint(conn):
    # Ensure short_code is not null in both tables
    await conn.execute("""
        ALTER TABLE short_urls
        ALTER COLUMN short_code SET NOT NULL;
    """)

    await conn.execute("""
        ALTER TABLE click_logs
        ALTER COLUMN short_code SET NOT NULL;
    """)

    # Add unique constraint to short_urls.short_code if it doesn't exist
    await conn.execute("""
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1
                FROM pg_constraint
                WHERE conname = 'short_urls_short_code_key'
            ) THEN
                ALTER TABLE short_urls
                ADD CONSTRAINT short_urls_short_code_key UNIQUE (short_code);
            END IF;
        END $$;
    """)

    # Add foreign key constraint
    await conn.execute("""
        ALTER TABLE click_logs
        ADD CONSTRAINT fk_click_logs_short_urls
        FOREIGN KEY (short_code)
        REFERENCES short_urls(short_code)
        ON DELETE CASCADE;
    """)

    print("Foreign key constraint added successfully.")

async def main():
    config = await load_config()
    source_server = config['POOL'][0]  # sij-mbp16

    conn_params = {
        'database': source_server['db_name'],
        'user': source_server['db_user'],
        'password': source_server['db_pass'],
        'host': source_server['ts_ip'],
        'port': source_server['db_port']
    }

    conn = await asyncpg.connect(**conn_params)

    try:
        await add_foreign_key_constraint(conn)
    except Exception as e:
        print(f"An error occurred: {str(e)}")
    finally:
        await conn.close()

if __name__ == "__main__":
    asyncio.run(main())
@@ -1,89 +0,0 @@
import psycopg2
from psycopg2 import sql

def connect_to_db():
    return psycopg2.connect(
        dbname='sij',
        user='sij',
        password='Synchr0!',
        host='localhost'  # Adjust if your database is not on localhost
    )

def get_table_info(conn):
    with conn.cursor() as cur:
        # Get all tables in the public schema
        cur.execute("""
            SELECT table_name
            FROM information_schema.tables
            WHERE table_schema = 'public'
        """)
        tables = cur.fetchall()

        table_info = {}
        for (table_name,) in tables:
            table_info[table_name] = {
                'primary_keys': get_primary_keys(cur, table_name),
                'foreign_keys': get_foreign_keys(cur, table_name)
            }

    return table_info

def get_primary_keys(cur, table_name):
    cur.execute("""
        SELECT a.attname
        FROM pg_index i
        JOIN pg_attribute a ON a.attrelid = i.indrelid
        AND a.attnum = ANY(i.indkey)
        WHERE i.indrelid = %s::regclass
        AND i.indisprimary
    """, (table_name,))
    return [row[0] for row in cur.fetchall()]

def get_foreign_keys(cur, table_name):
    cur.execute("""
        SELECT
            tc.constraint_name,
            kcu.column_name,
            ccu.table_name AS foreign_table_name,
            ccu.column_name AS foreign_column_name
        FROM
            information_schema.table_constraints AS tc
            JOIN information_schema.key_column_usage AS kcu
              ON tc.constraint_name = kcu.constraint_name
              AND tc.table_schema = kcu.table_schema
            JOIN information_schema.constraint_column_usage AS ccu
              ON ccu.constraint_name = tc.constraint_name
              AND ccu.table_schema = tc.table_schema
        WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name=%s
    """, (table_name,))
    return cur.fetchall()

def main():
    try:
        with connect_to_db() as conn:
            table_info = get_table_info(conn)

            for table_name, info in table_info.items():
                print(f"\n## Table: {table_name}")

                print("\nPrimary Keys:")
                if info['primary_keys']:
                    for pk in info['primary_keys']:
                        print(f"- {pk}")
                else:
                    print("- No primary keys found")

                print("\nForeign Keys:")
                if info['foreign_keys']:
                    for fk in info['foreign_keys']:
                        print(f"- {fk[1]} -> {fk[2]}.{fk[3]} (Constraint: {fk[0]})")
                else:
                    print("- No foreign keys found")

    except psycopg2.Error as e:
        print(f"Database error: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")

if __name__ == "__main__":
    main()
@@ -1,83 +0,0 @@
import yaml
import subprocess
import os
import sys

def load_config():
    with open('../config/sys.yaml', 'r') as file:
        return yaml.safe_load(file)

def run_command(command):
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout, stderr = process.communicate()
    return process.returncode, stdout.decode(), stderr.decode()

def pg_dump(host, port, db_name, user, password, tables):
    dump_command = f"PGPASSWORD={password} pg_dump -h {host} -p {port} -U {user} -d {db_name} -t {' -t '.join(tables)} -c --no-owner"
    return run_command(dump_command)

def pg_restore(host, port, db_name, user, password, dump_data):
    restore_command = f"PGPASSWORD={password} psql -h {host} -p {port} -U {user} -d {db_name}"
    process = subprocess.Popen(restore_command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout, stderr = process.communicate(input=dump_data.encode())
    return process.returncode, stdout.decode(), stderr.decode()

def check_postgres_version(host, port, user, password):
    version_command = f"PGPASSWORD={password} psql -h {host} -p {port} -U {user} -c 'SELECT version();'"
    returncode, stdout, stderr = run_command(version_command)
    if returncode == 0:
        return stdout.strip()
    else:
        return f"Error checking version: {stderr}"

def replicate_databases():
    config = load_config()
    pool = config['POOL']
    tables_to_replicate = ['click_logs', 'dailyweather', 'hourlyweather', 'locations', 'short_urls']

    source_db = pool[0]
    target_dbs = pool[1:]

    # Check source database version
    source_version = check_postgres_version(source_db['ts_ip'], source_db['db_port'], source_db['db_user'], source_db['db_pass'])
    print(f"Source database version: {source_version}")

    for target_db in target_dbs:
        print(f"\nReplicating to {target_db['ts_id']}...")

        # Check target database version
        target_version = check_postgres_version(target_db['ts_ip'], target_db['db_port'], target_db['db_user'], target_db['db_pass'])
        print(f"Target database version: {target_version}")

        # Perform dump
        returncode, dump_data, stderr = pg_dump(
            source_db['ts_ip'],
            source_db['db_port'],
            source_db['db_name'],
            source_db['db_user'],
            source_db['db_pass'],
            tables_to_replicate
        )

        if returncode != 0:
            print(f"Error during dump: {stderr}")
            continue

        # Perform restore
        returncode, stdout, stderr = pg_restore(
            target_db['ts_ip'],
            target_db['db_port'],
            target_db['db_name'],
            target_db['db_user'],
            target_db['db_pass'],
            dump_data
        )

        if returncode != 0:
            print(f"Error during restore: {stderr}")
        else:
            print(f"Replication to {target_db['ts_id']} completed successfully.")

if __name__ == "__main__":
    replicate_databases()
@@ -1,76 +0,0 @@
#!/usr/bin/env python3

import os
import yaml
import subprocess

def load_config():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    sys_config_path = os.path.join(script_dir, '..', 'config', 'sys.yaml')
    gis_config_path = os.path.join(script_dir, '..', 'config', 'gis.yaml')

    with open(sys_config_path, 'r') as f:
        sys_config = yaml.safe_load(f)

    with open(gis_config_path, 'r') as f:
        gis_config = yaml.safe_load(f)

    return sys_config, gis_config

def replicate_table(source, targets, table_name):
    print(f"Replicating {table_name}")

    # Dump the table from the source
    dump_command = [
        'pg_dump',
        '-h', source['ts_ip'],
        '-p', str(source['db_port']),
        '-U', source['db_user'],
        '-d', source['db_name'],
        '-t', table_name,
        '--no-owner',
        '--no-acl'
    ]

    env = os.environ.copy()
    env['PGPASSWORD'] = source['db_pass']

    with open(f"{table_name}.sql", 'w') as f:
        subprocess.run(dump_command, env=env, stdout=f, check=True)

    # Restore the table to each target
    for target in targets:
        print(f"Replicating to {target['ts_id']}")
        restore_command = [
            'psql',
            '-h', target['ts_ip'],
            '-p', str(target['db_port']),
            '-U', target['db_user'],
            '-d', target['db_name'],
            '-c', f"DROP TABLE IF EXISTS {table_name} CASCADE;",
            '-f', f"{table_name}.sql"
        ]

        env = os.environ.copy()
        env['PGPASSWORD'] = target['db_pass']

        subprocess.run(restore_command, env=env, check=True)

    # Clean up the dump file
    os.remove(f"{table_name}.sql")

def main():
    sys_config, gis_config = load_config()

    source_server = sys_config['POOL'][0]
    target_servers = sys_config['POOL'][1:]

    tables = [layer['table_name'] for layer in gis_config['layers']]

    for table in tables:
        replicate_table(source_server, target_servers, table)

    print("Replication complete!")

if __name__ == "__main__":
    main()
@@ -1,46 +0,0 @@
import asyncio
import asyncpg

# Database connection information
DB_INFO = {
    'host': '100.64.64.20',
    'port': 5432,
    'database': 'sij',
    'user': 'sij',
    'password': 'Synchr0!'
}

async def update_click_logs():
    # Connect to the database
    conn = await asyncpg.connect(**DB_INFO)

    try:
        # Drop existing 'id' and 'new_id' columns if they exist
        await conn.execute("""
            ALTER TABLE click_logs
            DROP COLUMN IF EXISTS id,
            DROP COLUMN IF EXISTS new_id;
        """)
        print("Dropped existing id and new_id columns (if they existed)")

        # Add new UUID column as primary key
        await conn.execute("""
            ALTER TABLE click_logs
            ADD COLUMN id UUID PRIMARY KEY DEFAULT gen_random_uuid();
        """)
        print("Added new UUID column as primary key")

        # Get the number of rows in the table
        row_count = await conn.fetchval("SELECT COUNT(*) FROM click_logs")
        print(f"Number of rows in click_logs: {row_count}")

    except Exception as e:
        print(f"An error occurred: {str(e)}")
        import traceback
        traceback.print_exc()
    finally:
        # Close the database connection
        await conn.close()

# Run the update
asyncio.run(update_click_logs())
@@ -1,25 +0,0 @@
version: '3.8'

services:
  db:
    image: postgis/postgis:16-3.4
    container_name: sij_postgres
    environment:
      POSTGRES_DB: sij
      POSTGRES_USER: sij
      POSTGRES_PASSWORD: Synchr0!
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
    ports:
      - "5432:5432"
    networks:
      - sij_network

networks:
  sij_network:
    driver: bridge

volumes:
  postgres_data:
@@ -1,11 +0,0 @@
#!/bin/bash
set -e

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
    CREATE EXTENSION IF NOT EXISTS postgis;
    CREATE EXTENSION IF NOT EXISTS postgis_topology;
EOSQL

# Modify pg_hba.conf to allow connections from Tailscale network
echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf
@@ -1,191 +0,0 @@
import psycopg2
from psycopg2 import sql
import sys

def connect_to_db():
    return psycopg2.connect(
        dbname='sij',
        user='sij',
        password='Synchr0!',
        host='localhost'
    )

def get_tables(cur):
    cur.execute("""
        SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
        AND table_name NOT LIKE '%_uuid' AND table_name NOT LIKE '%_orig'
        AND table_name != 'spatial_ref_sys'
    """)
    return [row[0] for row in cur.fetchall()]

def get_columns(cur, table_name):
    cur.execute("""
        SELECT column_name, udt_name,
               is_nullable, column_default,
               character_maximum_length, numeric_precision, numeric_scale
        FROM information_schema.columns
        WHERE table_name = %s
        ORDER BY ordinal_position
    """, (table_name,))
    return cur.fetchall()

def get_constraints(cur, table_name):
    cur.execute("""
        SELECT conname, contype, pg_get_constraintdef(c.oid)
        FROM pg_constraint c
        JOIN pg_namespace n ON n.oid = c.connamespace
        WHERE conrelid = %s::regclass
        AND n.nspname = 'public'
    """, (table_name,))
    return cur.fetchall()

def drop_table_if_exists(cur, table_name):
    cur.execute(sql.SQL("DROP TABLE IF EXISTS {} CASCADE").format(sql.Identifier(table_name)))

def create_uuid_table(cur, old_table, new_table):
    drop_table_if_exists(cur, new_table)
    columns = get_columns(cur, old_table)
    constraints = get_constraints(cur, old_table)

    column_defs = []
    has_id_column = any(col[0] == 'id' for col in columns)

    for col in columns:
        col_name, udt_name, is_nullable, default, max_length, precision, scale = col
        if col_name == 'id' and has_id_column:
            column_defs.append(sql.SQL("{} UUID PRIMARY KEY DEFAULT gen_random_uuid()").format(sql.Identifier(col_name)))
        else:
            type_sql = sql.SQL("{}").format(sql.Identifier(udt_name))
            if max_length:
                type_sql = sql.SQL("{}({})").format(type_sql, sql.Literal(max_length))
            elif precision and scale:
                type_sql = sql.SQL("{}({},{})").format(type_sql, sql.Literal(precision), sql.Literal(scale))

            column_def = sql.SQL("{} {}").format(sql.Identifier(col_name), type_sql)
            if is_nullable == 'NO':
                column_def = sql.SQL("{} NOT NULL").format(column_def)
            if default and 'nextval' not in default:  # Skip auto-increment defaults
                column_def = sql.SQL("{} DEFAULT {}").format(column_def, sql.SQL(default))
            column_defs.append(column_def)

    constraint_defs = []
    for constraint in constraints:
        conname, contype, condef = constraint
        if contype != 'p' or not has_id_column:  # Keep primary key if there's no id column
            constraint_defs.append(sql.SQL(condef))

    if not has_id_column:
        column_defs.append(sql.SQL("uuid UUID DEFAULT gen_random_uuid()"))

    query = sql.SQL("CREATE TABLE {} ({})").format(
        sql.Identifier(new_table),
        sql.SQL(", ").join(column_defs + constraint_defs)
    )
    cur.execute(query)

def migrate_data(cur, old_table, new_table):
    columns = get_columns(cur, old_table)
    column_names = [col[0] for col in columns]
    has_id_column = 'id' in column_names

    if has_id_column:
        column_names.remove('id')
        old_cols = sql.SQL(", ").join(map(sql.Identifier, column_names))
        new_cols = sql.SQL(", ").join(map(sql.Identifier, ['id'] + column_names))
        query = sql.SQL("INSERT INTO {} ({}) SELECT gen_random_uuid(), {} FROM {}").format(
            sql.Identifier(new_table),
            new_cols,
            old_cols,
            sql.Identifier(old_table)
        )
    else:
        old_cols = sql.SQL(", ").join(map(sql.Identifier, column_names))
        new_cols = sql.SQL(", ").join(map(sql.Identifier, column_names + ['uuid']))
        query = sql.SQL("INSERT INTO {} ({}) SELECT {}, gen_random_uuid() FROM {}").format(
            sql.Identifier(new_table),
            new_cols,
            old_cols,
            sql.Identifier(old_table)
        )
    cur.execute(query)

def update_foreign_keys(cur, tables):
    for table in tables:
        constraints = get_constraints(cur, table)
        for constraint in constraints:
            conname, contype, condef = constraint
            if contype == 'f':  # Foreign key constraint
                referenced_table = condef.split('REFERENCES ')[1].split('(')[0].strip()
                referenced_column = condef.split('(')[2].split(')')[0].strip()
                local_column = condef.split('(')[1].split(')')[0].strip()

                cur.execute(sql.SQL("""
                    UPDATE {table_uuid}
                    SET {local_column} = subquery.new_id::text::{local_column_type}
                    FROM (
                        SELECT old.{ref_column} AS old_id, new_table.id AS new_id
                        FROM {ref_table} old
                        JOIN public.{ref_table_uuid} new_table ON new_table.{ref_column}::text = old.{ref_column}::text
                    ) AS subquery
                    WHERE {local_column}::text = subquery.old_id::text
                """).format(
                    table_uuid=sql.Identifier(f"{table}_uuid"),
                    local_column=sql.Identifier(local_column),
                    local_column_type=sql.SQL(get_column_type(cur, f"{table}_uuid", local_column)),
                    ref_column=sql.Identifier(referenced_column),
                    ref_table=sql.Identifier(referenced_table),
                    ref_table_uuid=sql.Identifier(f"{referenced_table}_uuid")
                ))

def get_column_type(cur, table_name, column_name):
    cur.execute("""
        SELECT data_type
        FROM information_schema.columns
        WHERE table_name = %s AND column_name = %s
    """, (table_name, column_name))
    return cur.fetchone()[0]

def rename_tables(cur, tables):
    for table in tables:
        drop_table_if_exists(cur, f"{table}_orig")
        cur.execute(sql.SQL("ALTER TABLE IF EXISTS {} RENAME TO {}").format(
            sql.Identifier(table), sql.Identifier(f"{table}_orig")
        ))
        cur.execute(sql.SQL("ALTER TABLE IF EXISTS {} RENAME TO {}").format(
            sql.Identifier(f"{table}_uuid"), sql.Identifier(table)
        ))

def main():
    try:
        with connect_to_db() as conn:
            with conn.cursor() as cur:
                tables = get_tables(cur)

                # Create new UUID tables
                for table in tables:
                    print(f"Creating UUID table for {table}...")
                    create_uuid_table(cur, table, f"{table}_uuid")

                # Migrate data
                for table in tables:
                    print(f"Migrating data for {table}...")
                    migrate_data(cur, table, f"{table}_uuid")

                # Update foreign keys
                print("Updating foreign key references...")
                update_foreign_keys(cur, tables)

                # Rename tables
                print("Renaming tables...")
                rename_tables(cur, tables)

                conn.commit()
                print("Migration completed successfully.")
    except Exception as e:
        print(f"An error occurred: {e}")
        conn.rollback()

if __name__ == "__main__":
    main()
@@ -1,35 +0,0 @@
#!/bin/bash

# PostgreSQL connection details
DB_NAME="sij"
DB_USER="sij"
DB_PASSWORD="Synchr0!"
DB_HOST="localhost"
DB_PORT="5432"

# Function to execute SQL commands
execute_sql() {
    PGPASSWORD=$DB_PASSWORD psql -h $DB_HOST -p $DB_PORT -U $DB_USER -d $DB_NAME -c "$1"
}

# Main script
echo "Starting migration of query_tracking table..."

# Enable uuid-ossp extension if not already enabled
execute_sql "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"

# Add a new UUID column
execute_sql "ALTER TABLE query_tracking ADD COLUMN new_id UUID DEFAULT uuid_generate_v4();"

# Generate new UUIDs for all existing rows
execute_sql "UPDATE query_tracking SET new_id = uuid_generate_v4() WHERE new_id IS NULL;"

# Drop the old id column and rename the new one
execute_sql "ALTER TABLE query_tracking DROP COLUMN id;"
execute_sql "ALTER TABLE query_tracking RENAME COLUMN new_id TO id;"

# Set the new id column as primary key
execute_sql "ALTER TABLE query_tracking ADD PRIMARY KEY (id);"

echo "Migration completed successfully!"
@@ -1,101 +0,0 @@
import asyncio
import asyncpg
import yaml
from pathlib import Path
import subprocess

async def load_config():
    config_path = Path(__file__).parent.parent / 'config' / 'db.yaml'
    with open(config_path, 'r') as file:
        return yaml.safe_load(file)

async def get_table_size(conn, table_name):
    return await conn.fetchval(f"SELECT COUNT(*) FROM {table_name}")

async def check_postgres_version(conn):
    return await conn.fetchval("SELECT version()")

async def replicate_table(source, target, table_name):
    print(f"Replicating {table_name} from {source['ts_id']} to {target['ts_id']}")

    source_conn = await asyncpg.connect(**{k: source[k] for k in ['db_name', 'db_user', 'db_pass', 'ts_ip', 'db_port']})
    target_conn = await asyncpg.connect(**{k: target[k] for k in ['db_name', 'db_user', 'db_pass', 'ts_ip', 'db_port']})

    try:
        source_version = await check_postgres_version(source_conn)
        target_version = await check_postgres_version(target_conn)
        print(f"Source database version: {source_version}")
        print(f"Target database version: {target_version}")

        table_size = await get_table_size(source_conn, table_name)
        print(f"Table size: {table_size} rows")

        # Dump the table
        dump_command = [
            'pg_dump',
            '-h', source['ts_ip'],
            '-p', str(source['db_port']),
            '-U', source['db_user'],
            '-d', source['db_name'],
            '-t', table_name,
            '--no-owner',
            '--no-acl'
        ]
        env = {'PGPASSWORD': source['db_pass']}
        dump_result = subprocess.run(dump_command, env=env, capture_output=True, text=True)

        if dump_result.returncode != 0:
            raise Exception(f"Dump failed: {dump_result.stderr}")

        print("Dump completed successfully")

        # Drop and recreate the table on the target
        await target_conn.execute(f"DROP TABLE IF EXISTS {table_name} CASCADE")
        print(f"Dropped table {table_name} on target")

        # Restore the table
        restore_command = [
            'psql',
            '-h', target['ts_ip'],
            '-p', str(target['db_port']),
            '-U', target['db_user'],
            '-d', target['db_name'],
        ]
        env = {'PGPASSWORD': target['db_pass']}
        restore_result = subprocess.run(restore_command, input=dump_result.stdout, env=env, capture_output=True, text=True)

        if restore_result.returncode != 0:
            raise Exception(f"Restore failed: {restore_result.stderr}")

        print(f"Table {table_name} restored successfully")

        # Verify the number of rows in the target table
        target_size = await get_table_size(target_conn, table_name)
        if target_size == table_size:
            print(f"Replication successful. {target_size} rows copied.")
        else:
            print(f"Warning: Source had {table_size} rows, but target has {target_size} rows.")

    except Exception as e:
        print(f"An error occurred while replicating {table_name}: {str(e)}")
    finally:
        await source_conn.close()
        await target_conn.close()

async def main():
    config = await load_config()
    source_server = config['POOL'][0]  # sij-mbp16
    target_servers = config['POOL'][1:]  # sij-vm and sij-vps

    tables_to_replicate = [
        'click_logs', 'dailyweather', 'hourlyweather', 'locations', 'short_urls'
    ]

    for table_name in tables_to_replicate:
        for target_server in target_servers:
            await replicate_table(source_server, target_server, table_name)

    print("All replications completed!")

if __name__ == "__main__":
    asyncio.run(main())
@ -1,135 +0,0 @@
|
||||||
#!/usr/bin/env python3

import os
import yaml
import subprocess
import time
from tqdm import tqdm

# Configuration variables
CONFIG_FILE = 'sys.yaml'
POOL_KEY = 'POOL'
TABLES_KEY = 'TABLES'
SOURCE_INDEX = 0

def load_config():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    project_root = os.path.abspath(os.path.join(script_dir, '..', '..'))
    config_path = os.path.join(project_root, 'config', CONFIG_FILE)

    with open(config_path, 'r') as f:
        config = yaml.safe_load(f)

    return config

def get_table_size(server, table_name):
    env = os.environ.copy()
    env['PGPASSWORD'] = server['db_pass']

    command = [
        'psql',
        '-h', server['ts_ip'],
        '-p', str(server['db_port']),
        '-U', server['db_user'],
        '-d', server['db_name'],
        '-t',
        '-c', f"SELECT COUNT(*) FROM {table_name}"
    ]

    result = subprocess.run(command, env=env, capture_output=True, text=True, check=True)
    return int(result.stdout.strip())

def replicate_table(source, targets, table_name):
    print(f"Replicating {table_name}")

    # Get table size for progress bar
    table_size = get_table_size(source, table_name)
    print(f"Table size: {table_size} rows")

    # Dump the table from the source
    dump_command = [
        'pg_dump',
        '-h', source['ts_ip'],
        '-p', str(source['db_port']),
        '-U', source['db_user'],
        '-d', source['db_name'],
        '-t', table_name,
        '--no-owner',
        '--no-acl'
    ]

    env = os.environ.copy()
    env['PGPASSWORD'] = source['db_pass']

    print("Dumping table...")
    with open(f"{table_name}.sql", 'w') as f:
        subprocess.run(dump_command, env=env, stdout=f, check=True)
    print("Dump complete")

    # Restore the table to each target
    for target in targets:
        print(f"Replicating to {target['ts_id']}")

        # Drop table and its sequence
        drop_commands = [
            f"DROP TABLE IF EXISTS {table_name} CASCADE;",
            f"DROP SEQUENCE IF EXISTS {table_name}_id_seq CASCADE;"
        ]

        restore_command = [
            'psql',
            '-h', target['ts_ip'],
            '-p', str(target['db_port']),
            '-U', target['db_user'],
            '-d', target['db_name'],
        ]

        env = os.environ.copy()
        env['PGPASSWORD'] = target['db_pass']

        # Execute drop commands
        for cmd in drop_commands:
            print(f"Executing: {cmd}")
            subprocess.run(restore_command + ['-c', cmd], env=env, check=True)

        # Restore the table. psql prints "COPY <n>" command tags on stdout,
        # so read stdout for progress and keep stderr for error reporting.
        print("Restoring table...")
        process = subprocess.Popen(restore_command + ['-f', f"{table_name}.sql"], env=env,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)

        pbar = tqdm(total=table_size, desc="Copying rows")
        copied_rows = 0
        for line in process.stdout:
            if line.startswith("COPY"):
                copied_rows = int(line.split()[1])
                pbar.update(copied_rows - pbar.n)
            print(line, end='')  # Print all output for visibility

        pbar.close()
        stderr_output = process.stderr.read()
        process.wait()

        if process.returncode != 0:
            print(f"Error occurred during restoration to {target['ts_id']}")
            print(stderr_output)
        else:
            print(f"Restoration to {target['ts_id']} completed successfully")

    # Clean up the dump file
    os.remove(f"{table_name}.sql")
    print(f"Replication of {table_name} completed")

def main():
    config = load_config()

    source_server = config[POOL_KEY][SOURCE_INDEX]
    target_servers = config[POOL_KEY][SOURCE_INDEX + 1:]

    tables = list(config[TABLES_KEY].keys())

    for table in tables:
        replicate_table(source_server, target_servers, table)

    print("All replications completed!")

if __name__ == "__main__":
    main()
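A minimal sketch, assuming config/sys.yaml parses into the structure below; this is inferred only from the keys the script reads (POOL, TABLES, ts_id, ts_ip, db_port, db_user, db_pass, db_name). The host names, addresses, and credentials are placeholders, not values from the repo:

# Hypothetical illustration of the parsed config the replication script expects.
example_config = {
    'POOL': [
        # First entry is treated as the source (SOURCE_INDEX = 0); the rest are targets.
        {'ts_id': 'source-host', 'ts_ip': '100.64.64.1', 'db_port': 5432,
         'db_user': 'user', 'db_pass': 'password', 'db_name': 'db'},
        {'ts_id': 'target-host', 'ts_ip': '100.64.64.2', 'db_port': 5432,
         'db_user': 'user', 'db_pass': 'password', 'db_name': 'db'},
    ],
    # Only the keys of TABLES are used; each key names a table to replicate.
    'TABLES': {'dailyweather': {}, 'locations': {}},
}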
@ -1,118 +0,0 @@
#!/bin/bash

# Configuration
SOURCE_HOST="100.64.64.20"
SOURCE_PORT="5432"
SOURCE_DB="sij"
SOURCE_USER="sij"
SOURCE_PASS="Synchr0!"

# Target servers
declare -a TARGETS=(
    "sij-vm:100.64.64.11:5432:sij:sij:Synchr0!"
    "sij-vps:100.64.64.15:5432:sij:sij:Synchr0!"
)

# Tables to replicate
TABLES=("dailyweather" "hourlyweather" "short_urls" "click_logs" "locations" "query_tracking")

# PostgreSQL binaries
PSQL="/Applications/Postgres.app/Contents/Versions/latest/bin/psql"
PG_DUMP="/Applications/Postgres.app/Contents/Versions/latest/bin/pg_dump"

# Function to run SQL and display results
run_sql() {
    local host=$1
    local port=$2
    local db=$3
    local user=$4
    local pass=$5
    local sql=$6

    PGPASSWORD=$pass $PSQL -h $host -p $port -U $user -d $db -c "$sql"
}

# Replicate to a target
replicate_to_target() {
    local target_info=$1
    IFS=':' read -r target_name target_host target_port target_db target_user target_pass <<< "$target_info"

    echo "Replicating to $target_name ($target_host)"

    # Check source tables
    echo "Checking source tables:"
    for table in "${TABLES[@]}"; do
        run_sql $SOURCE_HOST $SOURCE_PORT $SOURCE_DB $SOURCE_USER $SOURCE_PASS "SELECT COUNT(*) FROM $table;"
    done

    # Ensure uuid-ossp extension is created
    run_sql $target_host $target_port $target_db $target_user $target_pass "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"

    # Dump and restore each table
    for table in "${TABLES[@]}"; do
        echo "Replicating $table"

        if [ "$table" == "query_tracking" ]; then
            # Dump structure
            PGPASSWORD=$SOURCE_PASS $PG_DUMP -h $SOURCE_HOST -p $SOURCE_PORT -U $SOURCE_USER -d $SOURCE_DB -t $table --schema-only --no-owner --no-acl > ${table}_structure.sql

            # Dump data
            PGPASSWORD=$SOURCE_PASS $PG_DUMP -h $SOURCE_HOST -p $SOURCE_PORT -U $SOURCE_USER -d $SOURCE_DB -t $table --data-only --no-owner --no-acl > ${table}_data.sql

            # Drop and recreate table on target
            run_sql $target_host $target_port $target_db $target_user $target_pass "DROP TABLE IF EXISTS $table CASCADE;"

            # Restore structure
            PGPASSWORD=$target_pass $PSQL -h $target_host -p $target_port -U $target_user -d $target_db -f ${table}_structure.sql

            # Restore data
            PGPASSWORD=$target_pass $PSQL -h $target_host -p $target_port -U $target_user -d $target_db -f ${table}_data.sql

            # Clean up dump files
            rm ${table}_structure.sql ${table}_data.sql
        else
            # Dump table
            PGPASSWORD=$SOURCE_PASS $PG_DUMP -h $SOURCE_HOST -p $SOURCE_PORT -U $SOURCE_USER -d $SOURCE_DB -t $table --no-owner --no-acl > ${table}_dump.sql

            if [ $? -ne 0 ]; then
                echo "Error dumping $table"
                continue
            fi

            # Clean up the dump file:
            # remove empty lines, whitespace-only lines, and lines starting with "sij"
            sed -i.bak '/^\s*$/d; /^sij/d' ${table}_dump.sql && rm ${table}_dump.sql.bak

            # Drop and recreate table on target
            run_sql $target_host $target_port $target_db $target_user $target_pass "DROP TABLE IF EXISTS $table CASCADE;"

            # Restore table
            PGPASSWORD=$target_pass $PSQL -h $target_host -p $target_port -U $target_user -d $target_db -f ${table}_dump.sql

            if [ $? -ne 0 ]; then
                echo "Error restoring $table"
            else
                echo "$table replicated successfully"
            fi

            # Clean up dump file
            rm ${table}_dump.sql
        fi
    done

    # Verify replication
    echo "Verifying replication:"
    for table in "${TABLES[@]}"; do
        echo "Checking $table on target:"
        run_sql $target_host $target_port $target_db $target_user $target_pass "SELECT COUNT(*) FROM $table;"
    done
}

# Main replication process
for target in "${TARGETS[@]}"; do
    replicate_to_target "$target"
done

echo "Replication completed"
@ -1,127 +0,0 @@
# helpers/replicator.py

import asyncio
import asyncpg
import yaml
from pathlib import Path
import subprocess
import sys
import os

async def load_config():
    config_path = Path(__file__).parent.parent / 'config' / 'db.yaml'
    with open(config_path, 'r') as file:
        return yaml.safe_load(file)

async def check_table_existence(conn, tables):
    for table in tables:
        exists = await conn.fetchval("""
            SELECT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_schema = 'public'
                AND table_name = $1
            )
        """, table)
        print(f"Table {table} {'exists' if exists else 'does not exist'} in the database.")

async def check_user_permissions(conn, tables):
    for table in tables:
        has_permission = await conn.fetchval("""
            SELECT has_table_privilege(current_user, $1, 'SELECT')
        """, table)
        print(f"User {'has' if has_permission else 'does not have'} SELECT permission on table {table}.")

async def replicate_tables(source, target, tables):
    print(f"Replicating tables from {source['ts_id']} to {target['ts_id']}")

    conn_params = {
        'database': 'db_name',
        'user': 'db_user',
        'password': 'db_pass',
        'host': 'ts_ip',
        'port': 'db_port'
    }

    source_conn = await asyncpg.connect(**{k: source[v] for k, v in conn_params.items()})
    target_conn = await asyncpg.connect(**{k: target[v] for k, v in conn_params.items()})

    try:
        source_version = await source_conn.fetchval("SELECT version()")
        target_version = await target_conn.fetchval("SELECT version()")
        print(f"Source database version: {source_version}")
        print(f"Target database version: {target_version}")

        print("Checking table existence in source database:")
        await check_table_existence(source_conn, tables)

        print("\nChecking user permissions in source database:")
        await check_user_permissions(source_conn, tables)

        # Dump all tables to a file
        dump_file = 'dump.sql'
        dump_command = [
            '/Applications/Postgres.app/Contents/Versions/latest/bin/pg_dump',
            '-h', source['ts_ip'],
            '-p', str(source['db_port']),
            '-U', source['db_user'],
            '-d', source['db_name'],
            '--no-owner',
            '--no-acl',
            '-f', dump_file
        ]
        # Pass each table as its own -t flag; joining them into a single string
        # would make pg_dump treat the whole thing as one table pattern.
        for table in tables:
            dump_command += ['-t', table]
        env = {'PGPASSWORD': source['db_pass']}
        print(f"\nExecuting dump command: {' '.join(dump_command)}")
        dump_result = subprocess.run(dump_command, env=env, capture_output=True, text=True)

        if dump_result.returncode != 0:
            print(f"Dump stderr: {dump_result.stderr}")
            raise Exception(f"Dump failed: {dump_result.stderr}")

        print("Dump completed successfully.")

        # Restore from the dump file
        restore_command = [
            '/Applications/Postgres.app/Contents/Versions/latest/bin/psql',
            '-h', target['ts_ip'],
            '-p', str(target['db_port']),
            '-U', target['db_user'],
            '-d', target['db_name'],
            '-f', dump_file
        ]
        env = {'PGPASSWORD': target['db_pass']}
        print(f"\nExecuting restore command: {' '.join(restore_command)}")
        restore_result = subprocess.run(restore_command, env=env, capture_output=True, text=True)

        if restore_result.returncode != 0:
            print(f"Restore stderr: {restore_result.stderr}")
            raise Exception(f"Restore failed: {restore_result.stderr}")

        print("Restore completed successfully.")

        # Clean up the dump file
        os.remove(dump_file)

    except Exception as e:
        print(f"An error occurred during replication: {str(e)}")
        print("Exception details:", sys.exc_info())
    finally:
        await source_conn.close()
        await target_conn.close()

async def main():
    config = await load_config()
    source_server = config['POOL'][0]    # sij-mbp16
    target_servers = config['POOL'][1:]  # sij-vm and sij-vps

    tables_to_replicate = [
        'dailyweather', 'hourlyweather', 'short_urls', 'click_logs', 'locations'
    ]

    for target_server in target_servers:
        await replicate_tables(source_server, target_server, tables_to_replicate)

    print("All replications completed!")

if __name__ == "__main__":
    asyncio.run(main())
File diff suppressed because it is too large
@ -1,67 +0,0 @@
import asyncio
from pathlib import Path
from sijapi import EMAIL_CONFIG, EMAIL_LOGS
from sijapi.utilities import EmailAccount
from sijapi.routers import email
from sijapi.logs import get_logger

l = get_logger(__name__)


async def initialize_log_files():
    summarized_log = EMAIL_LOGS / "summarized.txt"
    autoresponded_log = EMAIL_LOGS / "autoresponded.txt"
    diagnostic_log = EMAIL_LOGS / "diagnostic.txt"
    for log_file in [summarized_log, autoresponded_log, diagnostic_log]:
        log_file.parent.mkdir(parents=True, exist_ok=True)
        log_file.write_text("")
    l.debug(f"Log files initialized: {summarized_log}, {autoresponded_log}, {diagnostic_log}")
    return summarized_log, autoresponded_log, diagnostic_log

async def process_all_emails(account: EmailAccount, summarized_log: Path, autoresponded_log: Path, diagnostic_log: Path):
    try:
        with email.get_imap_connection(account) as inbox:
            l.debug(f"Connected to {account.name}, processing all emails...")
            all_messages = inbox.messages()
            unread_messages = set(uid for uid, _ in inbox.messages(unread=True))

            processed_count = 0
            for identifier, message in all_messages:
                # Log diagnostic information
                with open(diagnostic_log, 'a') as f:
                    f.write(f"Account: {account.name}, Raw Identifier: {identifier}, Type: {type(identifier)}\n")

                # Attempt to get a string representation of the identifier
                if isinstance(identifier, bytes):
                    id_str = identifier.decode()
                elif isinstance(identifier, (int, str)):
                    id_str = str(identifier)
                else:
                    id_str = repr(identifier)

                if identifier not in unread_messages:
                    processed_count += 1
                    for log_file in [summarized_log, autoresponded_log]:
                        with open(log_file, 'a') as f:
                            f.write(f"{id_str}\n")

            l.info(f"Processed {processed_count} non-unread emails for account {account.name}")
    except Exception as e:
        l.error(f"An error occurred while processing emails for account {account.name}: {e}")

async def main():
    email_accounts = email.load_email_accounts(EMAIL_CONFIG)
    summarized_log, autoresponded_log, diagnostic_log = await initialize_log_files()

    l.debug(f"Processing {len(email_accounts)} email accounts")

    tasks = [process_all_emails(account, summarized_log, autoresponded_log, diagnostic_log) for account in email_accounts]
    await asyncio.gather(*tasks)

    # Final verification
    with open(summarized_log, 'r') as f:
        final_count = len(f.readlines())
    l.info(f"Final non-unread email count: {final_count}")

if __name__ == "__main__":
    asyncio.run(main())
76 sijapi/helpers/embeddings/embed.py Normal file
@ -0,0 +1,76 @@
from vectordb import Memory

memory = Memory(memory_file="embedding.pt",
                chunking_strategy={"mode": "sliding_window", "window_size": 128, "overlap": 16},
                embeddings='TaylorAI/bge-micro-v2')

texts = [
    """
Machine learning is a method of data analysis that automates analytical model building.

It is a branch of artificial intelligence based on the idea that systems can learn from data,
identify patterns and make decisions with minimal human intervention.

Machine learning algorithms are trained on data sets that contain examples of the desired output. For example, a machine learning algorithm that is used to classify images might be trained on a data set that contains images of cats and dogs.
Once an algorithm is trained, it can be used to make predictions on new data. For example, the machine learning algorithm that is used to classify images could be used to predict whether a new image contains a cat or a dog.

Machine learning algorithms can be used to solve a wide variety of problems. Some common applications of machine learning include:

Classification: Categorizing data into different groups. For example, a machine learning algorithm could be used to classify emails as spam or not spam.

Regression: Predicting a continuous value. For example, a machine learning algorithm could be used to predict the price of a house.

Clustering: Finding groups of similar data points. For example, a machine learning algorithm could be used to find groups of customers with similar buying habits.

Anomaly detection: Finding data points that are different from the rest of the data. For example, a machine learning algorithm could be used to find fraudulent credit card transactions.

Machine learning is a powerful tool that can be used to solve a wide variety of problems. As the amount of data available continues to grow, machine learning is likely to become even more important in the future.
""",
    """
Artificial intelligence (AI) is the simulation of human intelligence in machines
that are programmed to think like humans and mimic their actions.

The term may also be applied to any machine that exhibits traits associated with
a human mind such as learning and problem-solving.

AI research has been highly successful in developing effective techniques for solving a wide range of problems, from game playing to medical diagnosis.

However, there is still a long way to go before AI can truly match the intelligence of humans. One of the main challenges is that human intelligence is incredibly complex and poorly understood.

Despite the challenges, AI is a rapidly growing field with the potential to revolutionize many aspects of our lives. Some of the potential benefits of AI include:

Increased productivity: AI can be used to automate tasks that are currently performed by humans, freeing up our time for more creative and fulfilling activities.

Improved decision-making: AI can be used to make more informed decisions, based on a wider range of data than humans can typically access.

Enhanced creativity: AI can be used to generate new ideas and solutions, beyond what humans can imagine on their own.
Of course, there are also potential risks associated with AI, such as:

Job displacement: As AI becomes more capable, it is possible that it will displace some human workers.

Weaponization: AI could be used to develop new weapons that are more powerful and destructive than anything we have today.

Loss of control: If AI becomes too powerful, we may lose control over it, with potentially disastrous consequences.

It is important to weigh the potential benefits and risks of AI carefully as we continue to develop this technology. With careful planning and oversight, AI has the potential to make the world a better place. However, if we are not careful, it could also lead to serious problems.
""",
]

metadata_list = [
    {
        "title": "Introduction to Machine Learning",
        "url": "https://example.com/introduction-to-machine-learning",
    },
    {
        "title": "Introduction to Artificial Intelligence",
        "url": "https://example.com/introduction-to-artificial-intelligence",
    },
]

memory.save(texts, metadata_list)

query = "What is the relationship between AI and machine learning?"
results = memory.search(query, top_n=3, unique=True)
print(results)

# two results will be returned as unique param is set to True
15 sijapi/helpers/embeddings/embeddings.py Normal file
@ -0,0 +1,15 @@
from vectordb import Memory

# Memory is where all content you want to store/search goes.
memory = Memory()

memory.save(
    ["apples are green", "oranges are orange"],  # save your text content; long text is chunked automatically
    [{"url": "https://apples.com"}, {"url": "https://oranges.com"}],  # associate any kind of metadata with it (optional)
)

# Search for top n relevant results, automatically using embeddings
query = "green"
results = memory.search(query, top_n=1)

print(results)
@ -1,225 +0,0 @@
#!/usr/bin/env python3

import requests
import json
import time
import os
import subprocess
import sys
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from datetime import datetime

# Environment variables for database connection
DB_NAME = os.getenv('DB_NAME', 'sij')
DB_USER = os.getenv('DB_USER', 'sij')
DB_PASSWORD = os.getenv('DB_PASSWORD', 'Synchr0!')
DB_HOST = os.getenv('DB_HOST', 'localhost')
DB_PORT = os.getenv('DB_PORT', '5432')

def get_feature_count(url):
    params = {
        'where': '1=1',
        'returnCountOnly': 'true',
        'f': 'json'
    }
    retries = Retry(total=10, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
    with requests.Session() as session:
        session.mount("https://", HTTPAdapter(max_retries=retries))
        response = session.get(url, params=params, timeout=30)
        response.raise_for_status()
        data = response.json()
        return data.get('count', 0)

def fetch_features(url, offset, num, max_retries=5):
    params = {
        'where': '1=1',
        'outFields': '*',
        'geometryPrecision': 6,
        'outSR': 4326,
        'f': 'json',
        'resultOffset': offset,
        'resultRecordCount': num
    }
    for attempt in range(max_retries):
        try:
            retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
            with requests.Session() as session:
                session.mount("https://", HTTPAdapter(max_retries=retries))
                response = session.get(url, params=params, timeout=30)
                response.raise_for_status()
                return response.json()
        except requests.exceptions.RequestException as e:
            print(f"Error fetching features (attempt {attempt + 1}/{max_retries}): {e}")
            if attempt == max_retries - 1:
                raise
            time.sleep(5 * (attempt + 1))  # Exponential backoff

def download_layer(layer_num, layer_name):
    base_dir = os.path.expanduser('~/data')
    file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')
    temp_file_path = os.path.join(base_dir, f'PLSS_{layer_name}_temp.json')

    url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"

    total_count = get_feature_count(url)
    print(f"Total {layer_name} features: {total_count}")

    batch_size = 1000
    offset = 0
    all_features = []

    # Check if temporary file exists and load its content
    if os.path.exists(temp_file_path):
        with open(temp_file_path, 'r') as f:
            all_features = json.load(f)
        offset = len(all_features)
        print(f"Resuming download from offset {offset}")

    try:
        while offset < total_count:
            print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
            data = fetch_features(url, offset, batch_size)

            new_features = data.get('features', [])
            if not new_features:
                break

            all_features.extend(new_features)
            offset += len(new_features)

            # Progress indicator
            progress = len(all_features) / total_count
            bar_length = 30
            filled_length = int(bar_length * progress)
            bar = '=' * filled_length + '-' * (bar_length - filled_length)
            print(f'\rProgress: [{bar}] {progress:.1%} ({len(all_features)}/{total_count} features)', end='', flush=True)

            # Save progress to temporary file
            with open(temp_file_path, 'w') as f:
                json.dump(all_features, f)

            time.sleep(1)

        print(f"\nTotal {layer_name} features fetched: {len(all_features)}")

        geojson_features = [
            {
                "type": "Feature",
                "properties": feature['attributes'],
                "geometry": feature['geometry']
            } for feature in all_features
        ]

        full_geojson = {
            "type": "FeatureCollection",
            "features": geojson_features
        }

        os.makedirs(base_dir, exist_ok=True)

        with open(file_path, 'w') as f:
            json.dump(full_geojson, f)

        print(f"GeoJSON file saved as '{file_path}'")

        # Remove temporary file
        if os.path.exists(temp_file_path):
            os.remove(temp_file_path)

        return file_path
    except Exception as e:
        print(f"\nError during download: {e}")
        print(f"Partial data saved in {temp_file_path}")
        return None


def check_postgres_connection():
    try:
        subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME, '-c', 'SELECT 1;'],
                       check=True, capture_output=True, text=True)
        return True
    except subprocess.CalledProcessError:
        return False

def check_postgis_extension():
    try:
        result = subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
                                 '-c', "SELECT 1 FROM pg_extension WHERE extname = 'postgis';"],
                                check=True, capture_output=True, text=True)
        return '1' in result.stdout
    except subprocess.CalledProcessError:
        return False

def create_postgis_extension():
    try:
        subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
                        '-c', "CREATE EXTENSION IF NOT EXISTS postgis;"],
                       check=True, capture_output=True, text=True)
        print("PostGIS extension created successfully.")
    except subprocess.CalledProcessError as e:
        print(f"Error creating PostGIS extension: {e}")
        sys.exit(1)

def import_to_postgis(file_path, table_name):
    if not check_postgres_connection():
        print("Error: Unable to connect to PostgreSQL. Please check your connection settings.")
        sys.exit(1)

    if not check_postgis_extension():
        print("PostGIS extension not found. Attempting to create it...")
        create_postgis_extension()

    ogr2ogr_command = [
        'ogr2ogr',
        '-f', 'PostgreSQL',
        f'PG:dbname={DB_NAME} user={DB_USER} password={DB_PASSWORD} host={DB_HOST} port={DB_PORT}',
        file_path,
        '-nln', table_name,
        '-overwrite'
    ]

    try:
        subprocess.run(ogr2ogr_command, check=True, capture_output=True, text=True)
        print(f"Data successfully imported into PostGIS table: {table_name}")
    except subprocess.CalledProcessError as e:
        print(f"Error importing data into PostGIS: {e}")
        print(f"Command that failed: {e.cmd}")
        print(f"Error output: {e.stderr}")

def check_ogr2ogr():
    try:
        subprocess.run(['ogr2ogr', '--version'], check=True, capture_output=True, text=True)
        return True
    except subprocess.CalledProcessError:
        return False
    except FileNotFoundError:
        return False

def main():
    if not check_ogr2ogr():
        print("Error: ogr2ogr not found. Please install GDAL/OGR tools.")
        print("On Debian: sudo apt-get install gdal-bin")
        print("On macOS with Homebrew: brew install gdal")
        sys.exit(1)

    try:
        township_file = os.path.expanduser('~/data/PLSS_Townships.geojson')
        if not os.path.exists(township_file):
            township_file = download_layer(1, "Townships")
        if township_file:
            import_to_postgis(township_file, "public.plss_townships")

        section_file = os.path.expanduser('~/data/PLSS_Sections.geojson')
        if not os.path.exists(section_file):
            section_file = download_layer(2, "Sections")
        if section_file:
            import_to_postgis(section_file, "public.plss_sections")

    except requests.exceptions.RequestException as e:
        print(f"Error fetching data: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")

if __name__ == "__main__":
    main()
@ -1,133 +0,0 @@
# CaPLSS_downloader_and_importer.py
import requests
import json
import time
import os
import subprocess
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def get_feature_count(url):
    params = {
        'where': '1=1',
        'returnCountOnly': 'true',
        'f': 'json'
    }
    retries = Retry(total=10, backoff_factor=0.5)
    adapter = HTTPAdapter(max_retries=retries)
    session = requests.Session()
    session.mount("https://", adapter)

    response = session.get(url, params=params, timeout=15)  # Add timeout parameter
    response.raise_for_status()
    data = response.json()
    return data.get('count', 0)


def fetch_features(url, offset, num):
    params = {
        'where': '1=1',
        'outFields': '*',
        'geometryPrecision': 6,
        'outSR': 4326,
        'f': 'json',
        'resultOffset': offset,
        'resultRecordCount': num
    }
    response = requests.get(url, params=params)
    response.raise_for_status()
    return response.json()

def download_layer(layer_num, layer_name):
    url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"

    total_count = get_feature_count(url)
    print(f"Total {layer_name} features: {total_count}")

    batch_size = 1000
    offset = 0
    all_features = []

    while offset < total_count:
        print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
        data = fetch_features(url, offset, batch_size)

        new_features = data.get('features', [])
        if not new_features:
            break

        all_features.extend(new_features)
        offset += len(new_features)

        print(f"Progress: {len(all_features)}/{total_count} features")

        time.sleep(1)  # Be nice to the server

    print(f"Total {layer_name} features fetched: {len(all_features)}")

    # Convert to GeoJSON
    geojson_features = [
        {
            "type": "Feature",
            "properties": feature['attributes'],
            "geometry": feature['geometry']
        } for feature in all_features
    ]

    full_geojson = {
        "type": "FeatureCollection",
        "features": geojson_features
    }

    # Define a base directory that exists on both macOS and Debian
    base_dir = os.path.expanduser('~/data')
    os.makedirs(base_dir, exist_ok=True)  # Create the directory if it doesn't exist

    # Use os.path.join to construct the file path
    file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')

    # Save to file
    with open(file_path, 'w') as f:
        json.dump(full_geojson, f)

    print(f"GeoJSON file saved as '{file_path}'")

    return file_path

def import_to_postgis(file_path, table_name):
    db_name = 'sij'
    db_user = 'sij'
    db_password = 'Synchr0!'

    ogr2ogr_command = [
        'ogr2ogr',
        '-f', 'PostgreSQL',
        f'PG:dbname={db_name} user={db_user} password={db_password}',
        file_path,
        '-nln', table_name,
        '-overwrite'
    ]

    subprocess.run(ogr2ogr_command, check=True)
    print(f"Data successfully imported into PostGIS table: {table_name}")

def main():
    try:
        # Download and import Townships (Layer 1)
        township_file = download_layer(1, "Townships")
        import_to_postgis(township_file, "public.plss_townships")

        # Download and import Sections (Layer 2)
        section_file = download_layer(2, "Sections")
        import_to_postgis(section_file, "public.plss_sections")

    except requests.exceptions.RequestException as e:
        print(f"Error fetching data: {e}")
    except subprocess.CalledProcessError as e:
        print(f"Error importing data into PostGIS: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")

if __name__ == "__main__":
    main()
@ -1,73 +0,0 @@
import requests
import PyPDF2
import io
import re

def scrape_data_from_pdf(url):
    response = requests.get(url)
    pdf_file = io.BytesIO(response.content)

    pdf_reader = PyPDF2.PdfReader(pdf_file)

    all_text = ""
    for page in pdf_reader.pages:
        all_text += page.extract_text() + "\n"

    return all_text

def parse_data(raw_data):
    lines = raw_data.split('\n')
    data = []
    current_entry = None

    for line in lines:
        line = line.strip()
        if re.match(r'\d+-\d+-\d+-\w+', line):
            if current_entry:
                data.append(current_entry)
            current_entry = {'Harvest Document': line, 'Raw Data': []}
        elif current_entry:
            current_entry['Raw Data'].append(line)

    if current_entry:
        data.append(current_entry)

    return data

def filter_data(data):
    return [entry for entry in data if any(owner.lower() in ' '.join(entry['Raw Data']).lower() for owner in ["Sierra Pacific", "SPI", "Land & Timber"])]

def extract_location(raw_data):
    location = []
    for line in raw_data:
        if 'MDBM:' in line or 'HBM:' in line:
            location.append(line)
    return ' '.join(location)

def extract_plss_coordinates(text):
    pattern = r'(\w+): T(\d+)([NSEW]) R(\d+)([NSEW]) S(\d+)'
    return re.findall(pattern, text)

# Main execution
url = "https://caltreesplans.resources.ca.gov/Caltrees/Report/ShowReport.aspx?module=TH_Document&reportID=492&reportType=LINK_REPORT_LIST"
raw_data = scrape_data_from_pdf(url)

parsed_data = parse_data(raw_data)
print(f"Total timber plans parsed: {len(parsed_data)}")

filtered_data = filter_data(parsed_data)
print(f"Found {len(filtered_data)} matching entries.")

for plan in filtered_data:
    print("\nHarvest Document:", plan['Harvest Document'])

    location = extract_location(plan['Raw Data'])
    print("Location:", location)

    plss_coordinates = extract_plss_coordinates(location)
    print("PLSS Coordinates:")
    for coord in plss_coordinates:
        meridian, township, township_dir, range_, range_dir, section = coord
        print(f"  {meridian}: T{township}{township_dir} R{range_}{range_dir} S{section}")

    print("-" * 50)
@ -1,23 +0,0 @@
#!/Users/sij/miniforge3/envs/sijapi/bin/python
import sys
import asyncio
from fastapi import BackgroundTasks
from sijapi.routers.news import process_and_save_article

async def main():
    if len(sys.argv) != 2:
        print("Usage: python script.py <article_url>")
        sys.exit(1)

    url = sys.argv[1]
    bg_tasks = BackgroundTasks()

    try:
        result = await process_and_save_article(bg_tasks, url)
        print(result)
    except Exception as e:
        print(f"Error processing article: {str(e)}")
        sys.exit(1)

if __name__ == "__main__":
    asyncio.run(main())
@ -1,46 +0,0 @@
import asyncio
import asyncpg

# Database connection information
DB_INFO = {
    'host': '100.64.64.20',
    'port': 5432,
    'database': 'sij',
    'user': 'sij',
    'password': 'Synchr0!'
}

async def update_click_logs():
    # Connect to the database
    conn = await asyncpg.connect(**DB_INFO)

    try:
        # Drop existing 'id' and 'new_id' columns if they exist
        await conn.execute("""
            ALTER TABLE click_logs
            DROP COLUMN IF EXISTS id,
            DROP COLUMN IF EXISTS new_id;
        """)
        print("Dropped existing id and new_id columns (if they existed)")

        # Add new UUID column as primary key
        await conn.execute("""
            ALTER TABLE click_logs
            ADD COLUMN id UUID PRIMARY KEY DEFAULT gen_random_uuid();
        """)
        print("Added new UUID column as primary key")

        # Get the number of rows in the table
        row_count = await conn.fetchval("SELECT COUNT(*) FROM click_logs")
        print(f"Number of rows in click_logs: {row_count}")

    except Exception as e:
        print(f"An error occurred: {str(e)}")
        import traceback
        traceback.print_exc()
    finally:
        # Close the database connection
        await conn.close()

# Run the update
asyncio.run(update_click_logs())
@ -1,66 +0,0 @@
import asyncio
import asyncpg
import psycopg2
import sys

async def try_async_connect(host, port, user, password, database):
    try:
        conn = await asyncpg.connect(
            host=host,
            port=port,
            user=user,
            password=password,
            database=database
        )
        version = await conn.fetchval('SELECT version()')
        print(f"Async connection successful to {host}:{port}")
        print(f"PostgreSQL version: {version}")
        await conn.close()
        return True
    except Exception as e:
        print(f"Async connection failed to {host}:{port}")
        print(f"Error: {str(e)}")
        return False

def try_sync_connect(host, port, user, password, database):
    try:
        conn = psycopg2.connect(
            host=host,
            port=port,
            user=user,
            password=password,
            database=database
        )
        cur = conn.cursor()
        cur.execute('SELECT version()')
        version = cur.fetchone()[0]
        print(f"Sync connection successful to {host}:{port}")
        print(f"PostgreSQL version: {version}")
        conn.close()
        return True
    except Exception as e:
        print(f"Sync connection failed to {host}:{port}")
        print(f"Error: {str(e)}")
        return False

async def main():
    # Database connection parameters
    port = 5432
    user = 'sij'
    password = 'Synchr0!'
    database = 'sij'

    hosts = ['100.64.64.20', '127.0.0.1', 'localhost']

    print("Attempting asynchronous connections:")
    for host in hosts:
        await try_async_connect(host, port, user, password, database)
        print()

    print("Attempting synchronous connections:")
    for host in hosts:
        try_sync_connect(host, port, user, password, database)
        print()

if __name__ == "__main__":
    asyncio.run(main())
@ -1,89 +0,0 @@
import psycopg2
from psycopg2 import sql

def connect_to_db():
    return psycopg2.connect(
        dbname='sij',
        user='sij',
        password='Synchr0!',
        host='localhost'  # Adjust if your database is not on localhost
    )

def get_table_info(conn):
    with conn.cursor() as cur:
        # Get all tables in the public schema
        cur.execute("""
            SELECT table_name
            FROM information_schema.tables
            WHERE table_schema = 'public'
        """)
        tables = cur.fetchall()

        table_info = {}
        for (table_name,) in tables:
            table_info[table_name] = {
                'primary_keys': get_primary_keys(cur, table_name),
                'foreign_keys': get_foreign_keys(cur, table_name)
            }

    return table_info

def get_primary_keys(cur, table_name):
    cur.execute("""
        SELECT a.attname
        FROM pg_index i
        JOIN pg_attribute a ON a.attrelid = i.indrelid
            AND a.attnum = ANY(i.indkey)
        WHERE i.indrelid = %s::regclass
            AND i.indisprimary
    """, (table_name,))
    return [row[0] for row in cur.fetchall()]

def get_foreign_keys(cur, table_name):
    cur.execute("""
        SELECT
            tc.constraint_name,
            kcu.column_name,
            ccu.table_name AS foreign_table_name,
            ccu.column_name AS foreign_column_name
        FROM
            information_schema.table_constraints AS tc
            JOIN information_schema.key_column_usage AS kcu
              ON tc.constraint_name = kcu.constraint_name
              AND tc.table_schema = kcu.table_schema
            JOIN information_schema.constraint_column_usage AS ccu
              ON ccu.constraint_name = tc.constraint_name
              AND ccu.table_schema = tc.table_schema
        WHERE tc.constraint_type = 'FOREIGN KEY' AND tc.table_name=%s
    """, (table_name,))
    return cur.fetchall()

def main():
    try:
        with connect_to_db() as conn:
            table_info = get_table_info(conn)

            for table_name, info in table_info.items():
                print(f"\n## Table: {table_name}")

                print("\nPrimary Keys:")
                if info['primary_keys']:
                    for pk in info['primary_keys']:
                        print(f"- {pk}")
                else:
                    print("- No primary keys found")

                print("\nForeign Keys:")
                if info['foreign_keys']:
                    for fk in info['foreign_keys']:
                        print(f"- {fk[1]} -> {fk[2]}.{fk[3]} (Constraint: {fk[0]})")
                else:
                    print("- No foreign keys found")

    except psycopg2.Error as e:
        print(f"Database error: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")

if __name__ == "__main__":
    main()
@ -1,46 +0,0 @@
import asyncio
import asyncpg

# Database connection information
DB_INFO = {
    'host': '100.64.64.20',
    'port': 5432,
    'database': 'sij',
    'user': 'sij',
    'password': 'Synchr0!'
}

async def update_click_logs():
    # Connect to the database
    conn = await asyncpg.connect(**DB_INFO)

    try:
        # Drop existing 'id' and 'new_id' columns if they exist
        await conn.execute("""
            ALTER TABLE click_logs
            DROP COLUMN IF EXISTS id,
            DROP COLUMN IF EXISTS new_id;
        """)
        print("Dropped existing id and new_id columns (if they existed)")

        # Add new UUID column as primary key
        await conn.execute("""
            ALTER TABLE click_logs
            ADD COLUMN id UUID PRIMARY KEY DEFAULT gen_random_uuid();
        """)
        print("Added new UUID column as primary key")

        # Get the number of rows in the table
        row_count = await conn.fetchval("SELECT COUNT(*) FROM click_logs")
        print(f"Number of rows in click_logs: {row_count}")

    except Exception as e:
        print(f"An error occurred: {str(e)}")
        import traceback
        traceback.print_exc()
    finally:
        # Close the database connection
        await conn.close()

# Run the update
asyncio.run(update_click_logs())
@ -1,25 +0,0 @@
version: '3.8'

services:
  db:
    image: postgis/postgis:16-3.4
    container_name: sij_postgres
    environment:
      POSTGRES_DB: sij
      POSTGRES_USER: sij
      POSTGRES_PASSWORD: Synchr0!
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
    ports:
      - "5432:5432"
    networks:
      - sij_network

networks:
  sij_network:
    driver: bridge

volumes:
  postgres_data:
@ -1,11 +0,0 @@
#!/bin/bash
set -e

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
    CREATE EXTENSION IF NOT EXISTS postgis;
    CREATE EXTENSION IF NOT EXISTS postgis_topology;
EOSQL

# Modify pg_hba.conf to allow connections from Tailscale network
echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf
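A minimal sketch, assuming the container defined in the docker-compose.yml above is reachable from the Tailscale subnet it opens up (the 100.64.64.20 address below is an assumption borrowed from the other scripts, not something these files state); it checks that the init script created the PostGIS extensions using the credentials from the compose file:

import psycopg2

# Hypothetical verification step; host/port are placeholders.
conn = psycopg2.connect(host='100.64.64.20', port=5432, dbname='sij',
                        user='sij', password='Synchr0!')
with conn.cursor() as cur:
    cur.execute("SELECT extname FROM pg_extension WHERE extname LIKE 'postgis%';")
    print(cur.fetchall())  # expect postgis and postgis_topology to be listed
conn.close()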
@ -1,104 +0,0 @@
{
  "history_item_ids": [
    "ncRYNd0Xef4LiUE74VjP", "13pQLDAPYGIATwW1ySL5", "dhsQNAYTWpcwo1X6rixf", "V7wUip1NJuWAUw26sePF", "mOYMa5lcI7wRHddIQTSa",
    "mP97iOpA4oG7pwUBthq4", "WTU5nsX6qZCYxLyoT5hq", "15DPGnBgjr74KT3TMbK4", "aCyBS1zoaweVjUoPf2TF", "J8SUMQqZPtoy3Cgdhi3J",
    "qKHaaJHfqh2je60Wmadb", "2PaugQJ8c4rY44JGlaO5", "TwzxcmYjo6XNebbMabcd", "xdEK7rYq9UofOlkr565b", "wik4jYd97aGMLgttTjC9",
    "7oXn2yH7gdyhi6sEoWKd", "jv8aZFiVe8gPMrAOBcNT", "B2BctCDkCtLDxEMMBu9z", "4KFO77NHDruNQvXIykwp", "d033NizZaNZPc45fvxCO",
    "yBKxOxfzsjpZYOFzoIM7", "oEihKwMLWgvvoTLGx4yF", "Q3guBm4hGml0KPAWKl7t", "jaojY1gSafQmqshR48oT", "yqGDMfcceaoceFEEurqa",
    "oLdnyUp7plGrUMRVQ8Cf", "FZAGCGosYEGMf8GCRFaA", "TrWnXRdGkiH0K9kgwFiS", "th16OEbg3u0XHslT9A33", "856BAsn6dnzF7HeqGPfK",
    "KjLoAfDXVBqR9s39T25j", "uHQQJMMOfOxPAhEYQXLl", "HO8WCIhkkI7AxwkU5MC6", "9nxdesHWTRLCOd6YgWe9", "tmx5tlIQ7hdSTgJt16P2",
    "M9JN0YcBuCF6LhnqKN66", "M9xkP4ecn0LIi7mQOfU6", "CNtJgh52Ykh9ZqEppZeH", "lgobcoiqmtWfbXkhEwbE", "nr9jxnsE4DnwmTwCaHqC",
    "Rnzo03tcyBqGPdmHemCb", "X3YVGp7yf9GLgZ7WOuSU", "wL3bkqxR9xqeFTvkJpSI", "wNx3XDgFLTjVbMyGrIAO", "rb0jj1ywBetmdvve5qIL",
    "WdNnqvNswXeh6JFoaRSS", "WT2ViyerKpodYmHDHhCw", "OvhIRehXNwx7xMJHuTd7", "EQb1iZtsADxJ0GxLJzEK", "WXVfBJYoYGB7S61VyETD",
    "q0q3Di1YJKF07dOhoa7E", "a2XBIUPa68UiiKlzwFnG", "YBuD7KsUpz8jxc5ItZcF", "KdoucRVCVQGRVQ8Di9Ih", "CkmDny98GEdfGuj2kaAx",
    "R0R2p8luRZL7wwPtDilw", "awvztgQnuaquK0dTpIuH", "3ZPN0nJo8UQZYhFhoIOK", "RJJeTkjYIgdv1ZoXXAax", "ppxUNzWHAQafsM6OvEUE",
    "f2VBm7yE7qmnjdS9CbYz", "SZIMwz2T5ZAhTxTDBFol", "YjC91PRgnQbAcdPhnWqU", "fDTV7n8f6QK5yCwLkBwg", "KbPpWUuiLPADj9H3OlvG",
    "DIuqVoAg7lLxpvFBip84", "pEwFAKMLGWUMHqfljJSq", "9wwl7UbsgeKqrk8kNZin", "2uLvjJgcZDiY9dqB8JlP", "U5f1qZQM08t2YzJqEmxK",
    "gnwn7QIhrCXRAGNddZ1H", "g5nGEIHirFzKstdrGI1h", "CQWH5dGSeS38VC4X4yg7", "C5YGjhJPrTkVOpxIOHdj", "YLbtnf1pSb9Ra7wgFHiF",
    "qNLgNSvMr4VSoisKS9qj", "Bq2ALvQVsj9L2wMpUvYO", "gi0yTXLZLMhUKeKcalWc", "3JQN9UbCsqj9ggi5sCkq", "oPflJoA9kqBzjlmWY6zL",
    "0kUZFgtZdqgdUBXFsXs9", "aFTi7XdjR8W52ThmFpgc", "pgIfjcy2UvKggfqJ1aNx", "r0VguLaqnxTL9jza9H4y", "444ehr4RtqgU1xjhhTLo",
    "pEuzoznVDaQRBhIA9VTy", "T9hdW9eJkEqDmOsSUoeY", "wJjHbGzoWiKKOIGmf82T", "kij4uMmkUlsSDu2zSH1k", "oWt5rns196JsKIYPyrBS",
    "SJ1m9mSOGOLIhkMgA8kq", "kAaqe0ATrYtkifmZLOE5", "O2Pvz7CP5rfyNvzFSDmy", "w1rb8qN5nohVUovC0XAx", "njFs4I4F7rtd9I6fEn6x",
    "miFrp9GBm3MsHO03Z4eY", "5DJywiPsfeVP9hFdqRhd", "mUephoXhk5QdWrOfr9Xr", "tDDiW3Yp0BptZ2wBv21A", "YpX06liXWHquUVYFlKYa"
  ]
}
@ -1,63 +0,0 @@
import asyncio
from pathlib import Path
from sijapi import L, EMAIL_CONFIG, EMAIL_LOGS
from sijapi.classes import EmailAccount
from sijapi.routers import email

async def initialize_log_files():
    summarized_log = EMAIL_LOGS / "summarized.txt"
    autoresponded_log = EMAIL_LOGS / "autoresponded.txt"
    diagnostic_log = EMAIL_LOGS / "diagnostic.txt"
    for log_file in [summarized_log, autoresponded_log, diagnostic_log]:
        log_file.parent.mkdir(parents=True, exist_ok=True)
        log_file.write_text("")
    L.DEBUG(f"Log files initialized: {summarized_log}, {autoresponded_log}, {diagnostic_log}")
    return summarized_log, autoresponded_log, diagnostic_log

async def process_all_emails(account: EmailAccount, summarized_log: Path, autoresponded_log: Path, diagnostic_log: Path):
    try:
        with email.get_imap_connection(account) as inbox:
            L.DEBUG(f"Connected to {account.name}, processing all emails...")
            all_messages = inbox.messages()
            unread_messages = set(uid for uid, _ in inbox.messages(unread=True))

            processed_count = 0
            for identifier, message in all_messages:
                # Log diagnostic information
                with open(diagnostic_log, 'a') as f:
                    f.write(f"Account: {account.name}, Raw Identifier: {identifier}, Type: {type(identifier)}\n")

                # Attempt to get a string representation of the identifier
                if isinstance(identifier, bytes):
                    id_str = identifier.decode()
                elif isinstance(identifier, (int, str)):
                    id_str = str(identifier)
                else:
                    id_str = repr(identifier)

                if identifier not in unread_messages:
                    processed_count += 1
                    for log_file in [summarized_log, autoresponded_log]:
                        with open(log_file, 'a') as f:
                            f.write(f"{id_str}\n")

            L.INFO(f"Processed {processed_count} non-unread emails for account {account.name}")
    except Exception as e:
        L.logger.error(f"An error occurred while processing emails for account {account.name}: {e}")

async def main():
    email_accounts = email.load_email_accounts(EMAIL_CONFIG)
    summarized_log, autoresponded_log, diagnostic_log = await initialize_log_files()

    L.DEBUG(f"Processing {len(email_accounts)} email accounts")

    tasks = [process_all_emails(account, summarized_log, autoresponded_log, diagnostic_log) for account in email_accounts]
    await asyncio.gather(*tasks)

    # Final verification
    with open(summarized_log, 'r') as f:
        final_count = len(f.readlines())
    L.INFO(f"Final non-unread email count: {final_count}")

if __name__ == "__main__":
    asyncio.run(main())
Some files were not shown because too many files have changed in this diff.