Auto-update: Sun Aug 4 22:12:18 PDT 2024

parent a82b1da5de, commit d4cfb5fbd2
4 changed files with 176 additions and 195 deletions

sijapi/__main__.py
@@ -11,7 +11,7 @@ from hypercorn.config import Config as HypercornConfig
import sys
import os
import traceback
import asyncio
import httpx
import argparse
import json

@@ -56,7 +56,7 @@ async def lifespan(app: FastAPI):
    try:
        # Initialize sync structures on all databases
        await API.initialize_sync()

    except Exception as e:
        crit(f"Error during startup: {str(e)}")
        crit(f"Traceback: {traceback.format_exc()}")

@@ -136,13 +136,13 @@ async def pull_changes():
        await API.add_primary_keys_to_local_tables()
        await API.add_primary_keys_to_remote_tables()
        try:

            source = await API.get_most_recent_source()

            if source:
                # Pull changes from the source
                total_changes = await API.pull_changes(source)

                return JSONResponse(content={
                    "status": "success",
                    "message": f"Pull complete. Total changes: {total_changes}",

@@ -154,12 +154,12 @@ async def pull_changes():
                    "status": "info",
                    "message": "No instances with more recent data found or all instances are offline."
                })

        except Exception as e:
            err(f"Error during pull: {str(e)}")
            err(f"Traceback: {traceback.format_exc()}")
            raise HTTPException(status_code=500, detail=f"Error during pull: {str(e)}")

    except Exception as e:
        err(f"Error while ensuring primary keys to tables: {str(e)}")
        err(f"Traceback: {traceback.format_exc()}")
sijapi/classes.py

@@ -29,6 +29,7 @@ from .logs import Logger
L = Logger("classes", "classes")
logger = L.get_module_logger("classes")

+# Logging functions
def debug(text: str): logger.debug(text)
def info(text: str): logger.info(text)
def warn(text: str): logger.warning(text)

@@ -81,7 +82,7 @@ class Configuration(BaseModel):
            instance = cls.create_dynamic_model(**resolved_data)
            instance._dir_config = dir_config or instance
            return instance

        except Exception as e:
            err(f"Error loading configuration: {str(e)}")
            raise

@@ -137,7 +138,7 @@ class Configuration(BaseModel):
                elif len(parts) == 2 and parts[0] == 'ENV':
                    replacement = os.getenv(parts[1], '')
                else:
                    replacement = value

                value = value.replace('{{' + match + '}}', str(replacement))

@@ -314,7 +315,7 @@ class APIConfig(BaseModel):
    async def get_online_hosts(self) -> List[Dict[str, Any]]:
        current_time = time.time()
        cache_key = "online_hosts"

        if cache_key in self.online_hosts_cache:
            cached_hosts, cache_time = self.online_hosts_cache[cache_key]
            if current_time - cache_time < self.online_hosts_cache_ttl:
@@ -322,7 +323,7 @@ class APIConfig(BaseModel):

        online_hosts = []
        local_ts_id = os.environ.get('TS_ID')

        for pool_entry in self.POOL:
            if pool_entry['ts_id'] != local_ts_id:
                pool_key = f"{pool_entry['ts_ip']}:{pool_entry['db_port']}"

@@ -331,7 +332,7 @@ class APIConfig(BaseModel):
                        continue
                    else:
                        del self.offline_servers[pool_key]

                conn = await self.get_connection(pool_entry)
                if conn is not None:
                    online_hosts.append(pool_entry)

@@ -343,16 +344,16 @@ class APIConfig(BaseModel):
    async def get_connection(self, pool_entry: Dict[str, Any] = None):
        if pool_entry is None:
            pool_entry = self.local_db

        pool_key = f"{pool_entry['ts_ip']}:{pool_entry['db_port']}"

        # Check if the server is marked as offline
        if pool_key in self.offline_servers:
            if time.time() - self.offline_servers[pool_key] < self.offline_timeout:
                return None
            else:
                del self.offline_servers[pool_key]

        if pool_key not in self.db_pools:
            try:
                self.db_pools[pool_key] = await asyncpg.create_pool(

@@ -385,7 +386,7 @@ class APIConfig(BaseModel):
    async def initialize_sync(self):
        local_ts_id = os.environ.get('TS_ID')
        online_hosts = await self.get_online_hosts()

        for pool_entry in online_hosts:
            if pool_entry['ts_id'] == local_ts_id:
                continue  # Skip local database

@@ -393,29 +394,29 @@ class APIConfig(BaseModel):
                conn = await self.get_connection(pool_entry)
                if conn is None:
                    continue  # Skip this database if connection failed

                debug(f"Starting sync initialization for {pool_entry['ts_ip']}...")

                # Check PostGIS installation
                postgis_installed = await self.check_postgis(conn)
                if not postgis_installed:
                    warn(f"PostGIS is not installed on {pool_entry['ts_id']} ({pool_entry['ts_ip']}). Some spatial operations may fail.")

                tables = await conn.fetch("""
                    SELECT tablename FROM pg_tables
                    WHERE schemaname = 'public'
                """)

                for table in tables:
                    table_name = table['tablename']
                    await self.ensure_sync_columns(conn, table_name)

                debug(f"Sync initialization complete for {pool_entry['ts_ip']}. All tables now have necessary sync columns and triggers.")

            except Exception as e:
                err(f"Error initializing sync for {pool_entry['ts_ip']}: {str(e)}")
                err(f"Traceback: {traceback.format_exc()}")

    def _schedule_sync_task(self, table_name: str, pk_value: Any, version: int, server_id: str):
        # Use a background task manager to handle syncing
        task_key = f"{table_name}:{pk_value}" if pk_value else table_name
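A minimal sketch of the deduplicating scheduler that the comment and task_key above point at; the body of _schedule_sync_task falls outside the captured hunks, and self.sync_tasks and self.push_change are hypothetical names, not confirmed by this diff:

        # Assumption: self.sync_tasks maps task_key -> asyncio.Task, so each
        # table/primary-key pair has at most one in-flight sync at a time.
        existing = self.sync_tasks.get(task_key)
        if existing is None or existing.done():
            self.sync_tasks[task_key] = asyncio.create_task(
                self.push_change(table_name, pk_value, version, server_id)
            )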
@@ -439,20 +440,20 @@ class APIConfig(BaseModel):
            if not primary_key:
                # Add an id column as primary key if it doesn't exist
                await conn.execute(f"""
                    ALTER TABLE "{table_name}"
                    ADD COLUMN IF NOT EXISTS id SERIAL PRIMARY KEY;
                """)
                primary_key = 'id'

            # Ensure version column exists
            await conn.execute(f"""
                ALTER TABLE "{table_name}"
                ADD COLUMN IF NOT EXISTS version INTEGER DEFAULT 1;
            """)

            # Ensure server_id column exists
            await conn.execute(f"""
                ALTER TABLE "{table_name}"
                ADD COLUMN IF NOT EXISTS server_id TEXT DEFAULT '{os.environ.get('TS_ID')}';
            """)

@@ -487,7 +488,7 @@ class APIConfig(BaseModel):

            debug(f"Successfully ensured sync columns and trigger for table {table_name}")
            return primary_key

        except Exception as e:
            err(f"Error ensuring sync columns for table {table_name}: {str(e)}")
            err(f"Traceback: {traceback.format_exc()}")
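The trigger referenced by the debug message above is created outside the captured hunks. As an illustration only, under the assumption that it pairs with the version and server_id columns just added (the project's actual trigger may differ), it could be installed like this:

            # Hypothetical sketch: bump version on every UPDATE; INSERTs get
            # version 1 from the column DEFAULT added above.
            await conn.execute(f"""
                CREATE OR REPLACE FUNCTION bump_version() RETURNS trigger AS $$
                BEGIN
                    NEW.version := COALESCE(OLD.version, 0) + 1;
                    RETURN NEW;
                END;
                $$ LANGUAGE plpgsql;

                DROP TRIGGER IF EXISTS bump_version_trigger ON "{table_name}";
                CREATE TRIGGER bump_version_trigger
                    BEFORE UPDATE ON "{table_name}"
                    FOR EACH ROW EXECUTE FUNCTION bump_version();
            """)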
@@ -497,7 +498,7 @@ class APIConfig(BaseModel):
        if conn is None:
            debug(f"Skipping offline server...")
            return None

        try:
            result = await conn.fetchval("SELECT PostGIS_version();")
            if result:

@@ -509,7 +510,7 @@ class APIConfig(BaseModel):
        except Exception as e:
            err(f"Error checking PostGIS: {str(e)}")
            return False


    async def pull_changes(self, source_pool_entry, batch_size=10000):
        if source_pool_entry['ts_id'] == os.environ.get('TS_ID'):

@@ -538,10 +539,10 @@ class APIConfig(BaseModel):
                return 0

            tables = await source_conn.fetch("""
                SELECT tablename FROM pg_tables
                WHERE schemaname = 'public'
            """)

            for table in tables:
                table_name = table['tablename']
                try:

@@ -550,7 +551,7 @@ class APIConfig(BaseModel):
                    else:
                        primary_key = await self.ensure_sync_columns(dest_conn, table_name)
                        last_synced_version = await self.get_last_synced_version(dest_conn, table_name, source_id)

                    while True:
                        changes = await source_conn.fetch(f"""
                            SELECT * FROM "{table_name}"

@@ -558,16 +559,16 @@ class APIConfig(BaseModel):
                            ORDER BY version ASC
                            LIMIT $3
                        """, last_synced_version, source_id, batch_size)

                        if not changes:
                            break  # No more changes for this table

                        changes_count = await self.apply_batch_changes(dest_conn, table_name, changes, primary_key)
                        total_changes += changes_count

                        if changes_count > 0:
                            info(f"Synced batch for {table_name}: {changes_count} changes. Total so far: {total_changes}")

                        last_synced_version = changes[-1]['version']  # Update last synced version

                except Exception as e:

@@ -599,7 +600,7 @@ class APIConfig(BaseModel):
        if conn is None:
            debug(f"Skipping offline server...")
            return 0

        if table_name in self.SPECIAL_TABLES:
            debug(f"Skipping get_last_synced_version because {table_name} is special.")
            return 0  # Special handling for tables without version column

@@ -623,15 +624,15 @@ class APIConfig(BaseModel):
        local_ts_id = os.environ.get('TS_ID')
        online_hosts = await self.get_online_hosts()
        num_online_hosts = len(online_hosts)

        if num_online_hosts > 0:
            online_ts_ids = [host['ts_id'] for host in online_hosts if host['ts_id'] != local_ts_id]
            crit(f"Online hosts: {', '.join(online_ts_ids)}")

            for pool_entry in online_hosts:
                if pool_entry['ts_id'] == local_ts_id:
                    continue  # Skip local database

                try:
                    conn = await self.get_connection(pool_entry)
                    if conn is None:

@@ -639,14 +640,14 @@ class APIConfig(BaseModel):
                        continue

                    tables = await conn.fetch("""
                        SELECT tablename FROM pg_tables
                        WHERE schemaname = 'public'
                    """)

                    for table in tables:
                        table_name = table['tablename']
                        if table_name in self.SPECIAL_TABLES:
                            continue
                        try:
                            result = await conn.fetchrow(f"""
                                SELECT MAX(version) as max_version, server_id

@@ -684,7 +685,7 @@ class APIConfig(BaseModel):
            most_recent_source = next(host for host in online_hosts if host['ts_id'] != local_ts_id)
        else:
            crit("No other online hosts available for sync.")

        return most_recent_source

|
||||||
try:
|
try:
|
||||||
# Execute the query
|
# Execute the query
|
||||||
result = await conn.fetch(query, *args)
|
result = await conn.fetch(query, *args)
|
||||||
|
|
||||||
if not result:
|
if not result:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -846,9 +847,9 @@ class APIConfig(BaseModel):
|
||||||
columns = updated_row.keys()
|
columns = updated_row.keys()
|
||||||
placeholders = [f'${i+1}' for i in range(len(columns))]
|
placeholders = [f'${i+1}' for i in range(len(columns))]
|
||||||
primary_key = self.get_primary_key(table_name)
|
primary_key = self.get_primary_key(table_name)
|
||||||
|
|
||||||
remote_version = await remote_conn.fetchval(f"""
|
remote_version = await remote_conn.fetchval(f"""
|
||||||
SELECT version FROM "{table_name}"
|
SELECT version FROM "{table_name}"
|
||||||
WHERE "{primary_key}" = $1
|
WHERE "{primary_key}" = $1
|
||||||
""", updated_row[primary_key])
|
""", updated_row[primary_key])
|
||||||
|
|
||||||
|
@ -920,17 +921,17 @@ class APIConfig(BaseModel):
|
||||||
if all_rows:
|
if all_rows:
|
||||||
columns = list(all_rows[0].keys())
|
columns = list(all_rows[0].keys())
|
||||||
placeholders = [f'${i+1}' for i in range(len(columns))]
|
placeholders = [f'${i+1}' for i in range(len(columns))]
|
||||||
|
|
||||||
insert_query = f"""
|
insert_query = f"""
|
||||||
INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
|
INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
|
||||||
VALUES ({', '.join(placeholders)})
|
VALUES ({', '.join(placeholders)})
|
||||||
ON CONFLICT DO NOTHING
|
ON CONFLICT DO NOTHING
|
||||||
"""
|
"""
|
||||||
|
|
||||||
async with remote_conn.transaction():
|
async with remote_conn.transaction():
|
||||||
for row in all_rows:
|
for row in all_rows:
|
||||||
await remote_conn.execute(insert_query, *row.values())
|
await remote_conn.execute(insert_query, *row.values())
|
||||||
|
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
debug(f"No rows to push for table {table_name}")
|
debug(f"No rows to push for table {table_name}")
|
||||||
|
@ -976,7 +977,7 @@ class APIConfig(BaseModel):
|
||||||
try:
|
try:
|
||||||
columns = list(changes[0].keys())
|
columns = list(changes[0].keys())
|
||||||
placeholders = [f'${i+1}' for i in range(len(columns))]
|
placeholders = [f'${i+1}' for i in range(len(columns))]
|
||||||
|
|
||||||
if primary_key:
|
if primary_key:
|
||||||
insert_query = f"""
|
insert_query = f"""
|
||||||
INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
|
INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
|
||||||
|
@ -1057,7 +1058,7 @@ class APIConfig(BaseModel):
|
||||||
proj4text = $4::text
|
proj4text = $4::text
|
||||||
WHERE srid = $5::integer
|
WHERE srid = $5::integer
|
||||||
"""
|
"""
|
||||||
await dest_conn.execute(update_query,
|
await dest_conn.execute(update_query,
|
||||||
source_entry['auth_name'],
|
source_entry['auth_name'],
|
||||||
source_entry['auth_srid'],
|
source_entry['auth_srid'],
|
||||||
source_entry['srtext'],
|
source_entry['srtext'],
|
||||||
|
@ -1080,7 +1081,7 @@ class APIConfig(BaseModel):
|
||||||
# You might want to cache this information for performance
|
# You might want to cache this information for performance
|
||||||
# For now, we'll assume it's always 'id', but you should implement proper logic here
|
# For now, we'll assume it's always 'id', but you should implement proper logic here
|
||||||
return 'id'
|
return 'id'
|
||||||
|
|
||||||
|
|
||||||
async def add_primary_keys_to_local_tables(self):
|
async def add_primary_keys_to_local_tables(self):
|
||||||
conn = await self.get_connection()
|
conn = await self.get_connection()
|
||||||
|
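get_primary_key above returns a hardcoded 'id'. One possible shape for the "proper logic" its comment calls for, shown as an assumption for illustration rather than the project's actual approach, is to resolve and cache the key from the PostgreSQL catalog:

    # Hypothetical replacement: look the primary key up from pg_index and
    # cache it per table; falls back to 'id' when no primary key exists.
    async def get_primary_key(self, conn, table_name: str) -> str:
        if table_name not in self._pk_cache:  # _pk_cache: Dict[str, str], assumed
            row = await conn.fetchrow("""
                SELECT a.attname
                FROM pg_index i
                JOIN pg_attribute a
                  ON a.attrelid = i.indrelid AND a.attnum = ANY(i.indkey)
                WHERE i.indrelid = $1::regclass AND i.indisprimary
            """, table_name)
            self._pk_cache[table_name] = row['attname'] if row else 'id'
        return self._pk_cache[table_name]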
@@ -1091,10 +1092,10 @@ class APIConfig(BaseModel):

        try:
            tables = await conn.fetch("""
                SELECT tablename FROM pg_tables
                WHERE schemaname = 'public'
            """)

            for table in tables:
                table_name = table['tablename']
                if table_name not in self.SPECIAL_TABLES:

@@ -1104,7 +1105,7 @@ class APIConfig(BaseModel):

    async def add_primary_keys_to_remote_tables(self):
        online_hosts = await self.get_online_hosts()

        for pool_entry in online_hosts:
            conn = await self.get_connection(pool_entry)
            if conn is None:

@@ -1114,10 +1115,10 @@ class APIConfig(BaseModel):
            try:
                info(f"Adding primary keys to existing tables on {pool_entry['ts_id']}...")
                tables = await conn.fetch("""
                    SELECT tablename FROM pg_tables
                    WHERE schemaname = 'public'
                """)

                for table in tables:
                    table_name = table['tablename']
                    if table_name not in self.SPECIAL_TABLES:

@@ -1140,7 +1141,7 @@ class APIConfig(BaseModel):
        close_tasks = []
        for pool_key, pool in self.db_pools.items():
            close_tasks.append(self.close_pool_with_timeout(pool, pool_key))

        await asyncio.gather(*close_tasks)
        self.db_pools.clear()
        info("All database connection pools closed.")

@@ -1510,4 +1511,4 @@ class WidgetUpdate(BaseModel):
    color: Optional[str] = None
    url: Optional[str] = None
    shortcut: Optional[str] = None
    graph: Optional[str] = None

sijapi/routers/email.py
@@ -1,5 +1,5 @@
'''
Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming emails that match certain criteria and save the Text-To-Speech converted summaries into a specified "podcast" folder.
'''
#routers/email.py

@@ -55,9 +55,9 @@ def get_smtp_connection(autoresponder: AutoResponder):
    context = ssl.create_default_context()
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE

    smtp_config = autoresponder.smtp

    if smtp_config.encryption == 'SSL':
        try:
            debug(f"Attempting SSL connection to {smtp_config.host}:{smtp_config.port}")

@@ -73,6 +73,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
        except Exception as e:
            err(f"STARTTLS connection failed: {str(e)}")
            raise

    elif smtp_config.encryption == 'STARTTLS':
        try:
            debug(f"Attempting STARTTLS connection to {smtp_config.host}:{smtp_config.port}")

@@ -82,6 +83,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
        except Exception as e:
            err(f"STARTTLS connection failed: {str(e)}")
            raise

    else:
        try:
            debug(f"Attempting unencrypted connection to {smtp_config.host}:{smtp_config.port}")

@@ -128,8 +130,6 @@ async def send_response(to_email: str, subject: str, body: str, profile: AutoRes
            err(f"Error closing SMTP connection: {str(e)}")

-
-

def clean_email_content(html_content):
    soup = BeautifulSoup(html_content, "html.parser")
    return re.sub(r'[ \t\r\n]+', ' ', soup.get_text()).strip()

@@ -141,11 +141,11 @@ async def extract_attachments(attachments) -> List[str]:
        attachment_name = attachment.get('filename', 'tempfile.txt')
        _, ext = os.path.splitext(attachment_name)
        ext = ext.lower() if ext else '.txt'

        with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tmp_file:
            tmp_file.write(attachment['content'].getvalue())
            tmp_file_path = tmp_file.name

        try:
            attachment_text = await extract_text(tmp_file_path)
            attachment_texts.append(attachment_text)

@@ -155,6 +155,7 @@ async def extract_attachments(attachments) -> List[str]:

    return attachment_texts


async def process_account_archival(account: EmailAccount):
    summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
    os.makedirs(summarized_log.parent, exist_ok = True)

@@ -177,7 +178,7 @@ async def process_account_archival(account: EmailAccount):
                        recipients=recipients,
                        subject=message.subject,
                        body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
                        attachments=message.attachments
                    )
                    md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
                    md_summary = await summarize_single_email(this_email, account.podcast) if account.summarize == True else None

@@ -192,9 +193,10 @@ async def process_account_archival(account: EmailAccount):
                # debug(f"Skipping {uid_str} because it was already processed.")
        except Exception as e:
            err(f"An error occurred during summarization for account {account.name}: {e}")

        await asyncio.sleep(account.refresh)


async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False):
    tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
    summary = ""

@@ -212,6 +214,7 @@ async def summarize_single_email(this_email: IncomingEmail, podcast: bool = Fals

    return md_summary


async def archive_single_email(this_email: IncomingEmail, summary: str = None):
    try:
        markdown_content = f'''---
@@ -219,14 +222,14 @@ date: {this_email.datetime_received.strftime('%Y-%m-%d')}
tags:
 - email
---
| | | |
| --: | :--: | :--: |
| *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}** | |
| *from* | **[[{this_email.sender}]]** | |
| *to* | {', '.join([f'**[[{recipient.email}]]**' if not recipient.name else f'**[[{recipient.name}|{recipient.email}]]**' for recipient in this_email.recipients])} | |
| *subject* | **{this_email.subject}** | |
'''

        if summary:
            markdown_content += summary

@@ -235,11 +238,12 @@ tags:
{this_email.body}
'''
        return markdown_content

    except Exception as e:
        err(f"Exception: {e}")
        return False


async def save_email(md_path, md_content):
    try:
        with open(md_path, 'w', encoding='utf-8') as md_file:

@@ -251,6 +255,7 @@ async def save_email(md_path, md_content):
        err(f"Failed to save email: {e}")
        return False


def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
    debug(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
    def matches_list(item: str, this_email: IncomingEmail) -> bool:

@@ -294,13 +299,14 @@ async def process_account_autoresponding(account: EmailAccount):

        except Exception as e:
            err(f"An error occurred during auto-responding for account {account.name}: {e}")

        await asyncio.sleep(account.refresh)


async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path):
    this_email = await create_incoming_email(message)
    debug(f"Evaluating {this_email.subject} for autoresponse-worthiness...")

    matching_profiles = get_matching_autoresponders(this_email, account)
    debug(f"Matching profiles: {matching_profiles}")

@@ -319,6 +325,7 @@ async def autorespond_single_email(message, uid_str: str, account: EmailAccount,
    else:
        warn(f"Unable to generate auto-response for {this_email.subject}")


async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]:
    info(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")

@@ -335,16 +342,16 @@ Body: {this_email.body}
Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}. Keep the response {profile.style} and to the point, but responsive to the sender's inquiry. Do not mention or recite this context information in your response.
'''
    sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"

    try:
        response = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
        debug(f"query_ollama response: {response}")

        if isinstance(response, dict) and "message" in response and "content" in response["message"]:
            response = response["message"]["content"]

        return response + "\n\n"

    except Exception as e:
        err(f"Error generating auto-response: {str(e)}")
        return None

@@ -363,22 +370,26 @@ async def create_incoming_email(message) -> IncomingEmail:
        attachments=message.attachments
    )


async def load_processed_uids(filename: Path) -> Set[str]:
    if filename.exists():
        async with aiofiles.open(filename, 'r') as f:
            return set(line.strip().split(':')[-1] for line in await f.readlines())
    return set()


async def save_processed_uid(filename: Path, account_name: str, uid: str):
    async with aiofiles.open(filename, 'a') as f:
        await f.write(f"{account_name}:{uid}\n")


async def process_all_accounts():
    email_accounts = load_email_accounts(EMAIL_CONFIG)
    summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts]
    autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts]
    await asyncio.gather(*summarization_tasks, *autoresponding_tasks)


@email.on_event("startup")
async def startup_event():
    await asyncio.sleep(5)

sijapi/routers/note.py
@@ -22,7 +22,6 @@ from sijapi.routers import asr, cal, gis, img, llm, serve, timing, tts, weather
from sijapi.utilities import assemble_journal_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING
from sijapi.classes import Location


note = APIRouter()
logger = L.get_module_logger("note")
def debug(text: str): logger.debug(text)

@@ -43,7 +42,6 @@ async def note_add_endpoint(file: Optional[UploadFile] = File(None), text: Optio
    return JSONResponse({"message": "Note added successfully", "entry": result}, status_code=201)



async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None):
    now = dt_datetime.now()
    transcription_entry = ""

@@ -52,24 +50,24 @@ async def process_for_daily_note(file: Optional[UploadFile] = File(None), text:
        debug("File received...")
        file_content = await file.read()
        audio_io = BytesIO(file_content)

        # Improve error handling for file type guessing
        guessed_type = mimetypes.guess_type(file.filename)
        file_type = guessed_type[0] if guessed_type[0] else "application/octet-stream"

        debug(f"Processing as {file_type}...")

        # Extract the main type (e.g., 'audio', 'image', 'video')
        main_type = file_type.split('/')[0]
        subdir = main_type.title() if main_type else "Documents"

        absolute_path, relative_path = assemble_journal_path(now, subdir=subdir, filename=file.filename)
        debug(f"Destination path: {absolute_path}")

        with open(absolute_path, 'wb') as f:
            f.write(file_content)
        debug(f"Processing {f.name}...")

        if main_type == 'audio':
            transcription = await asr.transcribe_audio(file_path=absolute_path, params=asr.TranscribeParams(model="small-en", language="en", threads=6))
            file_entry = f"![[{relative_path}]]"

@@ -77,24 +75,22 @@ async def process_for_daily_note(file: Optional[UploadFile] = File(None), text:
            file_entry = f"![[{relative_path}]]"
        else:
            file_entry = f"[Source]({relative_path})"

    text_entry = text if text else ""
    debug(f"transcription: {transcription_entry}\nfile_entry: {file_entry}\ntext_entry: {text_entry}")
    return await add_to_daily_note(transcription_entry, file_entry, text_entry, now)


async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: dt_datetime = None):
    date_time = date_time or dt_datetime.now()
    note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True)
    time_str = date_time.strftime("%H:%M")

    entry_lines = []
    if additional_text and additional_text.strip():
        entry_lines.append(f"\t* {additional_text.strip()}")
    if transcription and transcription.strip():
        entry_lines.append(f"\t* {transcription.strip()}")
    if file_link and file_link.strip():
        entry_lines.append(f"\t\t {file_link.strip()}")

@@ -104,7 +100,7 @@ async def add_to_daily_note(transcription: str = None, file_link: str = None, ad
    if note_path.exists():
        with open(note_path, 'a', encoding='utf-8') as note_file:
            note_file.write(entry)
    else:
        date_str = date_time.strftime("%Y-%m-%d")
        frontmatter = f"""---
date: {date_str}

@@ -121,7 +117,6 @@ tags:
    return {"timestamp": time_str, "content": entry.strip()}



async def process_document(
    bg_tasks: BackgroundTasks,
    document: File,

@@ -195,6 +190,7 @@ added: {timestamp}
        err(f"Failed to clip: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


def list_and_correct_impermissible_files(root_dir, rename: bool = False):
    """List and correct all files with impermissible names."""
    impermissible_files = []

@@ -204,11 +200,11 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
                file_path = Path(dirpath) / filename
                impermissible_files.append(file_path)
                debug(f"Impermissible file found: {file_path}")

                # Sanitize the file name
                new_filename = sanitize_filename(filename)
                new_file_path = Path(dirpath) / new_filename

                # Ensure the new file name does not already exist
                if new_file_path.exists():
                    counter = 1

@@ -217,24 +213,23 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
                        new_filename = f"{base_name}_{counter}{ext}"
                        new_file_path = Path(dirpath) / new_filename
                        counter += 1

                # Rename the file
                if rename:
                    os.rename(file_path, new_file_path)
                    debug(f"Renamed: {file_path} -> {new_file_path}")

    return impermissible_files

journal = OBSIDIAN_VAULT_DIR / "journal"
list_and_correct_impermissible_files(journal, rename=True)

-### Daily Note Builder ###

@note.get("/note/bulk/{dt_start}/{dt_end}")
async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
    start_date = dt_datetime.strptime(dt_start, "%Y-%m-%d")
    end_date = dt_datetime.strptime(dt_end, "%Y-%m-%d")

    results = []
    current_date = start_date
    while current_date <= end_date:
@@ -242,9 +237,8 @@ async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
        result = await build_daily_note(formatted_date)
        results.append(result)
        current_date += timedelta(days=1)

-    return {"urls": results}

+    return {"urls": results}


@note.get("/note/create")

@@ -261,7 +255,7 @@ async def build_daily_note_getpoint():
        date_time = dt_datetime.now(tz)
        path = await build_daily_note(date_time, loc.latitude, loc.longitude)
        path_str = str(path)

        info(f"Successfully created daily note at {path_str}")
        return JSONResponse(content={"path": path_str}, status_code=200)

@@ -277,7 +271,6 @@ async def build_daily_note_getpoint():
        raise HTTPException(status_code=500, detail="An unexpected error occurred")



@note.post("/note/create")
async def build_daily_note_endpoint(
    date_str: Optional[str] = Form(dt_datetime.now().strftime("%Y-%m-%d")),

@@ -305,7 +298,7 @@ async def build_daily_note_endpoint(
    path = await build_daily_note(date_time, lat, lon)

    path_str = str(path)  # Convert PosixPath to string

    return JSONResponse(content={"path": path_str}, status_code=200)


@@ -319,13 +312,13 @@ Obsidian helper. Takes a datetime and creates a new daily note. Note: it uses th
    day_before = (date_time - timedelta(days=1)).strftime("%Y-%m-%d %A")  # 2024-05-26 Sunday formatting
    day_after = (date_time + timedelta(days=1)).strftime("%Y-%m-%d %A")  # 2024-05-28 Tuesday formatting
    header = f"# [[{day_before}|← ]] {formatted_day} [[{day_after}| →]]\n\n"

    if not lat or not lon:
        places = await gis.fetch_locations(date_time)
        lat, lon = places[0].latitude, places[0].longitude

    location = await GEO.code((lat, lon))

    timeslips = await build_daily_timeslips(date_time)

    fm_day = date_time.strftime("%Y-%m-%d")

@@ -349,7 +342,7 @@ Obsidian helper. Takes a datetime and creates a new daily note. Note: it uses th
    map_embed = f"![[{map_path}]]"

    _, banner_path = assemble_journal_path(date_time, filename="Banner", extension=".jpg", no_timestamp = True)

    body = f"""---
date: "{fm_day}"
banner: "![[{banner_path}]]"

@@ -357,14 +350,14 @@ tags:
 - daily-note
created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
---

{header}
{weather_embed}
{map_path}

## Events
{event_embed}

## Tasks
{task_embed}

@@ -381,19 +374,15 @@ created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
        banner = await generate_banner(formatted_day, location, weather_note)

    return absolute_path



async def build_daily_timeslips(date):
-    '''
-
-    '''
    absolute_path, relative_path = assemble_journal_path(date, filename = "Timeslips", extension=".md", no_timestamp = True)
    content = await timing.process_timing_markdown(date, date)
    # document_content = await document.read()
    with open(absolute_path, 'wb') as f:
        f.write(content.encode())

    return f"![[{relative_path}]]"

@ -402,21 +391,17 @@ async def update_frontmatter_endpoint(date: str, key: str, value: str):
|
||||||
    date_time = dt_datetime.strptime(date, "%Y-%m-%d")
    result = await update_frontmatter(date_time, key, value)
    return result


async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
-    # Parse the date and format paths
    file_path, relative_path = assemble_journal_path(date_time)
-    # Check if the file exists
    if not file_path.exists():
        crit(f"Markdown file not found at {file_path}")
        raise HTTPException(status_code=404, detail="Markdown file not found.")

-    # Read the file
    with open(file_path, "r", encoding="utf-8") as file:
        lines = file.readlines()

-    # Extract the frontmatter
    try:
        start_index = lines.index("---\n") + 1
        end_index = lines[start_index:].index("---\n") + start_index

@@ -424,33 +409,22 @@ async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
    except ValueError:
        raise HTTPException(status_code=500, detail="Frontmatter not found.")

-    # Remove the existing key if present
    pattern = re.compile(f"^{key}:.*", re.IGNORECASE)
    frontmatter = [line for line in frontmatter if not pattern.match(line)]

-    # Process value as a CSV string into a list
    values = value.split(',')

-    # Determine insertion format
    if len(values) == 1:
-        # Single value, add as a simple key-value
        new_entry = f"{key}: {values[0]}\n"
    else:
-        # Multiple values, format as a list under the key
        new_entry = f"{key}:\n" + "\n".join([f" - {val}" for val in values]) + "\n"

-    # Insert the new key-value(s)
    frontmatter.append(new_entry)

-    # Reassemble the file
    content = lines[:start_index] + frontmatter + ["---\n"] + lines[end_index + 1:]

-    # Write changes back to the file
    with open(file_path, "w", encoding="utf-8") as file:
        file.writelines(content)

    return {"message": "Frontmatter updated successfully."}
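# Worked example for update_frontmatter (illustrative values): a call like
#   await update_frontmatter(dt, "tags", "a,b")
# strips any existing "tags:" line and appends
#   tags:
#    - a
#    - b
# while a single value, e.g. ("banner", "![[banner.jpg]]"), is written inline
# as "banner: ![[banner.jpg]]". Values are inserted verbatim; nothing here
# YAML-escapes them.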


@note.post("/note/banner")
async def banner_endpoint(dt: str, location: str = None, forecast: str = None, mood: str = None, other_context: str = None):
    '''
@@ -483,6 +457,7 @@ async def generate_banner(dt, location: Location = None, forecast: str = None, m
    await update_frontmatter(date_time, "banner", jpg_embed)
    return local_path


async def generate_context(date_time, location: Location, forecast: str, mood: str, other_context: str):
    display_name = "Location: "
    if location and isinstance(location, Location):
@@ -534,7 +509,7 @@ async def generate_context(date_time, location: Location, forecast: str, mood: s
    additional_info = ', '.join(formatted_events) if formatted_events else ''

    other_context = f"{other_context}, {additional_info}" if other_context else additional_info
    other_context = f"Additional information: {other_context}" if other_context else ""

    prompt = "Generate an aesthetically appealing banner image for a daily note that helps to visualize the following scene information: "
    prompt += "\n".join([display_name, forecast, mood, other_context])
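    # Shape of the assembled prompt (values hypothetical): the base sentence
    # followed by the four parts newline-joined, roughly
    #   "...scene information: \nLocation: Portland\nClear and hot\ncontent\nAdditional information: Birthday dinner"
    # Empty parts still contribute their newline, since join() keeps them.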
@@ -542,7 +517,6 @@ async def generate_context(date_time, location: Location, forecast: str, mood: s
    return prompt


async def get_note(date_time: dt_datetime):
    date_time = await gis.dt(date_time)
    absolute_path, local_path = assemble_journal_path(date_time, filename="Notes", extension=".md", no_timestamp=True)
@@ -552,6 +526,7 @@ async def get_note(date_time: dt_datetime):
        content = file.read()
    return content if content else None


async def sentiment_analysis(date_time: dt_datetime):
    most_recent_note = await get_note(date_time)
    most_recent_note = most_recent_note or await get_note(date_time - timedelta(days=1))
@@ -578,14 +553,14 @@ async def note_weather_get(
debug(f"date: {date} .. date_time: {date_time}")
|
debug(f"date: {date} .. date_time: {date_time}")
|
||||||
content = await update_dn_weather(date_time, force_refresh_weather) #, lat, lon)
|
content = await update_dn_weather(date_time, force_refresh_weather) #, lat, lon)
|
||||||
return JSONResponse(content={"forecast": content}, status_code=200)
|
return JSONResponse(content={"forecast": content}, status_code=200)
|
||||||
|
|
||||||
except HTTPException as e:
|
except HTTPException as e:
|
||||||
return JSONResponse(content={"detail": str(e.detail)}, status_code=e.status_code)
|
return JSONResponse(content={"detail": str(e.detail)}, status_code=e.status_code)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
err(f"Error in note_weather_get: {str(e)}")
|
err(f"Error in note_weather_get: {str(e)}")
|
||||||
raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
|
raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
@note.post("/update/note/{date}")
|
@note.post("/update/note/{date}")
|
||||||
async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refresh: str="False") -> PlainTextResponse:
|
async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refresh: str="False") -> PlainTextResponse:
|
||||||
|
@ -597,6 +572,7 @@ async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refres
|
||||||
    await build_daily_timeslips(date_time)
    return f"[Refresh]({API.URL}/update/note/{date_time.strftime('%Y-%m-%d')})"


async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False, lat: float = None, lon: float = None):
    warn(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.")
    try:
@@ -609,7 +585,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
            place = places[0]
            lat = place.latitude
            lon = place.longitude

        debug(f"lat: {lat}, lon: {lon}, place: {place}")
        city = GEO.find_override_location(lat, lon)
        if city:
@@ -626,7 +602,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
            city = location.name
            city = city if city else location.city
            city = city if city else location.house_number + ' ' + location.road

            debug(f"City geocoded: {city}")

        # Assemble journal path
@@ -644,9 +620,9 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
# debug(f"Day: {DailyWeather}")
|
# debug(f"Day: {DailyWeather}")
|
||||||
icon = DailyWeather.get('icon')
|
icon = DailyWeather.get('icon')
|
||||||
debug(f"Icon: {icon}")
|
debug(f"Icon: {icon}")
|
||||||
|
|
||||||
weather_icon, admonition = get_icon_and_admonition(icon) if icon else (":LiSunMoon:", "ad-weather")
|
weather_icon, admonition = get_icon_and_admonition(icon) if icon else (":LiSunMoon:", "ad-weather")
|
||||||
|
|
||||||
temp = DailyWeather.get('feelslike')
|
temp = DailyWeather.get('feelslike')
|
||||||
|
|
||||||
if DailyWeather.get('tempmax', 0) > 85:
|
if DailyWeather.get('tempmax', 0) > 85:
|
||||||
|
@ -669,7 +645,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
|
||||||
        sunset = DailyWeather.get('sunset')
        srise_str = sunrise.time().strftime("%H:%M")
        sset_str = sunset.time().strftime("%H:%M")

        date_str = date_time.strftime("%Y-%m-%d")
        now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -701,17 +677,17 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
        for hour in HourlyWeather:
            if hour.get('datetime').strftime("%H:%M:%S") in HOURLY_COLUMNS_MAPPING.values():
                times.append(format_hourly_time(hour))
                condition_symbols.append(format_hourly_icon(hour, sunrise, sunset))
                temps.append(format_hourly_temperature(hour))
                winds.append(format_hourly_wind(hour))

        detailed_forecast += assemble_hourly_data_table(times, condition_symbols, temps, winds)
        detailed_forecast += "```\n\n"
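        # Reading of the block above (sketch; the column labels live in
        # HOURLY_COLUMNS_MAPPING, defined elsewhere): only hours whose HH:MM:SS
        # matches a mapped column are kept, and the four parallel lists become
        # one row per metric, roughly:
        #   | time | 08:00 | 12:00 | 16:00 | 20:00 |
        #   | temp | 61˚ F | 72˚ F | 78˚ F | 70˚ F |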

        debug(f"Detailed forecast: {detailed_forecast}.")

        with open(absolute_path, 'w', encoding='utf-8') as note_file:
@@ -725,12 +701,12 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
        else:
            err("Failed to get day")
            raise HTTPException(status_code=500, detail="Failed to retrieve weather data")

    except HTTPException as e:
        err(f"HTTP error: {e}")
        err(traceback.format_exc())
        raise e

    except Exception as e:
        err(f"Error: {e}")
        err(traceback.format_exc())
@@ -740,12 +716,13 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
err(f"Value error in update_dn_weather: {str(ve)}")
|
err(f"Value error in update_dn_weather: {str(ve)}")
|
||||||
err(traceback.format_exc())
|
err(traceback.format_exc())
|
||||||
raise HTTPException(status_code=400, detail=f"Value error: {str(ve)}")
|
raise HTTPException(status_code=400, detail=f"Value error: {str(ve)}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
err(f"Error in update_dn_weather: {str(e)}")
|
err(f"Error in update_dn_weather: {str(e)}")
|
||||||
err(traceback.format_exc())
|
err(traceback.format_exc())
|
||||||
raise HTTPException(status_code=500, detail=f"Error in update_dn_weather: {str(e)}")
|
raise HTTPException(status_code=500, detail=f"Error in update_dn_weather: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
def format_hourly_time(hour):
|
def format_hourly_time(hour):
|
||||||
try:
|
try:
|
||||||
hour_12 = convert_to_12_hour_format(hour.get("datetime"))
|
hour_12 = convert_to_12_hour_format(hour.get("datetime"))
|
||||||
|
@ -754,21 +731,22 @@ def format_hourly_time(hour):
|
||||||
err(f"Error in format_hourly_time: {str(e)}")
|
err(f"Error in format_hourly_time: {str(e)}")
|
||||||
err(traceback.format_exc())
|
err(traceback.format_exc())
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def format_hourly_icon(hour, sunrise, sunset):
|
def format_hourly_icon(hour, sunrise, sunset):
|
||||||
try:
|
try:
|
||||||
icon_str = hour.get('icon', '')
|
icon_str = hour.get('icon', '')
|
||||||
icon, _ = get_icon_and_admonition(icon_str)
|
icon, _ = get_icon_and_admonition(icon_str)
|
||||||
|
|
||||||
precip = hour.get('precip', float(0.0))
|
precip = hour.get('precip', float(0.0))
|
||||||
precip_prob = hour.get('precipprob', float(0.0))
|
precip_prob = hour.get('precipprob', float(0.0))
|
||||||
debug(f"precip: {precip}, prob: {precip_prob}")
|
debug(f"precip: {precip}, prob: {precip_prob}")
|
||||||
|
|
||||||
sp_str = None
|
sp_str = None
|
||||||
|
|
||||||
if (precip > 0.05 and precip_prob > 25):
|
if (precip > 0.05 and precip_prob > 25):
|
||||||
precip_type = hour.get('preciptype', [''])
|
precip_type = hour.get('preciptype', [''])
|
||||||
sp_str = f"{str(precip)}mm"
|
sp_str = f"{str(precip)}mm"
|
||||||
|
|
||||||
if abs(hour.get('datetime') - sunrise) < timedelta(minutes=60):
|
if abs(hour.get('datetime') - sunrise) < timedelta(minutes=60):
|
||||||
icon = ":LiSunrise:"
|
icon = ":LiSunrise:"
|
||||||
|
@ -779,17 +757,18 @@ def format_hourly_icon(hour, sunrise, sunset):
|
||||||
        elif hour.get('uvindex') > 8:
            icon = ":LiRadiation:"
            sp_str = f"UV: {hour.get('uvindex', '')}"

        formatted = f"{icon}" if icon else ""
        formatted += f" {sp_str}" if sp_str else " "

        return formatted

    except Exception as e:
        err(f"Error in format_hourly_icon: {str(e)}")
        err(traceback.format_exc())
        return ""
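# Behavior notes on format_hourly_icon, from the code above: the "Xmm" string
# appears only when precip > 0.05 and precipprob > 25; precip_type is fetched
# but never used, so the annotation does not distinguish rain from snow; and
# the sunrise icon replaces the condition icon within a 60-minute window (a
# mirrored sunset branch presumably sits in the elided part of the hunk).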


def format_hourly_temperature(hour):
    try:
        temp_str = f"{hour.get('temp', '')}˚ F"
@@ -798,7 +777,8 @@ def format_hourly_temperature(hour):
err(f"Error in format_hourly_temperature: {str(e)}")
|
err(f"Error in format_hourly_temperature: {str(e)}")
|
||||||
err(traceback.format_exc())
|
err(traceback.format_exc())
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def format_hourly_wind(hour):
|
def format_hourly_wind(hour):
|
||||||
try:
|
try:
|
||||||
windspeed = hour.get('windspeed', '')
|
windspeed = hour.get('windspeed', '')
|
||||||
|
@ -823,7 +803,7 @@ def get_icon_and_admonition(icon_str) -> Tuple:
|
||||||
debug(f"Received request for emoji {icon_str}")
|
debug(f"Received request for emoji {icon_str}")
|
||||||
if icon_str.startswith(":") and icon_str.endswith(":"):
|
if icon_str.startswith(":") and icon_str.endswith(":"):
|
||||||
return icon_str
|
return icon_str
|
||||||
|
|
||||||
icon_str = icon_str.lower()
|
icon_str = icon_str.lower()
|
||||||
|
|
||||||
if icon_str == "clear-day":
|
if icon_str == "clear-day":
|
||||||
|
@ -876,34 +856,24 @@ def get_icon_and_admonition(icon_str) -> Tuple:
|
||||||
ad = "ad-thunder"
|
ad = "ad-thunder"
|
||||||
else:
|
else:
|
||||||
icon = ":LiHelpCircle:"
|
icon = ":LiHelpCircle:"
|
||||||
ad = "ad-weather"
|
ad = "ad-weather"
|
||||||
|
|
||||||
return icon, ad
|
return icon, ad
|
||||||
|
|
||||||
def get_weather_emoji(weather_condition):
|
def get_weather_emoji(weather_condition):
|
||||||
condition = weather_condition.lower()
|
condition = weather_condition.lower()
|
||||||
-    if 'clear' in condition or 'sunny' in condition:
-        return "☀️"
-    elif 'cloud' in condition or 'overcast' in condition:
-        return "☁️"
-    elif 'rain' in condition:
-        return "🌧️"
-    elif 'snow' in condition:
-        return "❄️"
-    elif 'thunder' in condition or 'storm' in condition:
-        return "⛈️"
-    elif 'fog' in condition or 'mist' in condition:
-        return "🌫️"
-    elif 'wind' in condition:
-        return "💨"
-    elif 'hail' in condition:
-        return "🌨️"
-    elif 'sleet' in condition:
-        return "🌧️"
-    elif 'partly' in condition:
-        return "⛅"
-    else:
-        return "🌡️" # Default emoji for unclassified weather
+    if 'clear' in condition or 'sunny' in condition: return "☀️"
+    elif 'cloud' in condition or 'overcast' in condition: return "☁️"
+    elif 'rain' in condition: return "🌧️"
+    elif 'snow' in condition: return "❄️"
+    elif 'thunder' in condition or 'storm' in condition: return "⛈️"
+    elif 'fog' in condition or 'mist' in condition: return "🌫️"
+    elif 'wind' in condition: return "💨"
+    elif 'hail' in condition: return "🌨️"
+    elif 'sleet' in condition: return "🌧️"
+    elif 'partly' in condition: return "⛅"
+    else: return "🌡️"  # Default emoji for unclassified weather
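# Ordering caveat (editorial observation, unchanged by this commit): matching
# is first-hit, so a string like "partly cloudy" returns "☁️" via the 'cloud'
# branch before 'partly' can fire, and 'sleet' shares "🌧️" with plain rain.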
async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[str, str]]]]) -> str:
    def remove_characters(s: str) -> str:
@@ -911,7 +881,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
        s = s.strip('\n')
        s = re.sub(r'^_+|_+$', '', s)
        return s

    date_str = event_data["date"]
    now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    events_markdown = []
@@ -940,7 +910,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s

            else:
                event_markdown += f"\n - [ ] **{event['start']}—{event['end']}** {markdown_name}"

            if event['attendees']:
                attendee_list = []
                for att in event['attendees']:
@@ -961,10 +931,10 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s

                event_markdown += f"\n * {description}"
            event_markdown += f"\n "

        event_markdown += "\n```\n"
        events_markdown.append(event_markdown)

    header = (
        "---\n"
        f"date: {date_str}\n"
@@ -973,23 +943,25 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
f"updated: {now}\n"
|
f"updated: {now}\n"
|
||||||
"---\n"
|
"---\n"
|
||||||
)
|
)
|
||||||
|
|
||||||
detailed_events = (
|
detailed_events = (
|
||||||
f"{header}"
|
f"{header}"
|
||||||
f"{''.join(events_markdown)}"
|
f"{''.join(events_markdown)}"
|
||||||
)
|
)
|
||||||
return detailed_events
|
return detailed_events
|
||||||
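# Output shape (sketch; field values hypothetical): a YAML frontmatter block
# followed by the concatenated per-event markdown, e.g.
#   ---
#   date: 2024-08-04
#   updated: 2024-08-04 22:12:18
#   ---
#   - [ ] **09:00—10:00** Standup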


@note.get("/note/events", response_class=PlainTextResponse)
async def note_events_endpoint(date: str = Query(None)):

    date_time = await gis.dt(date) if date else await gis.dt(dt_datetime.now())
    response = await update_daily_note_events(date_time)
    return PlainTextResponse(content=response, status_code=200)
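# Usage sketch for the endpoint above (host and port are assumptions, not from
# this diff):
#   curl 'http://localhost:8000/note/events?date=2024-08-04'
# Omitting ?date= falls back to the current datetime.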


async def update_daily_note_events(date_time: dt_datetime):
    debug(f"Looking up events on date: {date_time.strftime('%Y-%m-%d')}")
    try:
        events = await cal.get_events(date_time, date_time)
        debug(f"Raw events: {events}")
        event_data = {
@@ -1009,6 +981,3 @@ async def update_daily_note_events(date_time: dt_datetime):
    except Exception as e:
        err(f"Error processing events: {e}")
        raise HTTPException(status_code=500, detail=str(e))