From d4cfb5fbd20581454e41ca720a1f64c7f8a0062f Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Sun, 4 Aug 2024 22:12:18 -0700
Subject: [PATCH] Auto-update: Sun Aug  4 22:12:18 PDT 2024

---
 sijapi/__main__.py      |  14 +--
 sijapi/classes.py       | 109 +++++++++++-----------
 sijapi/routers/email.py |  51 +++++++----
 sijapi/routers/note.py  | 197 +++++++++++++++++-----------------------
 4 files changed, 176 insertions(+), 195 deletions(-)

diff --git a/sijapi/__main__.py b/sijapi/__main__.py
index 5d70c40..accac93 100755
--- a/sijapi/__main__.py
+++ b/sijapi/__main__.py
@@ -11,7 +11,7 @@ from hypercorn.config import Config as HypercornConfig
 import sys
 import os
 import traceback
-import asyncio 
+import asyncio
 import httpx
 import argparse
 import json
@@ -56,7 +56,7 @@ async def lifespan(app: FastAPI):
     try:
         # Initialize sync structures on all databases
         await API.initialize_sync()
-        
+
     except Exception as e:
         crit(f"Error during startup: {str(e)}")
         crit(f"Traceback: {traceback.format_exc()}")
@@ -136,13 +136,13 @@ async def pull_changes():
         await API.add_primary_keys_to_local_tables()
         await API.add_primary_keys_to_remote_tables()
         try:
-            
+
             source = await API.get_most_recent_source()
-            
+
             if source:
                 # Pull changes from the source
                 total_changes = await API.pull_changes(source)
-                
+
                 return JSONResponse(content={
                     "status": "success",
                     "message": f"Pull complete. Total changes: {total_changes}",
@@ -154,12 +154,12 @@ async def pull_changes():
                     "status": "info",
                     "message": "No instances with more recent data found or all instances are offline."
                 })
-        
+
         except Exception as e:
             err(f"Error during pull: {str(e)}")
             err(f"Traceback: {traceback.format_exc()}")
             raise HTTPException(status_code=500, detail=f"Error during pull: {str(e)}")
-        
+
     except Exception as e:
             err(f"Error while ensuring primary keys to tables: {str(e)}")
             err(f"Traceback: {traceback.format_exc()}")
diff --git a/sijapi/classes.py b/sijapi/classes.py
index 313728c..efb39d7 100644
--- a/sijapi/classes.py
+++ b/sijapi/classes.py
@@ -29,6 +29,7 @@ from .logs import Logger
 L = Logger("classes", "classes")
 logger = L.get_module_logger("classes")
 
+# Logging functions
 def debug(text: str): logger.debug(text)
 def info(text: str): logger.info(text)
 def warn(text: str): logger.warning(text)
@@ -81,7 +82,7 @@ class Configuration(BaseModel):
             instance = cls.create_dynamic_model(**resolved_data)
             instance._dir_config = dir_config or instance
             return instance
-        
+
         except Exception as e:
             err(f"Error loading configuration: {str(e)}")
             raise
@@ -137,7 +138,7 @@ class Configuration(BaseModel):
             elif len(parts) == 2 and parts[0] == 'ENV':
                 replacement = os.getenv(parts[1], '')
             else:
-                replacement = value 
+                replacement = value
 
             value = value.replace('{{' + match + '}}', str(replacement))
 
@@ -314,7 +315,7 @@ class APIConfig(BaseModel):
     async def get_online_hosts(self) -> List[Dict[str, Any]]:
         current_time = time.time()
         cache_key = "online_hosts"
-        
+
         if cache_key in self.online_hosts_cache:
             cached_hosts, cache_time = self.online_hosts_cache[cache_key]
             if current_time - cache_time < self.online_hosts_cache_ttl:
@@ -322,7 +323,7 @@ class APIConfig(BaseModel):
 
         online_hosts = []
         local_ts_id = os.environ.get('TS_ID')
-        
+
         for pool_entry in self.POOL:
             if pool_entry['ts_id'] != local_ts_id:
                 pool_key = f"{pool_entry['ts_ip']}:{pool_entry['db_port']}"
@@ -331,7 +332,7 @@ class APIConfig(BaseModel):
                         continue
                     else:
                         del self.offline_servers[pool_key]
-                
+
                 conn = await self.get_connection(pool_entry)
                 if conn is not None:
                     online_hosts.append(pool_entry)
@@ -343,16 +344,16 @@ class APIConfig(BaseModel):
     async def get_connection(self, pool_entry: Dict[str, Any] = None):
         if pool_entry is None:
             pool_entry = self.local_db
-        
+
         pool_key = f"{pool_entry['ts_ip']}:{pool_entry['db_port']}"
-        
+
         # Check if the server is marked as offline
         if pool_key in self.offline_servers:
             if time.time() - self.offline_servers[pool_key] < self.offline_timeout:
                 return None
             else:
                 del self.offline_servers[pool_key]
-        
+
         if pool_key not in self.db_pools:
             try:
                 self.db_pools[pool_key] = await asyncpg.create_pool(
@@ -385,7 +386,7 @@ class APIConfig(BaseModel):
     async def initialize_sync(self):
         local_ts_id = os.environ.get('TS_ID')
         online_hosts = await self.get_online_hosts()
-        
+
         for pool_entry in online_hosts:
             if pool_entry['ts_id'] == local_ts_id:
                 continue  # Skip local database
@@ -393,29 +394,29 @@ class APIConfig(BaseModel):
                 conn = await self.get_connection(pool_entry)
                 if conn is None:
                     continue  # Skip this database if connection failed
-                
+
                 debug(f"Starting sync initialization for {pool_entry['ts_ip']}...")
-                
+
                 # Check PostGIS installation
                 postgis_installed = await self.check_postgis(conn)
                 if not postgis_installed:
                     warn(f"PostGIS is not installed on {pool_entry['ts_id']} ({pool_entry['ts_ip']}). Some spatial operations may fail.")
-                
+
                 tables = await conn.fetch("""
-                    SELECT tablename FROM pg_tables 
+                    SELECT tablename FROM pg_tables
                     WHERE schemaname = 'public'
                 """)
-                
+
                 for table in tables:
                     table_name = table['tablename']
                     await self.ensure_sync_columns(conn, table_name)
-                
+
                 debug(f"Sync initialization complete for {pool_entry['ts_ip']}. All tables now have necessary sync columns and triggers.")
-                    
+
             except Exception as e:
                 err(f"Error initializing sync for {pool_entry['ts_ip']}: {str(e)}")
                 err(f"Traceback: {traceback.format_exc()}")
-    
+
     def _schedule_sync_task(self, table_name: str, pk_value: Any, version: int, server_id: str):
         # Use a background task manager to handle syncing
         task_key = f"{table_name}:{pk_value}" if pk_value else table_name
@@ -439,20 +440,20 @@ class APIConfig(BaseModel):
             if not primary_key:
                 # Add an id column as primary key if it doesn't exist
                 await conn.execute(f"""
-                    ALTER TABLE "{table_name}" 
+                    ALTER TABLE "{table_name}"
                     ADD COLUMN IF NOT EXISTS id SERIAL PRIMARY KEY;
                 """)
                 primary_key = 'id'
 
             # Ensure version column exists
             await conn.execute(f"""
-                ALTER TABLE "{table_name}" 
+                ALTER TABLE "{table_name}"
                 ADD COLUMN IF NOT EXISTS version INTEGER DEFAULT 1;
             """)
 
             # Ensure server_id column exists
             await conn.execute(f"""
-                ALTER TABLE "{table_name}" 
+                ALTER TABLE "{table_name}"
                 ADD COLUMN IF NOT EXISTS server_id TEXT DEFAULT '{os.environ.get('TS_ID')}';
             """)
 
@@ -487,7 +488,7 @@ class APIConfig(BaseModel):
 
             debug(f"Successfully ensured sync columns and trigger for table {table_name}")
             return primary_key
-        
+
         except Exception as e:
             err(f"Error ensuring sync columns for table {table_name}: {str(e)}")
             err(f"Traceback: {traceback.format_exc()}")
@@ -497,7 +498,7 @@ class APIConfig(BaseModel):
         if conn is None:
             debug(f"Skipping offline server...")
             return None
-        
+
         try:
             result = await conn.fetchval("SELECT PostGIS_version();")
             if result:
@@ -509,7 +510,7 @@ class APIConfig(BaseModel):
         except Exception as e:
             err(f"Error checking PostGIS: {str(e)}")
             return False
-        
+
 
     async def pull_changes(self, source_pool_entry, batch_size=10000):
         if source_pool_entry['ts_id'] == os.environ.get('TS_ID'):
@@ -538,10 +539,10 @@ class APIConfig(BaseModel):
                 return 0
 
             tables = await source_conn.fetch("""
-                SELECT tablename FROM pg_tables 
+                SELECT tablename FROM pg_tables
                 WHERE schemaname = 'public'
             """)
-            
+
             for table in tables:
                 table_name = table['tablename']
                 try:
@@ -550,7 +551,7 @@ class APIConfig(BaseModel):
                     else:
                         primary_key = await self.ensure_sync_columns(dest_conn, table_name)
                         last_synced_version = await self.get_last_synced_version(dest_conn, table_name, source_id)
-                        
+
                         while True:
                             changes = await source_conn.fetch(f"""
                                 SELECT * FROM "{table_name}"
@@ -558,16 +559,16 @@ class APIConfig(BaseModel):
                                 ORDER BY version ASC
                                 LIMIT $3
                             """, last_synced_version, source_id, batch_size)
-                            
+
                             if not changes:
                                 break  # No more changes for this table
-                            
+
                             changes_count = await self.apply_batch_changes(dest_conn, table_name, changes, primary_key)
                             total_changes += changes_count
-                            
+
                             if changes_count > 0:
                                 info(f"Synced batch for {table_name}: {changes_count} changes. Total so far: {total_changes}")
-                            
+
                             last_synced_version = changes[-1]['version']  # Update last synced version
 
                 except Exception as e:
@@ -599,7 +600,7 @@ class APIConfig(BaseModel):
         if conn is None:
             debug(f"Skipping offline server...")
             return 0
-        
+
         if table_name in self.SPECIAL_TABLES:
             debug(f"Skipping get_last_synced_version because {table_name} is special.")
             return 0  # Special handling for tables without version column
@@ -623,15 +624,15 @@ class APIConfig(BaseModel):
         local_ts_id = os.environ.get('TS_ID')
         online_hosts = await self.get_online_hosts()
         num_online_hosts = len(online_hosts)
-        
+
         if num_online_hosts > 0:
             online_ts_ids = [host['ts_id'] for host in online_hosts if host['ts_id'] != local_ts_id]
             crit(f"Online hosts: {', '.join(online_ts_ids)}")
-            
+
             for pool_entry in online_hosts:
                 if pool_entry['ts_id'] == local_ts_id:
                     continue  # Skip local database
-                
+
                 try:
                     conn = await self.get_connection(pool_entry)
                     if conn is None:
@@ -639,14 +640,14 @@ class APIConfig(BaseModel):
                         continue
 
                     tables = await conn.fetch("""
-                        SELECT tablename FROM pg_tables 
+                        SELECT tablename FROM pg_tables
                         WHERE schemaname = 'public'
                     """)
-                    
+
                     for table in tables:
                         table_name = table['tablename']
                         if table_name in self.SPECIAL_TABLES:
-                            continue 
+                            continue
                         try:
                             result = await conn.fetchrow(f"""
                                 SELECT MAX(version) as max_version, server_id
@@ -684,7 +685,7 @@ class APIConfig(BaseModel):
                 most_recent_source = next(host for host in online_hosts if host['ts_id'] != local_ts_id)
             else:
                 crit("No other online hosts available for sync.")
-        
+
         return most_recent_source
 
 
@@ -737,7 +738,7 @@ class APIConfig(BaseModel):
             try:
                 # Execute the query
                 result = await conn.fetch(query, *args)
-                
+
                 if not result:
                     continue
 
@@ -846,9 +847,9 @@ class APIConfig(BaseModel):
                         columns = updated_row.keys()
                         placeholders = [f'${i+1}' for i in range(len(columns))]
                         primary_key = self.get_primary_key(table_name)
-                        
+
                         remote_version = await remote_conn.fetchval(f"""
-                            SELECT version FROM "{table_name}" 
+                            SELECT version FROM "{table_name}"
                             WHERE "{primary_key}" = $1
                         """, updated_row[primary_key])
 
@@ -920,17 +921,17 @@ class APIConfig(BaseModel):
             if all_rows:
                 columns = list(all_rows[0].keys())
                 placeholders = [f'${i+1}' for i in range(len(columns))]
-                
+
                 insert_query = f"""
                     INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
                     VALUES ({', '.join(placeholders)})
                     ON CONFLICT DO NOTHING
                 """
-                
+
                 async with remote_conn.transaction():
                     for row in all_rows:
                         await remote_conn.execute(insert_query, *row.values())
-                
+
                 return True
             else:
                 debug(f"No rows to push for table {table_name}")
@@ -976,7 +977,7 @@ class APIConfig(BaseModel):
         try:
             columns = list(changes[0].keys())
             placeholders = [f'${i+1}' for i in range(len(columns))]
-            
+
             if primary_key:
                 insert_query = f"""
                     INSERT INTO "{table_name}" ({', '.join(f'"{col}"' for col in columns)})
@@ -1057,7 +1058,7 @@ class APIConfig(BaseModel):
                             proj4text = $4::text
                         WHERE srid = $5::integer
                     """
-                    await dest_conn.execute(update_query, 
+                    await dest_conn.execute(update_query,
                         source_entry['auth_name'],
                         source_entry['auth_srid'],
                         source_entry['srtext'],
@@ -1080,7 +1081,7 @@ class APIConfig(BaseModel):
         # You might want to cache this information for performance
         # For now, we'll assume it's always 'id', but you should implement proper logic here
         return 'id'
-    
+
 
     async def add_primary_keys_to_local_tables(self):
         conn = await self.get_connection()
@@ -1091,10 +1092,10 @@ class APIConfig(BaseModel):
 
         try:
             tables = await conn.fetch("""
-                SELECT tablename FROM pg_tables 
+                SELECT tablename FROM pg_tables
                 WHERE schemaname = 'public'
             """)
-            
+
             for table in tables:
                 table_name = table['tablename']
                 if table_name not in self.SPECIAL_TABLES:
@@ -1104,7 +1105,7 @@ class APIConfig(BaseModel):
 
     async def add_primary_keys_to_remote_tables(self):
         online_hosts = await self.get_online_hosts()
-        
+
         for pool_entry in online_hosts:
             conn = await self.get_connection(pool_entry)
             if conn is None:
@@ -1114,10 +1115,10 @@ class APIConfig(BaseModel):
             try:
                 info(f"Adding primary keys to existing tables on {pool_entry['ts_id']}...")
                 tables = await conn.fetch("""
-                    SELECT tablename FROM pg_tables 
+                    SELECT tablename FROM pg_tables
                     WHERE schemaname = 'public'
                 """)
-                
+
                 for table in tables:
                     table_name = table['tablename']
                     if table_name not in self.SPECIAL_TABLES:
@@ -1140,7 +1141,7 @@ class APIConfig(BaseModel):
         close_tasks = []
         for pool_key, pool in self.db_pools.items():
             close_tasks.append(self.close_pool_with_timeout(pool, pool_key))
-        
+
         await asyncio.gather(*close_tasks)
         self.db_pools.clear()
         info("All database connection pools closed.")
@@ -1510,4 +1511,4 @@ class WidgetUpdate(BaseModel):
     color: Optional[str] = None
     url: Optional[str] = None
     shortcut: Optional[str] = None
-    graph: Optional[str] = None
\ No newline at end of file
+    graph: Optional[str] = None
diff --git a/sijapi/routers/email.py b/sijapi/routers/email.py
index 74fac13..adfe790 100644
--- a/sijapi/routers/email.py
+++ b/sijapi/routers/email.py
@@ -1,5 +1,5 @@
 '''
-Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming emails that match certain criteria and save the Text-To-Speech converted summaries into a specified "podcast" folder. 
+Uses IMAP and SMTP login credentials to monitor an inbox, summarize incoming emails that match certain criteria, and save text-to-speech conversions of the summaries into a specified "podcast" folder.
 '''
 #routers/email.py
 
@@ -55,9 +55,9 @@ def get_smtp_connection(autoresponder: AutoResponder):
     context = ssl.create_default_context()
     context.check_hostname = False
     context.verify_mode = ssl.CERT_NONE
-    
+
     smtp_config = autoresponder.smtp
-    
+
     if smtp_config.encryption == 'SSL':
         try:
             debug(f"Attempting SSL connection to {smtp_config.host}:{smtp_config.port}")
@@ -73,6 +73,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
             except Exception as e:
                 err(f"STARTTLS connection failed: {str(e)}")
                 raise
+
     elif smtp_config.encryption == 'STARTTLS':
         try:
             debug(f"Attempting STARTTLS connection to {smtp_config.host}:{smtp_config.port}")
@@ -82,6 +83,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
         except Exception as e:
             err(f"STARTTLS connection failed: {str(e)}")
             raise
+
     else:
         try:
             debug(f"Attempting unencrypted connection to {smtp_config.host}:{smtp_config.port}")
@@ -128,8 +130,6 @@ async def send_response(to_email: str, subject: str, body: str, profile: AutoRes
                 err(f"Error closing SMTP connection: {str(e)}")
 
 
-
-
 def clean_email_content(html_content):
     soup = BeautifulSoup(html_content, "html.parser")
     return re.sub(r'[ \t\r\n]+', ' ', soup.get_text()).strip()
@@ -141,11 +141,11 @@ async def extract_attachments(attachments) -> List[str]:
         attachment_name = attachment.get('filename', 'tempfile.txt')
         _, ext = os.path.splitext(attachment_name)
         ext = ext.lower() if ext else '.txt'
-        
+
         with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tmp_file:
             tmp_file.write(attachment['content'].getvalue())
             tmp_file_path = tmp_file.name
-        
+
         try:
             attachment_text = await extract_text(tmp_file_path)
             attachment_texts.append(attachment_text)
@@ -155,6 +155,7 @@ async def extract_attachments(attachments) -> List[str]:
 
     return attachment_texts
 
+
 async def process_account_archival(account: EmailAccount):
     summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
     os.makedirs(summarized_log.parent, exist_ok = True)
@@ -177,7 +178,7 @@ async def process_account_archival(account: EmailAccount):
                             recipients=recipients,
                             subject=message.subject,
                             body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
-                            attachments=message.attachments 
+                            attachments=message.attachments
                         )
                         md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
                         md_summary = await summarize_single_email(this_email, account.podcast) if account.summarize == True else None
@@ -192,9 +193,10 @@ async def process_account_archival(account: EmailAccount):
                     #     debug(f"Skipping {uid_str} because it was already processed.")
         except Exception as e:
             err(f"An error occurred during summarization for account {account.name}: {e}")
-        
+
         await asyncio.sleep(account.refresh)
 
+
 async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False):
     tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
     summary = ""
@@ -212,6 +214,7 @@ async def summarize_single_email(this_email: IncomingEmail, podcast: bool = Fals
 
     return md_summary
 
+
 async def archive_single_email(this_email: IncomingEmail, summary: str = None):
     try:
         markdown_content = f'''---
@@ -219,14 +222,14 @@ date: {this_email.datetime_received.strftime('%Y-%m-%d')}
 tags:
 - email
 ---
-|     |     |     | 
-| --: | :--: |  :--: | 
+|     |     |     |
+| --: | :--: |  :--: |
 |  *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}**    |    |
 |  *from* | **[[{this_email.sender}]]**    |    |
 |  *to* | {', '.join([f'**[[{recipient.email}]]**' if not recipient.name else f'**[[{recipient.name}|{recipient.email}]]**' for recipient in this_email.recipients])}   |    |
 |  *subject* | **{this_email.subject}**    |    |
 '''
-    
+
         if summary:
             markdown_content += summary
 
@@ -235,11 +238,12 @@ tags:
 {this_email.body}
 '''
         return markdown_content
-    
+
     except Exception as e:
         err(f"Exception: {e}")
         return False
-    
+
+
 async def save_email(md_path, md_content):
     try:
         with open(md_path, 'w', encoding='utf-8') as md_file:
@@ -251,6 +255,7 @@ async def save_email(md_path, md_content):
         err(f"Failed to save email: {e}")
         return False
 
+
 def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
     debug(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
     def matches_list(item: str, this_email: IncomingEmail) -> bool:
@@ -294,13 +299,14 @@ async def process_account_autoresponding(account: EmailAccount):
 
         except Exception as e:
             err(f"An error occurred during auto-responding for account {account.name}: {e}")
-        
+
         await asyncio.sleep(account.refresh)
 
+
 async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path):
     this_email = await create_incoming_email(message)
     debug(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
-    
+
     matching_profiles = get_matching_autoresponders(this_email, account)
     debug(f"Matching profiles: {matching_profiles}")
 
@@ -319,6 +325,7 @@ async def autorespond_single_email(message, uid_str: str, account: EmailAccount,
         else:
             warn(f"Unable to generate auto-response for {this_email.subject}")
 
+
 async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]:
     info(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")
 
@@ -335,16 +342,16 @@ Body: {this_email.body}
 Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}. Keep the response {profile.style} and to the point, but responsive to the sender's inquiry. Do not mention or recite this context information in your response.
     '''
     sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"
-    
+
     try:
         response = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
         debug(f"query_ollama response: {response}")
-        
+
         if isinstance(response, dict) and "message" in response and "content" in response["message"]:
             response = response["message"]["content"]
-        
+
         return response + "\n\n"
-        
+
     except Exception as e:
         err(f"Error generating auto-response: {str(e)}")
         return None
@@ -363,22 +370,26 @@ async def create_incoming_email(message) -> IncomingEmail:
         attachments=message.attachments
     )
 
+
 async def load_processed_uids(filename: Path) -> Set[str]:
     if filename.exists():
         async with aiofiles.open(filename, 'r') as f:
             return set(line.strip().split(':')[-1] for line in await f.readlines())
     return set()
 
+
 async def save_processed_uid(filename: Path, account_name: str, uid: str):
     async with aiofiles.open(filename, 'a') as f:
         await f.write(f"{account_name}:{uid}\n")
 
+
 async def process_all_accounts():
     email_accounts = load_email_accounts(EMAIL_CONFIG)
     summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts]
     autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts]
     await asyncio.gather(*summarization_tasks, *autoresponding_tasks)
 
+
 @email.on_event("startup")
 async def startup_event():
     await asyncio.sleep(5)
diff --git a/sijapi/routers/note.py b/sijapi/routers/note.py
index 2fb18ab..9fdcc9e 100644
--- a/sijapi/routers/note.py
+++ b/sijapi/routers/note.py
@@ -22,7 +22,6 @@ from sijapi.routers import asr, cal, gis, img, llm, serve, timing, tts, weather
 from sijapi.utilities import assemble_journal_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING
 from sijapi.classes import Location
 
-
 note = APIRouter()
 logger = L.get_module_logger("note")
 def debug(text: str): logger.debug(text)
@@ -43,7 +42,6 @@ async def note_add_endpoint(file: Optional[UploadFile] = File(None), text: Optio
         return JSONResponse({"message": "Note added successfully", "entry": result}, status_code=201)
 
 
-
 async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None):
     now = dt_datetime.now()
     transcription_entry = ""
@@ -52,24 +50,24 @@ async def process_for_daily_note(file: Optional[UploadFile] = File(None), text:
         debug("File received...")
         file_content = await file.read()
         audio_io = BytesIO(file_content)
-        
+
         # Improve error handling for file type guessing
         guessed_type = mimetypes.guess_type(file.filename)
         file_type = guessed_type[0] if guessed_type[0] else "application/octet-stream"
-        
+
         debug(f"Processing as {file_type}...")
-        
+
         # Extract the main type (e.g., 'audio', 'image', 'video')
         main_type = file_type.split('/')[0]
         subdir = main_type.title() if main_type else "Documents"
-        
+
         absolute_path, relative_path = assemble_journal_path(now, subdir=subdir, filename=file.filename)
         debug(f"Destination path: {absolute_path}")
-        
+
         with open(absolute_path, 'wb') as f:
             f.write(file_content)
         debug(f"Processing {f.name}...")
-        
+
         if main_type == 'audio':
             transcription_entry = await asr.transcribe_audio(file_path=absolute_path, params=asr.TranscribeParams(model="small-en", language="en", threads=6))
             file_entry = f"![[{relative_path}]]"
@@ -77,24 +75,22 @@ async def process_for_daily_note(file: Optional[UploadFile] = File(None), text:
             file_entry = f"![[{relative_path}]]"
         else:
             file_entry = f"[Source]({relative_path})"
-    
+
     text_entry = text if text else ""
     debug(f"transcription: {transcription_entry}\nfile_entry: {file_entry}\ntext_entry: {text_entry}")
     return await add_to_daily_note(transcription_entry, file_entry, text_entry, now)
 
 
-
-
 async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: dt_datetime = None):
     date_time = date_time or dt_datetime.now()
     note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True)
     time_str = date_time.strftime("%H:%M")
-    
+
     entry_lines = []
     if additional_text and additional_text.strip():
-        entry_lines.append(f"\t* {additional_text.strip()}") 
+        entry_lines.append(f"\t* {additional_text.strip()}")
     if transcription and transcription.strip():
-        entry_lines.append(f"\t* {transcription.strip()}") 
+        entry_lines.append(f"\t* {transcription.strip()}")
     if file_link and file_link.strip():
         entry_lines.append(f"\t\t {file_link.strip()}")
 
@@ -104,7 +100,7 @@ async def add_to_daily_note(transcription: str = None, file_link: str = None, ad
     if note_path.exists():
         with open(note_path, 'a', encoding='utf-8') as note_file:
             note_file.write(entry)
-    else: 
+    else:
         date_str = date_time.strftime("%Y-%m-%d")
         frontmatter = f"""---
 date: {date_str}
@@ -121,7 +117,6 @@ tags:
     return {"timestamp": time_str, "content": entry.strip()}
 
 
-
 async def process_document(
     bg_tasks: BackgroundTasks,
     document: File,
@@ -195,6 +190,7 @@ added: {timestamp}
         err(f"Failed to clip: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
 
+
 def list_and_correct_impermissible_files(root_dir, rename: bool = False):
     """List and correct all files with impermissible names."""
     impermissible_files = []
@@ -204,11 +200,11 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
                 file_path = Path(dirpath) / filename
                 impermissible_files.append(file_path)
                 debug(f"Impermissible file found: {file_path}")
-                
+
                 # Sanitize the file name
                 new_filename = sanitize_filename(filename)
                 new_file_path = Path(dirpath) / new_filename
-                
+
                 # Ensure the new file name does not already exist
                 if new_file_path.exists():
                     counter = 1
@@ -217,24 +213,23 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
                         new_filename = f"{base_name}_{counter}{ext}"
                         new_file_path = Path(dirpath) / new_filename
                         counter += 1
-                
+
                 # Rename the file
                 if rename:
                     os.rename(file_path, new_file_path)
                     debug(f"Renamed: {file_path} -> {new_file_path}")
-    
+
     return impermissible_files
 
 journal = OBSIDIAN_VAULT_DIR / "journal"
 list_and_correct_impermissible_files(journal, rename=True)
 
-### Daily Note Builder ###
 
 @note.get("/note/bulk/{dt_start}/{dt_end}")
 async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
     start_date = dt_datetime.strptime(dt_start, "%Y-%m-%d")
     end_date = dt_datetime.strptime(dt_end, "%Y-%m-%d")
-    
+
     results = []
     current_date = start_date
     while current_date <= end_date:
@@ -242,9 +237,8 @@ async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
         result = await build_daily_note(formatted_date)
         results.append(result)
         current_date += timedelta(days=1)
-    
-    return {"urls": results}
 
+    return {"urls": results}
 
 
 @note.get("/note/create")
@@ -261,7 +255,7 @@ async def build_daily_note_getpoint():
         date_time = dt_datetime.now(tz)
         path = await build_daily_note(date_time, loc.latitude, loc.longitude)
         path_str = str(path)
-        
+
         info(f"Successfully created daily note at {path_str}")
         return JSONResponse(content={"path": path_str}, status_code=200)
 
@@ -277,7 +271,6 @@ async def build_daily_note_getpoint():
         raise HTTPException(status_code=500, detail="An unexpected error occurred")
 
 
-
 @note.post("/note/create")
 async def build_daily_note_endpoint(
     date_str: Optional[str] = Form(dt_datetime.now().strftime("%Y-%m-%d")),
@@ -305,7 +298,7 @@ async def build_daily_note_endpoint(
     path = await build_daily_note(date_time, lat, lon)
 
     path_str = str(path)  # Convert PosixPath to string
-    
+
     return JSONResponse(content={"path": path_str}, status_code=200)
 
 
@@ -319,13 +312,13 @@ Obsidian helper. Takes a datetime and creates a new daily note. Note: it uses th
     day_before = (date_time - timedelta(days=1)).strftime("%Y-%m-%d %A")  # 2024-05-26 Sunday formatting
     day_after = (date_time + timedelta(days=1)).strftime("%Y-%m-%d %A")  # 2024-05-28 Tuesday formatting
     header = f"# [[{day_before}|← ]] {formatted_day} [[{day_after}| →]]\n\n"
-    
+
     if not lat or not lon:
         places = await gis.fetch_locations(date_time)
         lat, lon = places[0].latitude, places[0].longitude
 
     location = await GEO.code((lat, lon))
-    
+
     timeslips = await build_daily_timeslips(date_time)
 
     fm_day = date_time.strftime("%Y-%m-%d")
@@ -349,7 +342,7 @@ Obsidian helper. Takes a datetime and creates a new daily note. Note: it uses th
     map_embed = f"![[{map_path}]]"
 
     _, banner_path = assemble_journal_path(date_time, filename="Banner", extension=".jpg", no_timestamp = True)
-   
+
     body = f"""---
 date: "{fm_day}"
 banner: "![[{banner_path}]]"
@@ -357,14 +350,14 @@ tags:
  - daily-note
 created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
 ---
-    
+
 {header}
 {weather_embed}
 {map_embed}
 
 ## Events
 {event_embed}
- 
+
 ## Tasks
 {task_embed}
 
@@ -381,19 +374,15 @@ created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
     banner = await generate_banner(formatted_day, location, weather_note)
 
     return absolute_path
-    
 
 
 async def build_daily_timeslips(date):
-    '''
-
-    '''
     absolute_path, relative_path = assemble_journal_path(date, filename = "Timeslips", extension=".md", no_timestamp = True)
     content = await timing.process_timing_markdown(date, date)
     # document_content = await document.read()
     with open(absolute_path, 'wb') as f:
         f.write(content.encode())
-    
+
     return f"![[{relative_path}]]"
 
 
@@ -402,21 +391,17 @@ async def update_frontmatter_endpoint(date: str, key: str, value: str):
     date_time = dt_datetime.strptime(date, "%Y-%m-%d")
     result = await update_frontmatter(date_time, key, value)
     return result
-    
-async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
-    # Parse the date and format paths
-    file_path, relative_path = assemble_journal_path(date_time)
 
-    # Check if the file exists
+
+async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
+    file_path, relative_path = assemble_journal_path(date_time)
     if not file_path.exists():
         crit(f"Markdown file not found at {file_path}")
         raise HTTPException(status_code=404, detail="Markdown file not found.")
 
-    # Read the file
     with open(file_path, "r", encoding="utf-8") as file:
         lines = file.readlines()
-    
-    # Extract the frontmatter
+
     try:
         start_index = lines.index("---\n") + 1
         end_index = lines[start_index:].index("---\n") + start_index
@@ -424,33 +409,22 @@ async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
     except ValueError:
         raise HTTPException(status_code=500, detail="Frontmatter not found.")
 
-    # Remove the existing key if present
     pattern = re.compile(f"^{key}:.*", re.IGNORECASE)
     frontmatter = [line for line in frontmatter if not pattern.match(line)]
-
-    # Process value as a CSV string into a list
     values = value.split(',')
-
-    # Determine insertion format
     if len(values) == 1:
-        # Single value, add as a simple key-value
         new_entry = f"{key}: {values[0]}\n"
     else:
-        # Multiple values, format as a list under the key
         new_entry = f"{key}:\n" + "\n".join([f" - {val}" for val in values]) + "\n"
 
-    # Insert the new key-value(s)
     frontmatter.append(new_entry)
-
-    # Reassemble the file
     content = lines[:start_index] + frontmatter + ["---\n"] + lines[end_index + 1:]
-
-    # Write changes back to the file
     with open(file_path, "w", encoding="utf-8") as file:
         file.writelines(content)
 
     return {"message": "Frontmatter updated successfully."}
 
+
 @note.post("/note/banner")
 async def banner_endpoint(dt: str, location: str = None, forecast: str = None, mood: str = None, other_context: str = None):
     '''
@@ -483,6 +457,7 @@ async def generate_banner(dt, location: Location = None, forecast: str = None, m
     await update_frontmatter(date_time, "banner", jpg_embed)
     return local_path
 
+
 async def generate_context(date_time, location: Location, forecast: str, mood: str, other_context: str):
     display_name = "Location: "
     if location and isinstance(location, Location):
@@ -534,7 +509,7 @@ async def generate_context(date_time, location: Location, forecast: str, mood: s
     additional_info = ', '.join(formatted_events) if formatted_events else ''
 
     other_context = f"{other_context}, {additional_info}" if other_context else additional_info
-    other_context = f"Additional information: {other_context}" if other_context else "" 
+    other_context = f"Additional information: {other_context}" if other_context else ""
 
     prompt = "Generate an aesthetically appealing banner image for a daily note that helps to visualize the following scene information: "
     prompt += "\n".join([display_name, forecast, mood, other_context])
@@ -542,7 +517,6 @@ async def generate_context(date_time, location: Location, forecast: str, mood: s
     return prompt
 
 
-
 async def get_note(date_time: dt_datetime):
     date_time = await gis.dt(date_time)
     absolute_path, local_path = assemble_journal_path(date_time, filename = "Notes", extension = ".md", no_timestamp = True)
@@ -552,6 +526,7 @@ async def get_note(date_time: dt_datetime):
             content = file.read()
         return content if content else None
 
+
 async def sentiment_analysis(date_time: dt_datetime):
     most_recent_note = await get_note(date_time)
     most_recent_note = most_recent_note or await get_note(date_time - timedelta(days=1))
@@ -578,14 +553,14 @@ async def note_weather_get(
         debug(f"date: {date} .. date_time: {date_time}")
         content = await update_dn_weather(date_time, force_refresh_weather) #, lat, lon)
         return JSONResponse(content={"forecast": content}, status_code=200)
-    
+
     except HTTPException as e:
         return JSONResponse(content={"detail": str(e.detail)}, status_code=e.status_code)
 
     except Exception as e:
         err(f"Error in note_weather_get: {str(e)}")
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
-                    
+
 
 @note.post("/update/note/{date}")
 async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refresh: str="False") -> PlainTextResponse:
@@ -597,6 +572,7 @@ async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refres
     await build_daily_timeslips(date_time)
     return f"[Refresh]({API.URL}/update/note/{date_time.strftime('%Y-%m-%d')}"
 
+
 async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False, lat: float = None, lon: float = None):
     warn(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.")
     try:
@@ -609,7 +585,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
             place = places[0]
             lat = place.latitude
             lon = place.longitude
-        
+
         debug(f"lat: {lat}, lon: {lon}, place: {place}")
         city = GEO.find_override_location(lat, lon)
         if city:
@@ -626,7 +602,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                 city = location.name
                 city = city if city else location.city
                 city = city if city else location.house_number + ' ' + location.road
-                
+
                 debug(f"City geocoded: {city}")
 
         # Assemble journal path
@@ -644,9 +620,9 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                     # debug(f"Day: {DailyWeather}")
                     icon = DailyWeather.get('icon')
                     debug(f"Icon: {icon}")
-                    
+
                     weather_icon, admonition = get_icon_and_admonition(icon) if icon else (":LiSunMoon:", "ad-weather")
-                    
+
                     temp = DailyWeather.get('feelslike')
 
                     if DailyWeather.get('tempmax', 0) > 85:
@@ -669,7 +645,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                     sunset = DailyWeather.get('sunset')
                     srise_str = sunrise.time().strftime("%H:%M")
                     sset_str = sunset.time().strftime("%H:%M")
-                    
+
 
                     date_str = date_time.strftime("%Y-%m-%d")
                     now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -701,17 +677,17 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
                         for hour in HourlyWeather:
                             if hour.get('datetime').strftime("%H:%M:%S") in HOURLY_COLUMNS_MAPPING.values():
 
-                                times.append(format_hourly_time(hour)) 
+                                times.append(format_hourly_time(hour))
 
                                 condition_symbols.append(format_hourly_icon(hour, sunrise, sunset))
 
                                 temps.append(format_hourly_temperature(hour))
 
                                 winds.append(format_hourly_wind(hour))
-                        
+
                         detailed_forecast += assemble_hourly_data_table(times, condition_symbols, temps, winds)
                         detailed_forecast += f"```\n\n"
-                    
+
                     debug(f"Detailed forecast: {detailed_forecast}.")
 
                     with open(absolute_path, 'w', encoding='utf-8') as note_file:
@@ -725,12 +701,12 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
             else:
                 err(f"Failed to get day")
                 raise HTTPException(status_code=500, detail="Failed to retrieve weather data")
-            
+
         except HTTPException as e:
             err(f"HTTP error: {e}")
             err(traceback.format_exc())
             raise e
-        
+
         except Exception as e:
             err(f"Error: {e}")
             err(traceback.format_exc())
@@ -740,12 +716,13 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
         err(f"Value error in update_dn_weather: {str(ve)}")
         err(traceback.format_exc())
         raise HTTPException(status_code=400, detail=f"Value error: {str(ve)}")
-    
+
     except Exception as e:
         err(f"Error in update_dn_weather: {str(e)}")
         err(traceback.format_exc())
         raise HTTPException(status_code=500, detail=f"Error in update_dn_weather: {str(e)}")
 
+
 def format_hourly_time(hour):
     try:
         hour_12 = convert_to_12_hour_format(hour.get("datetime"))
@@ -754,21 +731,22 @@ def format_hourly_time(hour):
         err(f"Error in format_hourly_time: {str(e)}")
         err(traceback.format_exc())
         return ""
-    
+
+
 def format_hourly_icon(hour, sunrise, sunset):
     try:
         icon_str = hour.get('icon', '')
         icon, _ = get_icon_and_admonition(icon_str)
-            
+
         precip = hour.get('precip', float(0.0))
         precip_prob = hour.get('precipprob', float(0.0))
         debug(f"precip: {precip}, prob: {precip_prob}")
-        
+
         sp_str = None
 
         if (precip > 0.05 and precip_prob > 25):
             precip_type = hour.get('preciptype', [''])
-            sp_str = f"{str(precip)}mm" 
+            sp_str = f"{precip}mm"
 
         if abs(hour.get('datetime') - sunrise) < timedelta(minutes=60):
             icon = ":LiSunrise:"
@@ -779,17 +757,18 @@ def format_hourly_icon(hour, sunrise, sunset):
         elif hour.get('uvindex') > 8:
             icon = ":LiRadiation:"
             sp_str = f"UV: {hour.get('uvindex', '')}"
-        
+
         formatted = f"{icon}" if icon else ""
         formatted += f" {sp_str}" if sp_str else " "
 
         return formatted
-    
+
     except Exception as e:
         err(f"Error in format_hourly_special: {str(e)}")
         err(traceback.format_exc())
         return ""
 
+
 def format_hourly_temperature(hour):
     try:
         temp_str = f"{hour.get('temp', '')}˚ F"
@@ -798,7 +777,8 @@ def format_hourly_temperature(hour):
         err(f"Error in format_hourly_temperature: {str(e)}")
         err(traceback.format_exc())
         return ""
-    
+
+
 def format_hourly_wind(hour):
     try:
         windspeed = hour.get('windspeed', '')
@@ -823,7 +803,7 @@ def get_icon_and_admonition(icon_str) -> Tuple:
     debug(f"Received request for emoji {icon_str}")
     if icon_str.startswith(":") and icon_str.endswith(":"):
         return icon_str, "ad-weather"  # callers unpack (icon, admonition); pair with the default admonition
-    
+
     icon_str = icon_str.lower()
 
     if icon_str == "clear-day":
@@ -876,34 +856,24 @@ def get_icon_and_admonition(icon_str) -> Tuple:
         ad = "ad-thunder"
     else:
         icon = ":LiHelpCircle:"
-        ad = "ad-weather" 
-    
+        ad = "ad-weather"
+
     return icon, ad
 
 def get_weather_emoji(weather_condition):
     condition = weather_condition.lower()
-    if 'clear' in condition or 'sunny' in condition:
-        return "☀️"
-    elif 'cloud' in condition or 'overcast' in condition:
-        return "☁️"
-    elif 'rain' in condition:
-        return "🌧️"
-    elif 'snow' in condition:
-        return "❄️"
-    elif 'thunder' in condition or 'storm' in condition:
-        return "⛈️"
-    elif 'fog' in condition or 'mist' in condition:
-        return "🌫️"
-    elif 'wind' in condition:
-        return "💨"
-    elif 'hail' in condition:
-        return "🌨️"
-    elif 'sleet' in condition:
-        return "🌧️"
-    elif 'partly' in condition:
-        return "⛅"
-    else:
-        return "🌡️"  # Default emoji for unclassified weather
+    if 'clear' in condition or 'sunny' in condition: return "☀️"
+    elif 'cloud' in condition or 'overcast' in condition: return "☁️"
+    elif 'rain' in condition: return "🌧️"
+    elif 'snow' in condition: return "❄️"
+    elif 'thunder' in condition or 'storm' in condition: return "⛈️"
+    elif 'fog' in condition or 'mist' in condition: return "🌫️"
+    elif 'wind' in condition: return "💨"
+    elif 'hail' in condition: return "🌨️"
+    elif 'sleet' in condition: return "🌧️"
+    elif 'partly' in condition: return "⛅"
+    else: return "🌡️"  # Default emoji for unclassified weather
+
 
 async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[str, str]]]]) -> str:
     def remove_characters(s: str) -> str:
@@ -911,7 +881,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
         s = s.strip('\n')
         s = re.sub(r'^_+|_+$', '', s)
         return s
-    
+
     date_str = event_data["date"]
     now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     events_markdown = []
@@ -940,7 +910,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
 
             else:
                 event_markdown += f"\n - [ ] **{event['start']}—{event['end']}** {markdown_name}"
-                
+
             if event['attendees']:
                 attendee_list = []
                 for att in event['attendees']:
@@ -961,10 +931,10 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
 
                 event_markdown += f"\n     * {description}"
             event_markdown += f"\n "
-   
+
     event_markdown += "\n```\n"
     events_markdown.append(event_markdown)
-    
+
     header = (
         "---\n"
         f"date: {date_str}\n"
@@ -973,23 +943,25 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
         f"updated: {now}\n"
         "---\n"
     )
-    
+
     detailed_events = (
         f"{header}"
         f"{''.join(events_markdown)}"
     )
     return detailed_events
 
+
 @note.get("/note/events", response_class=PlainTextResponse)
 async def note_events_endpoint(date: str = Query(None)):
-        
+
     date_time = await gis.dt(date) if date else await gis.dt(dt_datetime.now())
     response = await update_daily_note_events(date_time)
     return PlainTextResponse(content=response, status_code=200)
 
+
 async def update_daily_note_events(date_time: dt_datetime):
     debug(f"Looking up events on date: {date_time.strftime('%Y-%m-%d')}")
-    try:    
+    try:
         events = await cal.get_events(date_time, date_time)
         debug(f"Raw events: {events}")
         event_data = {
@@ -1009,6 +981,3 @@ async def update_daily_note_events(date_time: dt_datetime):
     except Exception as e:
         err(f"Error processing events: {e}")
         raise HTTPException(status_code=500, detail=str(e))
-
-
-