updated exclusions and added r2r

sanj 2024-06-25 03:12:07 -07:00
parent ded78ba571
commit bd26ea0b0e
14 changed files with 274 additions and 217 deletions

.gitignore

@@ -9,6 +9,7 @@ sijapi/data/sd/workflows/private
 sijapi/data/*.pbf
 sijapi/data/geonames.txt
 sijapi/data/sd/images/
+sijapi/config/*.yaml
 sijapi/config/O365/
 sijapi/local_only/
 sijapi/testbed/

.gitmodules (new file)

@@ -0,0 +1,3 @@
+[submodule "r2r"]
+	path = r2r
+	url = https://github.com/SciPhi-AI/R2R-Dashboard.git

sijapi/__init__.py

@@ -1,5 +1,6 @@
 import os
 import json
+import yaml
 from pathlib import Path
 import ipaddress
 import multiprocessing
@@ -7,9 +8,11 @@ from dotenv import load_dotenv
 from dateutil import tz
 from pathlib import Path
 from pydantic import BaseModel
+from typing import List, Optional
 import traceback
 import logging
 from .logs import Logger
+from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail
 
 # from sijapi.config.config import load_config
 # cfg = load_config()
@@ -116,6 +119,7 @@ SUMMARY_INSTRUCT_TTS = os.getenv('SUMMARY_INSTRUCT_TTS', "You are an AI assistan
 DEFAULT_LLM = os.getenv("DEFAULT_LLM", "dolphin-mistral")
 DEFAULT_VISION = os.getenv("DEFAULT_VISION", "llava")
 DEFAULT_VOICE = os.getenv("DEFAULT_VOICE", "Luna")
+DEFAULT_11L_VOICE = os.getenv("DEFAULT_11L_VOICE", "Victoria")
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 
 ### Stable diffusion
@@ -164,29 +168,15 @@ MS365_TOGGLE = True if os.getenv("MS365_TOGGLE") == "True" else False
 ICAL_TOGGLE = True if os.getenv("ICAL_TOGGLE") == "True" else False
 ICS_PATH = DATA_DIR / 'calendar.ics' # deprecated now, but maybe revive?
 ICALENDARS = os.getenv('ICALENDARS', 'NULL,VOID').split(',')
-class IMAP_DETAILS(BaseModel):
-    email: str
-    password: str
-    host: str
-    imap_port: int
-    smtp_port: int
-    imap_encryption: str = None
-    smtp_encryption: str = None
-
-IMAP = IMAP_DETAILS(
-    email = os.getenv('IMAP_EMAIL'),
-    password = os.getenv('IMAP_PASSWORD'),
-    host = os.getenv('IMAP_HOST', '127.0.0.1'),
-    imap_port = int(os.getenv('IMAP_PORT', 1143)),
-    smtp_port = int(os.getenv('SMTP_PORT', 469)),
-    imap_encryption = os.getenv('IMAP_ENCRYPTION', None),
-    smtp_encryption = os.getenv('SMTP_ENCRYPTION', None)
-)
-AUTORESPONSE_WHITELIST = os.getenv('AUTORESPONSE_WHITELIST', '').split(',')
-AUTORESPONSE_BLACKLIST = os.getenv('AUTORESPONSE_BLACKLIST', '').split(',')
-AUTORESPONSE_BLACKLIST.extend(["no-reply@", "noreply@", "@uscourts.gov", "@doi.gov"])
-AUTORESPONSE_CONTEXT = os.getenv('AUTORESPONSE_CONTEXT', None)
-AUTORESPOND = AUTORESPONSE_CONTEXT != None
+def load_email_accounts(yaml_path: str) -> List[EmailAccount]:
+    with open(yaml_path, 'r') as file:
+        config = yaml.safe_load(file)
+    return [EmailAccount(**account) for account in config['accounts']]
+
+EMAIL_CONFIG = CONFIG_DIR / "email.yaml"
+EMAIL_ACCOUNTS = load_email_accounts(EMAIL_CONFIG)
+AUTORESPOND = True
 
 ### Courtlistener & other webhooks
 COURTLISTENER_DOCKETS_DIR = DATA_DIR / "courtlistener" / "dockets"

sijapi/__main__.py

@@ -20,7 +20,7 @@ from datetime import datetime
 import argparse
 from . import LOGGER, LOGS_DIR, OBSIDIAN_VAULT_DIR
 from .logs import Logger
-from .utilities import fix_nextcloud_filenames
+from .utilities import list_and_correct_impermissible_files
 
 parser = argparse.ArgumentParser(description='Personal API.')
 parser.add_argument('--debug', action='store_true', help='Set log level to INFO')
@@ -139,7 +139,7 @@ def main(argv):
         load_router(router_name)
 
     journal = OBSIDIAN_VAULT_DIR / "journal"
-    fix_nextcloud_filenames(journal, rename=True)
+    list_and_correct_impermissible_files(journal, rename=True)
     config = Config()
    config.keep_alive_timeout = 1200
    config.bind = [HOST]
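
For orientation, the `Config` object touched in the hunk above is hypercorn's. A minimal sketch of how such a config typically drives an ASGI app follows; the `app` object and the bind address are hypothetical stand-ins, not values from this commit.

```python
# Minimal sketch: serving an ASGI app with hypercorn's asyncio runner.
# `app` and the host:port below are hypothetical, not from this repo.
import asyncio

from fastapi import FastAPI
from hypercorn.asyncio import serve
from hypercorn.config import Config

app = FastAPI()  # stand-in for the sijapi application object

config = Config()
config.keep_alive_timeout = 1200   # matches the value set in main() above
config.bind = ["0.0.0.0:4444"]     # hypothetical bind address

if __name__ == "__main__":
    asyncio.run(serve(app, config))
```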

sijapi/classes.py (new file)

@@ -0,0 +1,45 @@
+from pydantic import BaseModel
+from typing import List, Optional, Any
+from datetime import datetime
+
+class AutoResponder(BaseModel):
+    name: str
+    style: str
+    context: str
+    whitelist: List[str]
+    blacklist: List[str]
+    img_gen_prompt: Optional[str] = None
+
+class IMAPConfig(BaseModel):
+    username: str
+    password: str
+    host: str
+    port: int
+    encryption: str = None
+
+class SMTPConfig(BaseModel):
+    username: str
+    password: str
+    host: str
+    port: int
+    encryption: str = None
+
+class EmailAccount(BaseModel):
+    name: str
+    fullname: Optional[str]
+    bio: Optional[str]
+    imap: IMAPConfig
+    smtp: SMTPConfig
+    autoresponders: Optional[List[AutoResponder]]
+
+class EmailContact(BaseModel):
+    email: str
+    name: str
+
+class IncomingEmail(BaseModel):
+    sender: str
+    recipients: List[EmailContact]
+    datetime_received: datetime
+    subject: str
+    body: str
+    attachments: Optional[List[Any]] = None
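
Taken together with `load_email_accounts()` in `sijapi/__init__.py`, these models imply a particular shape for the gitignored `email.yaml`. The sketch below round-trips a sample config through the new models; every value is a hypothetical placeholder (the real config is not part of this commit), and it assumes `sijapi` is importable.

```python
# Sketch of the email.yaml shape implied by EmailAccount and
# load_email_accounts(); all values are hypothetical placeholders.
import yaml
from sijapi.classes import EmailAccount  # models added in this commit

SAMPLE_YAML = """
accounts:
  - name: personal
    fullname: Jane Doe
    bio: a short bio used only as LLM context
    imap:
      username: jane@example.com
      password: app-password
      host: 127.0.0.1
      port: 1143
      encryption: STARTTLS
    smtp:
      username: jane@example.com
      password: app-password
      host: 127.0.0.1
      port: 469
      encryption: SSL
    autoresponders:
      - name: ooo
        style: professional
        context: she is traveling until further notice
        whitelist: []
        blacklist: ["no-reply@", "noreply@"]
"""

config = yaml.safe_load(SAMPLE_YAML)
accounts = [EmailAccount(**account) for account in config['accounts']]
assert accounts[0].imap.port == 1143
```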

sijapi/routers/calendar.py

@@ -17,7 +17,7 @@ from datetime import datetime, timedelta
 from Foundation import NSDate, NSRunLoop
 import EventKit as EK
 from sijapi import ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
-from sijapi.utilities import localize_dt, localize_dt
+from sijapi.utilities import localize_datetime, localize_datetime
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
 
 calendar = APIRouter()
@@ -215,8 +215,8 @@ def datetime_to_nsdate(dt: datetime) -> NSDate:
 @calendar.get("/events")
 async def get_events_endpoint(start_date: str, end_date: str):
-    start_dt = localize_dt(start_date)
-    end_dt = localize_dt(end_date)
+    start_dt = localize_datetime(start_date)
+    end_dt = localize_datetime(end_date)
     datetime.strptime(start_date, "%Y-%m-%d") or datetime.now()
     end_dt = datetime.strptime(end_date, "%Y-%m-%d") or datetime.now()
     response = await get_events(start_dt, end_dt)
@@ -342,8 +342,8 @@ async def get_ms365_events(start_date: datetime, end_date: datetime):
 async def parse_calendar_for_day(range_start: datetime, range_end: datetime, events: List[Dict[str, Any]]):
-    range_start = localize_dt(range_start)
-    range_end = localize_dt(range_end)
+    range_start = localize_datetime(range_start)
+    range_end = localize_datetime(range_end)
     event_list = []
 
     for event in events:
@@ -362,13 +362,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
                 INFO(f"End date string not a dict")
 
             try:
-                start_date = localize_dt(start_str) if start_str else None
+                start_date = localize_datetime(start_str) if start_str else None
             except (ValueError, TypeError) as e:
                 ERR(f"Invalid start date format: {start_str}, error: {e}")
                 continue
 
             try:
-                end_date = localize_dt(end_str) if end_str else None
+                end_date = localize_datetime(end_str) if end_str else None
             except (ValueError, TypeError) as e:
                 ERR(f"Invalid end date format: {end_str}, error: {e}")
                 continue
@@ -377,13 +377,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
             if start_date:
                 # Ensure start_date is timezone-aware
-                start_date = localize_dt(start_date)
+                start_date = localize_datetime(start_date)
                 # If end_date is not provided, assume it's the same as start_date
                 if not end_date:
                     end_date = start_date
                 else:
-                    end_date = localize_dt(end_date)
+                    end_date = localize_datetime(end_date)
 
                 # Check if the event overlaps with the given range
                 if (start_date < range_end) and (end_date > range_start):

sijapi/routers/email.py

@@ -1,6 +1,5 @@
 '''
-IN DEVELOPMENT Email module. Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming emails that match certain criteria and save the Text-To-Speech converted summaries into a specified "podcast" folder.
-UNIMPLEMENTED: AI auto-responder.
+Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming emails that match certain criteria and save the Text-To-Speech converted summaries into a specified "podcast" folder.
 '''
 from fastapi import APIRouter
 import asyncio
@@ -15,36 +14,87 @@ import ssl
 from smtplib import SMTP_SSL
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
+from email.mime.image import MIMEImage
 from datetime import datetime as dt_datetime
 from pydantic import BaseModel
 from typing import List, Optional, Any
+import yaml
+from typing import List, Dict, Optional
+from pydantic import BaseModel
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
-from sijapi import HOME_DIR, DATA_DIR, OBSIDIAN_VAULT_DIR, PODCAST_DIR, IMAP, OBSIDIAN_JOURNAL_DIR, DEFAULT_VOICE, AUTORESPONSE_BLACKLIST, AUTORESPONSE_WHITELIST, AUTORESPONSE_CONTEXT, USER_FULLNAME, USER_BIO, AUTORESPOND, TZ
-from sijapi.routers import summarize, tts, llm
-from sijapi.utilities import clean_text, assemble_journal_path, localize_dt, extract_text, prefix_lines
+from sijapi import PODCAST_DIR, DEFAULT_VOICE, TZ, EMAIL_ACCOUNTS, EmailAccount, IMAPConfig, SMTPConfig
+from sijapi.routers import summarize, tts, llm, sd
+from sijapi.utilities import clean_text, assemble_journal_path, localize_datetime, extract_text, prefix_lines
+from sijapi.classes import EmailAccount, IncomingEmail, EmailContact
 
 email = APIRouter(tags=["private"])
 
+def get_account_by_email(email: str) -> Optional[EmailAccount]:
+    for account in EMAIL_ACCOUNTS:
+        if account.imap.username.lower() == email.lower():
+            return account
+    return None
+
-class Contact(BaseModel):
-    email: str
-    name: str
+def get_imap_details(email: str) -> Optional[IMAPConfig]:
+    account = get_account_by_email(email)
+    return account.imap if account else None
+
-class EmailModel(BaseModel):
-    sender: str
-    recipients: List[Contact]
-    datetime_received: dt_datetime
-    subject: str
-    body: str
-    attachments: Optional[List[Any]] = None
+def get_smtp_details(email: str) -> Optional[SMTPConfig]:
+    account = get_account_by_email(email)
+    return account.smtp if account else None
+
+def get_imap_connection(account: EmailAccount):
+    return Imbox(account.imap.host,
+                 username=account.imap.username,
+                 password=account.imap.password,
+                 port=account.imap.port,
+                 ssl=account.imap.encryption == 'SSL',
+                 starttls=account.imap.encryption == 'STARTTLS')
+
+def get_matching_autoresponders(email: IncomingEmail, account: EmailAccount) -> List[Dict]:
+    matching_profiles = []
+
+    def matches_list(item: str, email: IncomingEmail) -> bool:
+        if '@' in item:
+            return item in email.sender
+        else:
+            return item.lower() in email.subject.lower() or item.lower() in email.body.lower()
+
+    for profile in account.autoresponders:
+        whitelist_match = not profile.whitelist or any(matches_list(item, email) for item in profile.whitelist)
+        blacklist_match = any(matches_list(item, email) for item in profile.blacklist)
+        if whitelist_match and not blacklist_match:
+            matching_profiles.append({
+                'USER_FULLNAME': account.fullname,
+                'RESPONSE_STYLE': profile.style,
+                'AUTORESPONSE_CONTEXT': profile.context,
+                'IMG_GEN_PROMPT': profile.img_gen_prompt,
+                'USER_BIO': account.bio
+            })
+    return matching_profiles
+
+async def generate_auto_response_body(e: IncomingEmail, profile: Dict) -> str:
+    age = dt_datetime.now(TZ) - e.datetime_received
+    prompt = f'''
+Please generate a personalized auto-response to the following email. The email is from {e.sender} and was sent {age} ago with the subject line "{e.subject}." You are auto-responding on behalf of {profile['USER_FULLNAME']}, who is described by the following short bio (strictly for your context -- do not recite this in the response): "{profile['USER_BIO']}." {profile['USER_FULLNAME']} is unable to respond personally, because {profile['AUTORESPONSE_CONTEXT']}. Everything from here to ~~//END//~~ is the email body.
+{e.body}
+~~//END//~~
+Keep your auto-response {profile['RESPONSE_STYLE']} and to the point, but do aim to make it responsive specifically to the sender's inquiry.
+'''
+    try:
+        response = await llm.query_ollama(prompt, 400)
+        return response
+    except Exception as e:
+        ERR(f"Error generating auto-response: {str(e)}")
+        return "Thank you for your email. Unfortunately, an error occurred while generating the auto-response. We apologize for any inconvenience."
+
-def imap_conn():
-    return Imbox(IMAP.host,
-                 username=IMAP.email,
-                 password=IMAP.password,
-                 port=IMAP.imap_port,
-                 ssl=IMAP.imap_encryption == 'SSL',
-                 starttls=IMAP.imap_encryption == 'STARTTLS')
 
 def clean_email_content(html_content):
@@ -73,68 +123,76 @@ async def extract_attachments(attachments) -> List[str]:
     return attachment_texts
 
-async def process_unread_emails(auto_respond: bool = AUTORESPOND, summarize_emails: bool = True, podcast: bool = True):
+async def process_unread_emails(summarize_emails: bool = True, podcast: bool = True):
     while True:
-        try:
-            with imap_conn() as inbox:
-                unread_messages = inbox.messages(unread=True)
-                for uid, message in unread_messages:
-                    recipients = [Contact(email=recipient['email'], name=recipient.get('name', '')) for recipient in message.sent_to]
-                    this_email = EmailModel(
-                        sender=message.sent_from[0]['email'],
-                        datetime_received=localize_dt(message.date),
-                        recipients=recipients,
-                        subject=message.subject,
-                        body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
-                        attachments=message.attachments
-                    )
-                    DEBUG(f"\n\nProcessing email: {this_email.subject}\n\n")
-                    md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
-                    tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
-                    if summarize_emails:
-                        email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
-                        if this_email.attachments:
-                            attachment_texts = await extract_attachments(this_email.attachments)
-                            email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
-                        summary = await summarize.summarize_text(email_content)
-                        await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = podcast, output_path = tts_path)
-                        if podcast:
-                            if PODCAST_DIR.exists():
-                                tts.copy_to_podcast_dir(tts_path)
-                            else:
-                                ERR(f"PODCAST_DIR does not exist: {PODCAST_DIR}")
-                        save_email_as_markdown(this_email, summary, md_path, tts_relative)
-                    else:
-                        save_email_as_markdown(this_email, None, md_path, None)
-                    if auto_respond and should_auto_respond(this_email):
-                        DEBUG(f"Auto-responding to {this_email.subject}")
-                        auto_response_subject = 'Auto-Response Re:' + this_email.subject
-                        auto_response_body = await generate_auto_response_body(this_email)
-                        DEBUG(f"Auto-response: {auto_response_body}")
-                        await send_auto_response(this_email.sender, auto_response_subject, auto_response_body)
-                    inbox.mark_seen(uid)
-            await asyncio.sleep(30)
-        except Exception as e:
-            ERR(f"An error occurred: {e}")
-            await asyncio.sleep(30)
+        for account in EMAIL_ACCOUNTS:
+            DEBUG(f"Connecting to {account.name} to check for unread emails...")
+            try:
+                with get_imap_connection(account) as inbox:
+                    DEBUG(f"Connected to {account.name}, checking for unread emails now...")
+                    unread_messages = inbox.messages(unread=True)
+                    for uid, message in unread_messages:
+                        recipients = [EmailContact(email=recipient['email'], name=recipient.get('name', '')) for recipient in message.sent_to]
+                        this_email = IncomingEmail(
+                            sender=message.sent_from[0]['email'],
+                            datetime_received=localize_datetime(message.date),
+                            recipients=recipients,
+                            subject=message.subject,
+                            body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
+                            attachments=message.attachments
+                        )
+                        DEBUG(f"\n\nProcessing email for account {account.name}: {this_email.subject}\n\n")
+                        md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
+                        tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
+                        if summarize_emails:
+                            email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
+                            if this_email.attachments:
+                                attachment_texts = await extract_attachments(this_email.attachments)
+                                email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
+                            summary = await summarize.summarize_text(email_content)
+                            await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = podcast, output_path = tts_path)
+                            if podcast:
+                                if PODCAST_DIR.exists():
+                                    tts.copy_to_podcast_dir(tts_path)
+                                else:
+                                    ERR(f"PODCAST_DIR does not exist: {PODCAST_DIR}")
+                            save_email_as_markdown(this_email, summary, md_path, tts_relative)
+                            DEBUG(f"Email '{this_email.subject}' saved to {md_relative}.")
+                        else:
+                            save_email_as_markdown(this_email, None, md_path, None)
+                        matching_profiles = get_matching_autoresponders(this_email, account)
+                        for profile in matching_profiles:
+                            DEBUG(f"Auto-responding to {this_email.subject} with profile: {profile['USER_FULLNAME']}")
+                            auto_response_subject = f"Auto-Response Re: {this_email.subject}"
+                            auto_response_body = await generate_auto_response_body(this_email, profile)
+                            DEBUG(f"Auto-response: {auto_response_body}")
+                            await send_auto_response(this_email.sender, auto_response_subject, auto_response_body, profile, account)
+                        inbox.mark_seen(uid)
+                await asyncio.sleep(30)
+            except Exception as e:
+                ERR(f"An error occurred for account {account.name}: {e}")
+                await asyncio.sleep(30)
 
-def save_email_as_markdown(email: EmailModel, summary: str, md_path: Path, tts_path: Path):
+def save_email_as_markdown(email: IncomingEmail, summary: str, md_path: Path, tts_path: Path):
     '''
     Saves an email as a markdown file in the specified directory.
     Args:
-        email (EmailModel): The email object containing email details.
+        email (IncomingEmail): The email object containing email details.
         summary (str): The summary of the email.
         tts_path (str): The path to the text-to-speech audio file.
     '''
+    DEBUG(f"Saving email to {md_path}...")
     # Sanitize filename to avoid issues with filesystems
     filename = f"{email.datetime_received.strftime('%Y%m%d%H%M%S')}_{email.subject.replace('/', '-')}.md".replace(':', '-').replace(' ', '_')
@@ -175,79 +233,45 @@ tags:
     DEBUG(f"Saved markdown to {md_path}")
 
-AUTORESPONSE_SYS = "You are a helpful AI assistant that generates personalized auto-response messages to incoming emails."
-
-async def generate_auto_response_body(e: EmailModel, response_style: str = "professional") -> str:
-    age = dt_datetime.now(TZ) - e.datetime_received
-    prompt = f'''
-Please generate a personalized auto-response to the following email. The email is from {e.sender} and was sent {age} ago with the subject line "{e.subject}." You are auto-responding on behalf of {USER_FULLNAME}, who is described by the following short bio (strictly for your context -- do not recite this in the response): "{USER_BIO}." {USER_FULLNAME} is unable to respond himself, because {AUTORESPONSE_CONTEXT}. Everything from here to ~~//END//~~ is the email body.
-{e.body}
-~~//END//~~
-Keep your auto-response {response_style} and to the point, but do aim to make it responsive specifically to the sender's inquiry.
-'''
-    try:
-        response = await llm.query_ollama(prompt, AUTORESPONSE_SYS, 400)
-        return response
-    except Exception as e:
-        ERR(f"Error generating auto-response: {str(e)}")
-        return "Thank you for your email. Unfortunately, an error occurred while generating the auto-response. We apologize for any inconvenience."
-
-async def send_auto_response(to_email, subject, body):
+async def send_auto_response(to_email, subject, body, profile, account):
+    DEBUG(f"Sending auto response to {to_email}...")
     try:
         message = MIMEMultipart()
-        message['From'] = IMAP.email # smtp_username
+        message['From'] = account.smtp.username
         message['To'] = to_email
         message['Subject'] = subject
         message.attach(MIMEText(body, 'plain'))
-        # DEBUG(f"Attempting to send auto_response to {to_email} concerning {subject}. We will use {IMAP.host}:{IMAP.smtp_port}, un: {IMAP.email}, pw: {IMAP.password}")
-        try:
-            DEBUG(f"Initiating attempt to send auto-response via SMTP at {IMAP.host}:{IMAP.smtp_port}...")
-            context = ssl._create_unverified_context()
-            with SMTP_SSL(IMAP.host, IMAP.smtp_port, context=context) as server:
-                server.login(IMAP.email, IMAP.password)
-                DEBUG(f"Successfully logged in to {IMAP.host} at {IMAP.smtp_port} as {IMAP.email}. Attempting to send email now.")
-                server.send_message(message)
-                INFO(f"Auto-response sent to {to_email} concerning {subject}")
-        except Exception as e:
-            ERR(f"Failed to send auto-response email to {to_email}: {e}")
-            raise e
+        if profile['IMG_GEN_PROMPT']:
+            jpg_path = sd.workflow(profile['IMG_GEN_PROMPT'], earlyout=False, downscale_to_fit=True)
+            if jpg_path and os.path.exists(jpg_path):
+                with open(jpg_path, 'rb') as img_file:
+                    img = MIMEImage(img_file.read(), name=os.path.basename(jpg_path))
+                    message.attach(img)
+        context = ssl._create_unverified_context()
+        with SMTP_SSL(account.smtp.host, account.smtp.port, context=context) as server:
+            server.login(account.smtp.username, account.smtp.password)
+            server.send_message(message)
+        INFO(f"Auto-response sent to {to_email} concerning {subject} from account {account.name}")
     except Exception as e:
-        ERR(f"Error in preparing/sending auto-response: {e}")
+        ERR(f"Error in preparing/sending auto-response from account {account.name}: {e}")
         raise e
-
-def should_auto_respond(email: EmailModel) -> bool:
-    def matches_list(item: str, email: EmailModel) -> bool:
-        if '@' in item:
-            if item in email.sender:
-                return True
-        else:
-            if item.lower() in email.subject.lower() or item.lower() in email.body.lower():
-                return True
-        return False
-
-    if AUTORESPONSE_WHITELIST:
-        for item in AUTORESPONSE_WHITELIST:
-            if matches_list(item, email):
-                if AUTORESPONSE_BLACKLIST:
-                    for blacklist_item in AUTORESPONSE_BLACKLIST:
-                        if matches_list(blacklist_item, email):
-                            return False
-                return True
-        return False
-    else:
-        if AUTORESPONSE_BLACKLIST:
-            for item in AUTORESPONSE_BLACKLIST:
-                if matches_list(item, email):
-                    return False
-        return True
 
 @email.on_event("startup")
 async def startup_event():
     asyncio.create_task(process_unread_emails())
+
+####
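
The whitelist/blacklist semantics introduced by `get_matching_autoresponders` are easy to misread in diff form: an item containing `@` is matched against the sender address, anything else against subject or body, and a profile fires only when the whitelist is empty or matched and no blacklist item matches. A minimal self-contained sketch of that same rule, with hypothetical sample values:

```python
# Minimal sketch of the autoresponder matching rule from this commit;
# the sample emails and list values below are hypothetical.
from typing import List

def matches_list(item: str, sender: str, subject: str, body: str) -> bool:
    # '@' items match the sender address; bare words match subject/body.
    if '@' in item:
        return item in sender
    return item.lower() in subject.lower() or item.lower() in body.lower()

def profile_fires(whitelist: List[str], blacklist: List[str],
                  sender: str, subject: str, body: str) -> bool:
    whitelisted = not whitelist or any(matches_list(i, sender, subject, body) for i in whitelist)
    blacklisted = any(matches_list(i, sender, subject, body) for i in blacklist)
    return whitelisted and not blacklisted

# An empty whitelist means "respond to everything not blacklisted":
assert profile_fires([], ["no-reply@"], "jane@example.com", "Hi", "Hello") is True
assert profile_fires([], ["no-reply@"], "no-reply@example.com", "Hi", "Hello") is False
# A non-empty whitelist narrows matching to listed senders/keywords:
assert profile_fires(["invoice"], [], "bob@example.com", "Invoice due", "...") is True
```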

sijapi/routers/locate.py

@@ -17,7 +17,7 @@ from typing import Optional, Any, Dict, List, Union
 from datetime import datetime, timedelta, time
 from sijapi import LOCATION_OVERRIDES, TZ
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
-from sijapi.utilities import get_db_connection, haversine, localize_dt
+from sijapi.utilities import get_db_connection, haversine, localize_datetime
 # from osgeo import gdal
 # import elevation
@@ -228,12 +228,12 @@ def get_elevation(latitude, longitude):
 async def fetch_locations(start: datetime, end: datetime = None) -> List[Location]:
-    start_datetime = localize_dt(start)
+    start_datetime = localize_datetime(start)
     if end is None:
-        end_datetime = localize_dt(start_datetime.replace(hour=23, minute=59, second=59))
+        end_datetime = localize_datetime(start_datetime.replace(hour=23, minute=59, second=59))
     else:
-        end_datetime = localize_dt(end)
+        end_datetime = localize_datetime(end)
 
     if start_datetime.time() == datetime.min.time() and end.time() == datetime.min.time():
         end_datetime = end_datetime.replace(hour=23, minute=59, second=59)
@@ -305,7 +305,7 @@ async def fetch_locations(start: datetime, end: datetime = None) -> List[Locatio
 
 # Function to fetch the last location before the specified datetime
 async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
-    datetime = localize_dt(datetime)
+    datetime = localize_datetime(datetime)
     DEBUG(f"Fetching last location before {datetime}")
     conn = await get_db_connection()
@@ -337,8 +337,8 @@ async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
 @locate.get("/map/start_date={start_date_str}&end_date={end_date_str}", response_class=HTMLResponse)
 async def generate_map_endpoint(start_date_str: str, end_date_str: str):
     try:
-        start_date = localize_dt(start_date_str)
-        end_date = localize_dt(end_date_str)
+        start_date = localize_datetime(start_date_str)
+        end_date = localize_datetime(end_date_str)
     except ValueError:
         raise HTTPException(status_code=400, detail="Invalid date format")
@@ -349,8 +349,8 @@ async def generate_map_endpoint(start_date_str: str, end_date_str: str):
 @locate.get("/map", response_class=HTMLResponse)
 async def generate_alltime_map_endpoint():
     try:
-        start_date = localize_dt(datetime.fromisoformat("2022-01-01"))
-        end_date = localize_dt(datetime.now())
+        start_date = localize_datetime(datetime.fromisoformat("2022-01-01"))
+        end_date = localize_datetime(datetime.now())
     except ValueError:
         raise HTTPException(status_code=400, detail="Invalid date format")
@@ -397,7 +397,7 @@ async def post_location(location: Location):
     device_os = context.get('device_os', 'Unknown')
 
     # Parse and localize the datetime
-    localized_datetime = localize_dt(location.datetime)
+    localized_datetime = localize_datetime(location.datetime)
 
     await conn.execute('''
         INSERT INTO locations (datetime, location, city, state, zip, street, action, device_type, device_model, device_name, device_os)
@@ -452,7 +452,7 @@ async def post_locate_endpoint(locations: Union[Location, List[Location]]):
         DEBUG(f"datetime before localization: {location.datetime}")
         # Convert datetime string to timezone-aware datetime object
-        location.datetime = localize_dt(location.datetime)
+        location.datetime = localize_datetime(location.datetime)
         DEBUG(f"datetime after localization: {location.datetime}")
         location_entry = await post_location(location)
@@ -484,7 +484,7 @@ async def get_last_location() -> JSONResponse:
 @locate.get("/locate/{datetime_str}", response_model=List[Location])
 async def get_locate(datetime_str: str, all: bool = False):
     try:
-        date_time = localize_dt(datetime_str)
+        date_time = localize_datetime(datetime_str)
     except ValueError as e:
         ERR(f"Invalid datetime string provided: {datetime_str}")
         return ["ERROR: INVALID DATETIME PROVIDED. USE YYYYMMDDHHmmss or YYYYMMDD format."]

sijapi/routers/note.py

@@ -17,12 +17,12 @@ from requests.adapters import HTTPAdapter
 import re
 import os
 from datetime import timedelta, datetime, time as dt_time, date as dt_date
-from sijapi.utilities import localize_dt
+from sijapi.utilities import localize_datetime
 from fastapi import HTTPException, status
 from pathlib import Path
 from fastapi import APIRouter, Query, HTTPException
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL, INFO
-from sijapi import YEAR_FMT, MONTH_FMT, DAY_FMT, DAY_SHORT_FMT, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_VOICE, TZ
+from sijapi import YEAR_FMT, MONTH_FMT, DAY_FMT, DAY_SHORT_FMT, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, TZ
 from sijapi.routers import tts, time, sd, locate, weather, asr, calendar, summarize
 from sijapi.routers.locate import Location
 from sijapi.utilities import assemble_journal_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, HOURLY_COLUMNS_MAPPING
@@ -39,7 +39,7 @@ async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
     results = []
     current_date = start_date
     while current_date <= end_date:
-        formatted_date = localize_dt(current_date)
+        formatted_date = localize_datetime(current_date)
         result = await build_daily_note(formatted_date)
         results.append(result)
         current_date += timedelta(days=1)
@@ -134,7 +134,7 @@ async def clip_post(
     source: Optional[str] = Form(None),
     title: Optional[str] = Form(None),
     tts: str = Form('summary'),
-    voice: str = Form('Luna'),
+    voice: str = Form(DEFAULT_VOICE),
     encoding: str = Form('utf-8')
 ):
     markdown_filename = await process_article(background_tasks, url, title, encoding, source, tts, voice)
@@ -159,7 +159,7 @@ async def clip_get(
     title: Optional[str] = Query(None),
     encoding: str = Query('utf-8'),
     tts: str = Query('summary'),
-    voice: str = Query('Luna')
+    voice: str = Query(DEFAULT_VOICE)
 ):
     markdown_filename = await process_article(background_tasks, url, title, encoding, tts=tts, voice=voice)
     return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
@@ -337,7 +337,7 @@ async def process_article(
     encoding: str = 'utf-8',
     source: Optional[str] = None,
     tts_mode: str = "summary",
-    voice: str = DEFAULT_VOICE
+    voice: str = DEFAULT_11L_VOICE
 ):
     timestamp = datetime.now().strftime('%b %d, %Y at %H:%M')
@@ -442,7 +442,7 @@ def parse_article(url: str, source: Optional[str] = None):
     title = np3k.title or traf.title
     authors = np3k.authors or traf.author
     authors = authors if isinstance(authors, List) else [authors]
-    date = np3k.publish_date or localize_dt(traf.date)
+    date = np3k.publish_date or localize_datetime(traf.date)
     excerpt = np3k.meta_description or traf.description
     content = trafilatura.extract(source, output_format="markdown", include_comments=False) or np3k.text
     image = np3k.top_image or traf.image
@@ -635,7 +635,7 @@ async def banner_endpoint(dt: str, location: str = None, mood: str = None, other
     Endpoint (POST) that generates a new banner image for the Obsidian daily note for a specified date, taking into account optional additional information, then updates the frontmatter if necessary.
     '''
     DEBUG(f"banner_endpoint requested with date: {dt} ({type(dt)})")
-    date_time = localize_dt(dt)
+    date_time = localize_datetime(dt)
     DEBUG(f"date_time after localization: {date_time} ({type(date_time)})")
     jpg_path = await generate_banner(date_time, location, mood=mood, other_context=other_context)
     return jpg_path
@@ -643,7 +643,7 @@ async def banner_endpoint(dt: str, location: str = None, mood: str = None, other
 async def generate_banner(dt, location: Location = None, forecast: str = None, mood: str = None, other_context: str = None):
     DEBUG(f"Location: {location}, forecast: {forecast}, mood: {mood}, other_context: {other_context}")
-    date_time = localize_dt(dt)
+    date_time = localize_datetime(dt)
     DEBUG(f"generate_banner called with date_time: {date_time}")
     destination_path, local_path = assemble_journal_path(date_time, filename="Banner", extension=".jpg", no_timestamp = True)
     DEBUG(f"destination path generated: {destination_path}")
@@ -699,7 +699,7 @@ async def note_weather_get(
 ):
     try:
-        date_time = datetime.now() if date == "0" else localize_dt(date)
+        date_time = datetime.now() if date == "0" else localize_datetime(date)
         DEBUG(f"date: {date} .. date_time: {date_time}")
         content = await update_dn_weather(date_time) #, lat, lon)
         return JSONResponse(content={"forecast": content}, status_code=200)
@@ -714,7 +714,7 @@ async def note_weather_get(
 @note.post("/update/note/{date}")
 async def post_update_daily_weather_and_calendar_and_timeslips(date: str) -> PlainTextResponse:
-    date_time = localize_dt(date)
+    date_time = localize_datetime(date)
     await update_dn_weather(date_time)
     await update_daily_note_events(date_time)
     await build_daily_timeslips(date_time)
@@ -1117,7 +1117,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
 @note.get("/note/events", response_class=PlainTextResponse)
 async def note_events_endpoint(date: str = Query(None)):
-    date_time = localize_dt(date) if date else datetime.now(TZ)
+    date_time = localize_datetime(date) if date else datetime.now(TZ)
     response = await update_daily_note_events(date_time)
     return PlainTextResponse(content=response, status_code=200)

sijapi/routers/serve.py

@@ -14,7 +14,7 @@ from selenium.webdriver.support.ui import WebDriverWait
 from selenium.webdriver.support import expected_conditions as EC
 from pathlib import Path
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
-from sijapi.utilities import bool_convert, sanitize_filename, assemble_journal_path, localize_dt
+from sijapi.utilities import bool_convert, sanitize_filename, assemble_journal_path, localize_datetime
 from sijapi import DATA_DIR, SD_IMAGE_DIR, PUBLIC_KEY, OBSIDIAN_VAULT_DIR
 
 serve = APIRouter(tags=["public"])
@@ -50,7 +50,7 @@ def is_valid_date(date_str: str) -> bool:
 @serve.get("/notes/{file_path:path}")
 async def get_file(file_path: str):
     try:
-        date_time = localize_dt(file_path);
+        date_time = localize_datetime(file_path);
         absolute_path, local_path = assemble_journal_path(date_time, no_timestamp = True)
     except ValueError as e:
         DEBUG(f"Unable to parse {file_path} as a date, now trying to use it as a local path")

sijapi/routers/time.py

@@ -17,7 +17,7 @@ from fastapi import APIRouter, UploadFile, File, Response, Header, Query, Depend
 from fastapi.responses import FileResponse, JSONResponse
 from pydantic import BaseModel, Field
 from datetime import datetime, timedelta
-from sijapi.utilities import localize_dt
+from sijapi.utilities import localize_datetime
 from decimal import Decimal, ROUND_UP
 from typing import Optional, List, Dict, Union, Tuple
 from collections import defaultdict
@@ -100,8 +100,8 @@ def truncate_project_title(title):
 async def fetch_and_prepare_timing_data(start: datetime, end: Optional[datetime] = None) -> List[Dict]:
-    # start_date = localize_dt(start)
-    # end_date = localize_dt(end) if end else None
+    # start_date = localize_datetime(start)
+    # end_date = localize_datetime(end) if end else None
     # Adjust the start date to include the day before and format the end date
     start_date_adjusted = (start - timedelta(days=1)).strftime("%Y-%m-%dT00:00:00")
     end_date_formatted = f"{datetime.strftime(end, '%Y-%m-%d')}T23:59:59" if end else f"{datetime.strftime(start, '%Y-%m-%d')}T23:59:59"
@@ -315,8 +315,8 @@ async def get_timing_markdown3(
 ):
     # Fetch and process timing data
-    start = localize_dt(start_date)
-    end = localize_dt(end_date) if end_date else None
+    start = localize_datetime(start_date)
+    end = localize_datetime(end_date) if end_date else None
     timing_data = await fetch_and_prepare_timing_data(start, end)
 
     # Retain these for processing Markdown data with the correct timezone
@@ -375,8 +375,8 @@ async def get_timing_markdown(
     start: str = Query(..., regex=r"\d{4}-\d{2}-\d{2}"),
     end: Optional[str] = Query(None, regex=r"\d{4}-\d{2}-\d{2}")
 ):
-    start_date = localize_dt(start)
-    end_date = localize_dt(end)
+    start_date = localize_datetime(start)
+    end_date = localize_datetime(end)
     markdown_formatted_data = await process_timing_markdown(start_date, end_date)
     return Response(content=markdown_formatted_data, media_type="text/markdown")
@@ -444,8 +444,8 @@ async def get_timing_json(
 ):
     # Fetch and process timing data
-    start = localize_dt(start_date)
-    end = localize_dt(end_date)
+    start = localize_datetime(start_date)
+    end = localize_datetime(end_date)
     timing_data = await fetch_and_prepare_timing_data(start, end)
 
     # Convert processed data to the required JSON structure

sijapi/routers/tts.py

@@ -165,6 +165,8 @@ async def get_model(voice: str = None, voice_file: UploadFile = None):
     raise HTTPException(status_code=400, detail="No model or voice specified")
 
 async def determine_voice_id(voice_name: str) -> str:
+    DEBUG(f"Searching for voice id for {voice_name}")
+
     hardcoded_voices = {
         "alloy": "E3A1KVbKoWSIKSZwSUsW",
         "echo": "b42GBisbu9r5m5n6pHF7",
@@ -172,7 +174,7 @@ async def determine_voice_id(voice_name: str) -> str:
         "onyx": "clQb8NxY08xZ6mX6wCPE",
         "nova": "6TayTBKLMOsghG7jYuMX",
         "shimmer": "E7soeOyjpmuZFurvoxZ2",
-        DEFAULT_VOICE: "6TayTBKLMOsghG7jYuMX",
+        "Luna": "6TayTBKLMOsghG7jYuMX",
         "Sangye": "E7soeOyjpmuZFurvoxZ2",
         "Herzog": "KAX2Y6tTs0oDWq7zZXW7",
         "Attenborough": "b42GBisbu9r5m5n6pHF7"
@@ -198,7 +200,8 @@ async def determine_voice_id(voice_name: str) -> str:
     except Exception as e:
         ERR(f"Error determining voice ID: {str(e)}")
 
-    return "6TayTBKLMOsghG7jYuMX"
+    # as a last fallback, rely on David Attenborough
+    return "b42GBisbu9r5m5n6pHF7"
 
 async def elevenlabs_tts(model: str, input_text: str, voice: str, title: str = None, output_dir: str = None):

sijapi/routers/weather.py

@@ -7,7 +7,7 @@ from typing import Dict
 from datetime import datetime
 from shapely.wkb import loads
 from binascii import unhexlify
-from sijapi.utilities import localize_dt
+from sijapi.utilities import localize_datetime
 from sijapi import DEBUG, INFO, WARN, ERR, CRITICAL
 from sijapi import VISUALCROSSING_API_KEY, TZ
 from sijapi.utilities import get_db_connection, haversine
@@ -25,7 +25,7 @@ async def get_weather(date_time: datetime, latitude: float, longitude: float):
     try:
         DEBUG(f"Daily weather data from db: {daily_weather_data}")
         last_updated = str(daily_weather_data['DailyWeather'].get('last_updated'))
-        last_updated = localize_dt(last_updated)
+        last_updated = localize_datetime(last_updated)
         stored_loc_data = unhexlify(daily_weather_data['DailyWeather'].get('location'))
         stored_loc = loads(stored_loc_data)
         stored_lat = stored_loc.y
@@ -103,7 +103,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
     location_point = f"POINTZ({longitude} {latitude} {elevation})" if longitude and latitude and elevation else None
 
     # Correct for the datetime objects
-    day_data['datetime'] = localize_dt(day_data.get('datetime')) #day_data.get('datetime'))
+    day_data['datetime'] = localize_datetime(day_data.get('datetime')) #day_data.get('datetime'))
     day_data['sunrise'] = day_data['datetime'].replace(hour=int(day_data.get('sunrise').split(':')[0]), minute=int(day_data.get('sunrise').split(':')[1]))
     day_data['sunset'] = day_data['datetime'].replace(hour=int(day_data.get('sunset').split(':')[0]), minute=int(day_data.get('sunset').split(':')[1]))
@@ -160,7 +160,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
         await asyncio.sleep(0.1)
         # hour_data['datetime'] = parse_date(hour_data.get('datetime'))
         hour_timestamp = date_str + ' ' + hour_data['datetime']
-        hour_data['datetime'] = localize_dt(hour_timestamp)
+        hour_data['datetime'] = localize_datetime(hour_timestamp)
         DEBUG(f"Processing hours now...")
         # DEBUG(f"Processing {hour_data['datetime']}")

sijapi/utilities.py

@@ -210,7 +210,7 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
             if check_file_name(filename):
                 file_path = Path(dirpath) / filename
                 impermissible_files.append(file_path)
-                print(f"Impermissible file found: {file_path}")
+                DEBUG(f"Impermissible file found: {file_path}")
 
                 # Sanitize the file name
                 new_filename = sanitize_filename(filename)
@@ -228,19 +228,10 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
                 # Rename the file
                 if rename:
                     os.rename(file_path, new_file_path)
-                    print(f"Renamed: {file_path} -> {new_file_path}")
+                    DEBUG(f"Renamed: {file_path} -> {new_file_path}")
 
     return impermissible_files
 
-def fix_nextcloud_filenames(dir_to_fix, rename: bool = False):
-    impermissible_files = list_and_correct_impermissible_files(dir_to_fix, rename)
-    if impermissible_files:
-        print("\nList of impermissible files found and corrected:")
-        for file in impermissible_files:
-            print(file)
-    else:
-        print("No impermissible files found.")
-
 def bool_convert(value: str = Form(None)):
     return value.lower() in ["true", "1", "t", "y", "yes"]
@@ -454,7 +445,7 @@ def convert_degrees_to_cardinal(d):
     return dirs[ix % len(dirs)]
 
-def localize_dt(dt):
+def localize_datetime(dt):
     initial_dt = dt
     try:
         if isinstance(dt, str):
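
The diff cuts off inside `localize_datetime`, so for orientation here is a minimal sketch of the contract its many call sites above rely on: accept a string or datetime, return a timezone-aware datetime in the configured TZ. This is an assumption inferred from usage, not the function's actual body, and the TZ value is a hypothetical placeholder.

```python
# Hedged sketch of the behavior localize_datetime's callers assume;
# inferred from the call sites above, not the real implementation.
from datetime import datetime
from dateutil import parser, tz

TZ = tz.gettz("America/Los_Angeles")  # hypothetical; sijapi reads TZ from its env

def localize_datetime_sketch(dt):
    if isinstance(dt, str):
        dt = parser.parse(dt)          # tolerate YYYYMMDD, ISO strings, etc.
    if not isinstance(dt, datetime):
        raise ValueError(f"Cannot localize {dt!r}")
    if dt.tzinfo is None:
        return dt.replace(tzinfo=TZ)   # make naive datetimes TZ-aware
    return dt.astimezone(TZ)           # normalize aware datetimes to TZ

print(localize_datetime_sketch("2024-06-25 03:12:07"))
```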