Auto-update: Sun Aug 4 22:12:18 PDT 2024

This commit is contained in:
sanj 2024-08-04 22:12:18 -07:00
parent ae7648ebd5
commit c5e4cbbb4e
5 changed files with 176 additions and 253 deletions

View file

@ -29,6 +29,7 @@ from .logs import Logger
L = Logger("classes", "classes") L = Logger("classes", "classes")
logger = L.get_module_logger("classes") logger = L.get_module_logger("classes")
# Logging functions
def debug(text: str): logger.debug(text) def debug(text: str): logger.debug(text)
def info(text: str): logger.info(text) def info(text: str): logger.info(text)
def warn(text: str): logger.warning(text) def warn(text: str): logger.warning(text)

View file

@ -1,58 +0,0 @@
# cli.py
import asyncio
import functools
from datetime import datetime as dt_datetime, timedelta

import click

# Import your async functions and dependencies
from your_main_app import build_daily_note, build_daily_note_range_endpoint, loc
def async_command(f):
    """Decorator that lets click invoke an ``async def`` callback.

    Wraps the coroutine function *f* in a synchronous callable that drives
    it to completion with ``asyncio.run()``. ``functools.wraps`` preserves
    f's name and docstring so click's help output stays correct.

    The original version decorated the wrapper with ``@click.command()`` /
    ``@click.pass_context``, which turned it into a ``Command`` object —
    so the later ``@cli.command()`` wrapped a Command instead of a function
    (double registration) — and it accepted a ``ctx`` it never used.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Bridge sync click invocation -> async implementation.
        return asyncio.run(f(*args, **kwargs))
    return wrapper
@click.group()
def cli():
    """CLI for your application."""
    # Group entry point: subcommands attach via @cli.command(); the
    # docstring above is rendered as the CLI's --help text by click.
    pass
@cli.command()
@click.argument('dt_start')
@click.argument('dt_end')
@async_command
async def bulk_note_range(dt_start: str, dt_end: str):
    """
    Build daily notes for a date range.

    DT_START and DT_END should be in YYYY-MM-DD format.
    """
    # Validate both dates up front before doing any work.
    try:
        start_date = dt_datetime.strptime(dt_start, "%Y-%m-%d")
        end_date = dt_datetime.strptime(dt_end, "%Y-%m-%d")
    except ValueError:
        click.echo("Error: Dates must be in YYYY-MM-DD format.")
        return

    if start_date > end_date:
        click.echo("Error: Start date must be before or equal to end date.")
        return

    results = []
    current_date = start_date
    while current_date <= end_date:
        # loc.dt() normalizes the datetime (presumably to the app's
        # timezone) — TODO confirm against your_main_app.
        formatted_date = await loc.dt(current_date)
        # NOTE(fix): build_daily_note was referenced here but never
        # imported in the original (NameError at runtime); it is now
        # imported from your_main_app alongside loc.
        result = await build_daily_note(formatted_date)
        results.append(result)
        current_date += timedelta(days=1)

    click.echo("Generated notes for the following dates:")
    for url in results:
        click.echo(url)
# Script entry point: hand control to the click command group.
if __name__ == '__main__':
    cli()

View file

@ -73,6 +73,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
except Exception as e: except Exception as e:
err(f"STARTTLS connection failed: {str(e)}") err(f"STARTTLS connection failed: {str(e)}")
raise raise
elif smtp_config.encryption == 'STARTTLS': elif smtp_config.encryption == 'STARTTLS':
try: try:
debug(f"Attempting STARTTLS connection to {smtp_config.host}:{smtp_config.port}") debug(f"Attempting STARTTLS connection to {smtp_config.host}:{smtp_config.port}")
@ -82,6 +83,7 @@ def get_smtp_connection(autoresponder: AutoResponder):
except Exception as e: except Exception as e:
err(f"STARTTLS connection failed: {str(e)}") err(f"STARTTLS connection failed: {str(e)}")
raise raise
else: else:
try: try:
debug(f"Attempting unencrypted connection to {smtp_config.host}:{smtp_config.port}") debug(f"Attempting unencrypted connection to {smtp_config.host}:{smtp_config.port}")
@ -128,8 +130,6 @@ async def send_response(to_email: str, subject: str, body: str, profile: AutoRes
err(f"Error closing SMTP connection: {str(e)}") err(f"Error closing SMTP connection: {str(e)}")
def clean_email_content(html_content): def clean_email_content(html_content):
soup = BeautifulSoup(html_content, "html.parser") soup = BeautifulSoup(html_content, "html.parser")
return re.sub(r'[ \t\r\n]+', ' ', soup.get_text()).strip() return re.sub(r'[ \t\r\n]+', ' ', soup.get_text()).strip()
@ -155,6 +155,7 @@ async def extract_attachments(attachments) -> List[str]:
return attachment_texts return attachment_texts
async def process_account_archival(account: EmailAccount): async def process_account_archival(account: EmailAccount):
summarized_log = EMAIL_LOGS / account.name / "summarized.txt" summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
os.makedirs(summarized_log.parent, exist_ok = True) os.makedirs(summarized_log.parent, exist_ok = True)
@ -195,6 +196,7 @@ async def process_account_archival(account: EmailAccount):
await asyncio.sleep(account.refresh) await asyncio.sleep(account.refresh)
async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False): async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False):
tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav") tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
summary = "" summary = ""
@ -212,6 +214,7 @@ async def summarize_single_email(this_email: IncomingEmail, podcast: bool = Fals
return md_summary return md_summary
async def archive_single_email(this_email: IncomingEmail, summary: str = None): async def archive_single_email(this_email: IncomingEmail, summary: str = None):
try: try:
markdown_content = f'''--- markdown_content = f'''---
@ -240,6 +243,7 @@ tags:
err(f"Exception: {e}") err(f"Exception: {e}")
return False return False
async def save_email(md_path, md_content): async def save_email(md_path, md_content):
try: try:
with open(md_path, 'w', encoding='utf-8') as md_file: with open(md_path, 'w', encoding='utf-8') as md_file:
@ -251,6 +255,7 @@ async def save_email(md_path, md_content):
err(f"Failed to save email: {e}") err(f"Failed to save email: {e}")
return False return False
def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]: def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
debug(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"") debug(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
def matches_list(item: str, this_email: IncomingEmail) -> bool: def matches_list(item: str, this_email: IncomingEmail) -> bool:
@ -297,6 +302,7 @@ async def process_account_autoresponding(account: EmailAccount):
await asyncio.sleep(account.refresh) await asyncio.sleep(account.refresh)
async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path): async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path):
this_email = await create_incoming_email(message) this_email = await create_incoming_email(message)
debug(f"Evaluating {this_email.subject} for autoresponse-worthiness...") debug(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
@ -319,6 +325,7 @@ async def autorespond_single_email(message, uid_str: str, account: EmailAccount,
else: else:
warn(f"Unable to generate auto-response for {this_email.subject}") warn(f"Unable to generate auto-response for {this_email.subject}")
async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]: async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]:
info(f"Generating auto-response to {this_email.subject} with profile: {profile.name}") info(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")
@ -363,22 +370,26 @@ async def create_incoming_email(message) -> IncomingEmail:
attachments=message.attachments attachments=message.attachments
) )
async def load_processed_uids(filename: Path) -> Set[str]: async def load_processed_uids(filename: Path) -> Set[str]:
if filename.exists(): if filename.exists():
async with aiofiles.open(filename, 'r') as f: async with aiofiles.open(filename, 'r') as f:
return set(line.strip().split(':')[-1] for line in await f.readlines()) return set(line.strip().split(':')[-1] for line in await f.readlines())
return set() return set()
async def save_processed_uid(filename: Path, account_name: str, uid: str): async def save_processed_uid(filename: Path, account_name: str, uid: str):
async with aiofiles.open(filename, 'a') as f: async with aiofiles.open(filename, 'a') as f:
await f.write(f"{account_name}:{uid}\n") await f.write(f"{account_name}:{uid}\n")
async def process_all_accounts(): async def process_all_accounts():
email_accounts = load_email_accounts(EMAIL_CONFIG) email_accounts = load_email_accounts(EMAIL_CONFIG)
summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts] summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts]
autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts] autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts]
await asyncio.gather(*summarization_tasks, *autoresponding_tasks) await asyncio.gather(*summarization_tasks, *autoresponding_tasks)
@email.on_event("startup") @email.on_event("startup")
async def startup_event(): async def startup_event():
await asyncio.sleep(5) await asyncio.sleep(5)

View file

@ -22,7 +22,6 @@ from sijapi.routers import asr, cal, gis, img, llm, serve, timing, tts, weather
from sijapi.utilities import assemble_journal_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING from sijapi.utilities import assemble_journal_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, check_file_name, HOURLY_COLUMNS_MAPPING
from sijapi.classes import Location from sijapi.classes import Location
note = APIRouter() note = APIRouter()
logger = L.get_module_logger("note") logger = L.get_module_logger("note")
def debug(text: str): logger.debug(text) def debug(text: str): logger.debug(text)
@ -43,7 +42,6 @@ async def note_add_endpoint(file: Optional[UploadFile] = File(None), text: Optio
return JSONResponse({"message": "Note added successfully", "entry": result}, status_code=201) return JSONResponse({"message": "Note added successfully", "entry": result}, status_code=201)
async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None): async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None):
now = dt_datetime.now() now = dt_datetime.now()
transcription_entry = "" transcription_entry = ""
@ -83,8 +81,6 @@ async def process_for_daily_note(file: Optional[UploadFile] = File(None), text:
return await add_to_daily_note(transcription_entry, file_entry, text_entry, now) return await add_to_daily_note(transcription_entry, file_entry, text_entry, now)
async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: dt_datetime = None): async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: dt_datetime = None):
date_time = date_time or dt_datetime.now() date_time = date_time or dt_datetime.now()
note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True) note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True)
@ -121,7 +117,6 @@ tags:
return {"timestamp": time_str, "content": entry.strip()} return {"timestamp": time_str, "content": entry.strip()}
async def process_document( async def process_document(
bg_tasks: BackgroundTasks, bg_tasks: BackgroundTasks,
document: File, document: File,
@ -195,6 +190,7 @@ added: {timestamp}
err(f"Failed to clip: {str(e)}") err(f"Failed to clip: {str(e)}")
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
def list_and_correct_impermissible_files(root_dir, rename: bool = False): def list_and_correct_impermissible_files(root_dir, rename: bool = False):
"""List and correct all files with impermissible names.""" """List and correct all files with impermissible names."""
impermissible_files = [] impermissible_files = []
@ -228,7 +224,6 @@ def list_and_correct_impermissible_files(root_dir, rename: bool = False):
journal = OBSIDIAN_VAULT_DIR / "journal" journal = OBSIDIAN_VAULT_DIR / "journal"
list_and_correct_impermissible_files(journal, rename=True) list_and_correct_impermissible_files(journal, rename=True)
### Daily Note Builder ###
@note.get("/note/bulk/{dt_start}/{dt_end}") @note.get("/note/bulk/{dt_start}/{dt_end}")
async def build_daily_note_range_endpoint(dt_start: str, dt_end: str): async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
@ -246,7 +241,6 @@ async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
return {"urls": results} return {"urls": results}
@note.get("/note/create") @note.get("/note/create")
async def build_daily_note_getpoint(): async def build_daily_note_getpoint():
try: try:
@ -277,7 +271,6 @@ async def build_daily_note_getpoint():
raise HTTPException(status_code=500, detail="An unexpected error occurred") raise HTTPException(status_code=500, detail="An unexpected error occurred")
@note.post("/note/create") @note.post("/note/create")
async def build_daily_note_endpoint( async def build_daily_note_endpoint(
date_str: Optional[str] = Form(dt_datetime.now().strftime("%Y-%m-%d")), date_str: Optional[str] = Form(dt_datetime.now().strftime("%Y-%m-%d")),
@ -383,11 +376,7 @@ created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
return absolute_path return absolute_path
async def build_daily_timeslips(date): async def build_daily_timeslips(date):
'''
'''
absolute_path, relative_path = assemble_journal_path(date, filename = "Timeslips", extension=".md", no_timestamp = True) absolute_path, relative_path = assemble_journal_path(date, filename = "Timeslips", extension=".md", no_timestamp = True)
content = await timing.process_timing_markdown(date, date) content = await timing.process_timing_markdown(date, date)
# document_content = await document.read() # document_content = await document.read()
@ -403,20 +392,16 @@ async def update_frontmatter_endpoint(date: str, key: str, value: str):
result = await update_frontmatter(date_time, key, value) result = await update_frontmatter(date_time, key, value)
return result return result
async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
# Parse the date and format paths
file_path, relative_path = assemble_journal_path(date_time)
# Check if the file exists async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
file_path, relative_path = assemble_journal_path(date_time)
if not file_path.exists(): if not file_path.exists():
crit(f"Markdown file not found at {file_path}") crit(f"Markdown file not found at {file_path}")
raise HTTPException(status_code=404, detail="Markdown file not found.") raise HTTPException(status_code=404, detail="Markdown file not found.")
# Read the file
with open(file_path, "r", encoding="utf-8") as file: with open(file_path, "r", encoding="utf-8") as file:
lines = file.readlines() lines = file.readlines()
# Extract the frontmatter
try: try:
start_index = lines.index("---\n") + 1 start_index = lines.index("---\n") + 1
end_index = lines[start_index:].index("---\n") + start_index end_index = lines[start_index:].index("---\n") + start_index
@ -424,33 +409,22 @@ async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
except ValueError: except ValueError:
raise HTTPException(status_code=500, detail="Frontmatter not found.") raise HTTPException(status_code=500, detail="Frontmatter not found.")
# Remove the existing key if present
pattern = re.compile(f"^{key}:.*", re.IGNORECASE) pattern = re.compile(f"^{key}:.*", re.IGNORECASE)
frontmatter = [line for line in frontmatter if not pattern.match(line)] frontmatter = [line for line in frontmatter if not pattern.match(line)]
# Process value as a CSV string into a list
values = value.split(',') values = value.split(',')
# Determine insertion format
if len(values) == 1: if len(values) == 1:
# Single value, add as a simple key-value
new_entry = f"{key}: {values[0]}\n" new_entry = f"{key}: {values[0]}\n"
else: else:
# Multiple values, format as a list under the key
new_entry = f"{key}:\n" + "\n".join([f" - {val}" for val in values]) + "\n" new_entry = f"{key}:\n" + "\n".join([f" - {val}" for val in values]) + "\n"
# Insert the new key-value(s)
frontmatter.append(new_entry) frontmatter.append(new_entry)
# Reassemble the file
content = lines[:start_index] + frontmatter + ["---\n"] + lines[end_index + 1:] content = lines[:start_index] + frontmatter + ["---\n"] + lines[end_index + 1:]
# Write changes back to the file
with open(file_path, "w", encoding="utf-8") as file: with open(file_path, "w", encoding="utf-8") as file:
file.writelines(content) file.writelines(content)
return {"message": "Frontmatter updated successfully."} return {"message": "Frontmatter updated successfully."}
@note.post("/note/banner") @note.post("/note/banner")
async def banner_endpoint(dt: str, location: str = None, forecast: str = None, mood: str = None, other_context: str = None): async def banner_endpoint(dt: str, location: str = None, forecast: str = None, mood: str = None, other_context: str = None):
''' '''
@ -483,6 +457,7 @@ async def generate_banner(dt, location: Location = None, forecast: str = None, m
await update_frontmatter(date_time, "banner", jpg_embed) await update_frontmatter(date_time, "banner", jpg_embed)
return local_path return local_path
async def generate_context(date_time, location: Location, forecast: str, mood: str, other_context: str): async def generate_context(date_time, location: Location, forecast: str, mood: str, other_context: str):
display_name = "Location: " display_name = "Location: "
if location and isinstance(location, Location): if location and isinstance(location, Location):
@ -542,7 +517,6 @@ async def generate_context(date_time, location: Location, forecast: str, mood: s
return prompt return prompt
async def get_note(date_time: dt_datetime): async def get_note(date_time: dt_datetime):
date_time = await gis.dt(date_time); date_time = await gis.dt(date_time);
absolute_path, local_path = assemble_journal_path(date_time, filename = "Notes", extension = ".md", no_timestamp = True) absolute_path, local_path = assemble_journal_path(date_time, filename = "Notes", extension = ".md", no_timestamp = True)
@ -552,6 +526,7 @@ async def get_note(date_time: dt_datetime):
content = file.read() content = file.read()
return content if content else None return content if content else None
async def sentiment_analysis(date_time: dt_datetime): async def sentiment_analysis(date_time: dt_datetime):
most_recent_note = await get_note(date_time) most_recent_note = await get_note(date_time)
most_recent_note = most_recent_note or await get_note(date_time - timedelta(days=1)) most_recent_note = most_recent_note or await get_note(date_time - timedelta(days=1))
@ -597,6 +572,7 @@ async def post_update_daily_weather_and_calendar_and_timeslips(date: str, refres
await build_daily_timeslips(date_time) await build_daily_timeslips(date_time)
return f"[Refresh]({API.URL}/update/note/{date_time.strftime('%Y-%m-%d')}" return f"[Refresh]({API.URL}/update/note/{date_time.strftime('%Y-%m-%d')}"
async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False, lat: float = None, lon: float = None): async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False, lat: float = None, lon: float = None):
warn(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.") warn(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.")
try: try:
@ -746,6 +722,7 @@ async def update_dn_weather(date_time: dt_datetime, force_refresh: bool = False,
err(traceback.format_exc()) err(traceback.format_exc())
raise HTTPException(status_code=500, detail=f"Error in update_dn_weather: {str(e)}") raise HTTPException(status_code=500, detail=f"Error in update_dn_weather: {str(e)}")
def format_hourly_time(hour): def format_hourly_time(hour):
try: try:
hour_12 = convert_to_12_hour_format(hour.get("datetime")) hour_12 = convert_to_12_hour_format(hour.get("datetime"))
@ -755,6 +732,7 @@ def format_hourly_time(hour):
err(traceback.format_exc()) err(traceback.format_exc())
return "" return ""
def format_hourly_icon(hour, sunrise, sunset): def format_hourly_icon(hour, sunrise, sunset):
try: try:
icon_str = hour.get('icon', '') icon_str = hour.get('icon', '')
@ -790,6 +768,7 @@ def format_hourly_icon(hour, sunrise, sunset):
err(traceback.format_exc()) err(traceback.format_exc())
return "" return ""
def format_hourly_temperature(hour): def format_hourly_temperature(hour):
try: try:
temp_str = f"{hour.get('temp', '')}˚ F" temp_str = f"{hour.get('temp', '')}˚ F"
@ -799,6 +778,7 @@ def format_hourly_temperature(hour):
err(traceback.format_exc()) err(traceback.format_exc())
return "" return ""
def format_hourly_wind(hour): def format_hourly_wind(hour):
try: try:
windspeed = hour.get('windspeed', '') windspeed = hour.get('windspeed', '')
@ -882,28 +862,18 @@ def get_icon_and_admonition(icon_str) -> Tuple:
def get_weather_emoji(weather_condition): def get_weather_emoji(weather_condition):
condition = weather_condition.lower() condition = weather_condition.lower()
if 'clear' in condition or 'sunny' in condition: if 'clear' in condition or 'sunny' in condition: return "☀️"
return "☀️" elif 'cloud' in condition or 'overcast' in condition: return "☁️"
elif 'cloud' in condition or 'overcast' in condition: elif 'rain' in condition: return "🌧️"
return "☁️" elif 'snow' in condition: return "❄️"
elif 'rain' in condition: elif 'thunder' in condition or 'storm' in condition: return "⛈️"
return "🌧️" elif 'fog' in condition or 'mist' in condition: return "🌫️"
elif 'snow' in condition: elif 'wind' in condition: return "💨"
return "❄️" elif 'hail' in condition: return "🌨️"
elif 'thunder' in condition or 'storm' in condition: elif 'sleet' in condition: return "🌧️"
return "⛈️" elif 'partly' in condition: return ""
elif 'fog' in condition or 'mist' in condition: else: return "🌡️" # Default emoji for unclassified weather
return "🌫️"
elif 'wind' in condition:
return "💨"
elif 'hail' in condition:
return "🌨️"
elif 'sleet' in condition:
return "🌧️"
elif 'partly' in condition:
return ""
else:
return "🌡️" # Default emoji for unclassified weather
async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[str, str]]]]) -> str: async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[str, str]]]]) -> str:
def remove_characters(s: str) -> str: def remove_characters(s: str) -> str:
@ -980,6 +950,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
) )
return detailed_events return detailed_events
@note.get("/note/events", response_class=PlainTextResponse) @note.get("/note/events", response_class=PlainTextResponse)
async def note_events_endpoint(date: str = Query(None)): async def note_events_endpoint(date: str = Query(None)):
@ -987,6 +958,7 @@ async def note_events_endpoint(date: str = Query(None)):
response = await update_daily_note_events(date_time) response = await update_daily_note_events(date_time)
return PlainTextResponse(content=response, status_code=200) return PlainTextResponse(content=response, status_code=200)
async def update_daily_note_events(date_time: dt_datetime): async def update_daily_note_events(date_time: dt_datetime):
debug(f"Looking up events on date: {date_time.strftime('%Y-%m-%d')}") debug(f"Looking up events on date: {date_time.strftime('%Y-%m-%d')}")
try: try:
@ -1009,6 +981,3 @@ async def update_daily_note_events(date_time: dt_datetime):
except Exception as e: except Exception as e:
err(f"Error processing events: {e}") err(f"Error processing events: {e}")
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))