From 0660455eeacac5449a4ea1fc1e275aa5b0bda38d Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Fri, 28 Jun 2024 22:22:58 -0700
Subject: [PATCH] Auto-update: Fri Jun 28 22:22:58 PDT 2024
---
Extras/Pythonista/uploadGPS.py | 2 +-
sijapi/__init__.py | 5 +-
sijapi/classes.py | 389 ++++++---
sijapi/config/.env-example | 18 +-
sijapi/routers/{calendar.py => cal.py} | 28 +-
sijapi/routers/email.py | 439 +++++-----
sijapi/routers/llm.py | 8 -
sijapi/routers/loc.py | 385 +++++++++
sijapi/routers/locate.py | 609 -------------
sijapi/routers/note.py | 1102 +++++++++++++-----------
sijapi/routers/serve.py | 4 +-
sijapi/routers/time.py | 18 +-
sijapi/routers/weather.py | 53 +-
13 files changed, 1513 insertions(+), 1547 deletions(-)
rename sijapi/routers/{calendar.py => cal.py} (95%)
create mode 100644 sijapi/routers/loc.py
delete mode 100644 sijapi/routers/locate.py
diff --git a/Extras/Pythonista/uploadGPS.py b/Extras/Pythonista/uploadGPS.py
index 13e6154..140adbf 100644
--- a/Extras/Pythonista/uploadGPS.py
+++ b/Extras/Pythonista/uploadGPS.py
@@ -4,7 +4,7 @@ import json
filename = 'location_log.json'
server = '!{!{ ENTER A PUBLIC URL TO YOUR SIJAPI INSTANCE }!}!'
-api_key = !{!{ ENTER YOUR GLOBAL_API_KEY HERE }!}!
+api_key = '!{!{ ENTER YOUR GLOBAL_API_KEY HERE }!}!'
def upload_location_data(data):
headers = {
diff --git a/sijapi/__init__.py b/sijapi/__init__.py
index 9cd99bc..cfe5388 100644
--- a/sijapi/__init__.py
+++ b/sijapi/__init__.py
@@ -12,7 +12,7 @@ from typing import List, Optional
import traceback
import logging
from .logs import Logger
-from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, TimezoneTracker, Database, PyGeolocator
+from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, TimezoneTracker, Database, Geocoder
# from sijapi.config.config import load_config
# cfg = load_config()
@@ -68,7 +68,7 @@ GEONAMES_TXT = DATA_DIR / "geonames.txt"
LOCATIONS_CSV = DATA_DIR / "US.csv"
TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
TZ_CACHE = DATA_DIR / "tzcache.json"
-DynamicTZ = TimezoneTracker(TZ_CACHE)
+GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)
### Obsidian & notes
ALLOWED_FILENAME_CHARS = r'[^\w \.-]'
@@ -90,7 +90,6 @@ YEAR_FMT = os.getenv("YEAR_FMT")
MONTH_FMT = os.getenv("MONTH_FMT")
DAY_FMT = os.getenv("DAY_FMT")
DAY_SHORT_FMT = os.getenv("DAY_SHORT_FMT")
-GEOLOCATOR = PyGeolocator
### Large language model
LLM_URL = os.getenv("LLM_URL", "http://localhost:11434")
diff --git a/sijapi/classes.py b/sijapi/classes.py
index f3444d3..4b94561 100644
--- a/sijapi/classes.py
+++ b/sijapi/classes.py
@@ -1,54 +1,88 @@
-from pydantic import BaseModel
-from typing import List, Optional, Any, Tuple, Dict, Union, Tuple
-from datetime import datetime, timedelta
-import asyncio
-import asyncpg
-import json
from pydantic import BaseModel, Field
-from typing import Optional
-import asyncpg
-import os
-from typing import Optional, Tuple, Union
-from datetime import datetime, timedelta
+from typing import List, Optional, Any, Tuple, Dict, Union, Tuple
+from datetime import datetime, timedelta, timezone
+import asyncio
import json
from timezonefinder import TimezoneFinder
from pathlib import Path
-
-from pydantic import BaseModel, Field
-from typing import Optional
-
-from pydantic import BaseModel, Field
-from typing import Optional
-import asyncpg
-
-from pydantic import BaseModel, Field
-from typing import Optional
import asyncpg
+import aiohttp
+import aiofiles
from contextlib import asynccontextmanager
-
+from concurrent.futures import ThreadPoolExecutor
import reverse_geocoder as rg
from timezonefinder import TimezoneFinder
from srtm import get_data
-class PyGeolocator:
- def __init__(self):
+class Location(BaseModel):
+ latitude: float
+ longitude: float
+ datetime: datetime
+ elevation: Optional[float] = None
+ altitude: Optional[float] = None
+ zip: Optional[str] = None
+ street: Optional[str] = None
+ city: Optional[str] = None
+ state: Optional[str] = None
+ country: Optional[str] = None
+ context: Optional[Dict[str, Any]] = None
+ class_: Optional[str] = None
+ type: Optional[str] = None
+ name: Optional[str] = None
+ display_name: Optional[str] = None
+ boundingbox: Optional[List[str]] = None
+ amenity: Optional[str] = None
+ house_number: Optional[str] = None
+ road: Optional[str] = None
+ quarter: Optional[str] = None
+ neighbourhood: Optional[str] = None
+ suburb: Optional[str] = None
+ county: Optional[str] = None
+ country_code: Optional[str] = None
+
+ class Config:
+ json_encoders = {
+ datetime: lambda dt: dt.isoformat(),
+ }
+
+
+class Geocoder:
+ def __init__(self, named_locs: Union[str, Path] = None, cache_file: Union[str, Path] = 'timezone_cache.json'):
self.tf = TimezoneFinder()
self.srtm_data = get_data()
+ self.named_locs = Path(named_locs) if named_locs else None
+ self.cache_file = Path(cache_file)
+ self.last_timezone: str = "America/Los_Angeles"
+ self.last_update: Optional[datetime] = None
+ self.last_location: Optional[Tuple[float, float]] = None
+ self.executor = ThreadPoolExecutor()
- def get_location(self, lat, lon):
- result = rg.search((lat, lon))
- return result[0]['name'], result[0]['admin1'], result[0]['cc']
+ async def location(self, lat: float, lon: float):
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(self.executor, rg.search, [(lat, lon)])
- def get_elevation(self, lat, lon):
- return self.srtm_data.get_elevation(lat, lon)
+ async def elevation(self, latitude: float, longitude: float, unit: str = "m") -> float:
+ loop = asyncio.get_running_loop()
+ elevation = await loop.run_in_executor(self.executor, self.srtm_data.get_elevation, latitude, longitude)
+
+ if unit == "m":
+ return elevation
+ elif unit == "km":
+ return elevation / 1000
+ elif unit == "ft" or unit == "'":
+ return elevation * 3.280839895
+ else:
+ raise ValueError(f"Unsupported unit: {unit}")
- def get_timezone(self, lat, lon):
- return self.tf.timezone_at(lat=lat, lng=lon)
+ async def timezone(self, lat: float, lon: float):
+ loop = asyncio.get_running_loop()
+ timezone = await loop.run_in_executor(self.executor, self.tf.timezone_at, lat, lon)
+ return timezone if timezone else 'Unknown'
- def lookup(self, lat, lon):
- city, state, country = self.get_location(lat, lon)
- elevation = self.get_elevation(lat, lon)
- timezone = self.get_timezone(lat, lon)
+ async def lookup(self, lat: float, lon: float):
+ city, state, country = (await self.location(lat, lon))[0]['name'], (await self.location(lat, lon))[0]['admin1'], (await self.location(lat, lon))[0]['cc']
+ elevation = await self.elevation(lat, lon)
+ timezone = await self.timezone(lat, lon)
return {
"city": city,
@@ -58,6 +92,179 @@ class PyGeolocator:
"timezone": timezone
}
+ async def code(self, locations: Union[Location, Tuple[float, float], List[Union[Location, Tuple[float, float]]]]) -> Union[Location, List[Location]]:
+ if isinstance(locations, (Location, tuple)):
+ locations = [locations]
+
+ processed_locations = []
+ for loc in locations:
+ if isinstance(loc, tuple):
+ processed_locations.append(Location(latitude=loc[0], longitude=loc[1]))
+ elif isinstance(loc, Location):
+ processed_locations.append(loc)
+ else:
+ raise ValueError(f"Unsupported location type: {type(loc)}")
+
+ coordinates = [(location.latitude, location.longitude) for location in processed_locations]
+
+ geocode_results = await self.location(*zip(*coordinates))
+ elevations = await asyncio.gather(*[self.elevation(lat, lon) for lat, lon in coordinates])
+ timezones = await asyncio.gather(*[self.timezone(lat, lon) for lat, lon in coordinates])
+
+ geocoded_locations = []
+ for location, result, elevation, timezone in zip(processed_locations, geocode_results, elevations, timezones):
+ geocoded_location = Location(
+ latitude=location.latitude,
+ longitude=location.longitude,
+ elevation=elevation,
+ datetime=location.datetime or datetime.now(timezone.utc),
+ zip=result.get("admin2"),
+ city=result.get("name"),
+ state=result.get("admin1"),
+ country=result.get("cc"),
+ context=location.context or {},
+ name=result.get("name"),
+ display_name=f"{result.get('name')}, {result.get('admin1')}, {result.get('cc')}",
+ country_code=result.get("cc"),
+ timezone=timezone
+ )
+
+ # Merge original location data with geocoded data
+ for field in location.__fields__:
+ if getattr(location, field) is None:
+ setattr(location, field, getattr(geocoded_location, field))
+
+ geocoded_locations.append(location)
+
+ return geocoded_locations[0] if len(geocoded_locations) == 1 else geocoded_locations
+
+ async def geocode_osm(self, latitude: float, longitude: float, email: str):
+ url = f"https://nominatim.openstreetmap.org/reverse?format=json&lat={latitude}&lon={longitude}"
+ headers = {
+ 'User-Agent': f'sijapi/1.0 ({email})', # replace with your app name and email
+ }
+ async with aiohttp.ClientSession() as session:
+ async with session.get(url, headers=headers) as response:
+ response.raise_for_status()
+ data = await response.json()
+
+ address = data.get("address", {})
+ elevation = await self.elevation(latitude, longitude)
+ return Location(
+ latitude=latitude,
+ longitude=longitude,
+ elevation=elevation,
+ datetime=datetime.now(timezone.utc),
+ zip=address.get("postcode"),
+ street=address.get("road"),
+ city=address.get("city"),
+ state=address.get("state"),
+ country=address.get("country"),
+ context={},
+ class_=data.get("class"),
+ type=data.get("type"),
+ name=data.get("name"),
+ display_name=data.get("display_name"),
+ amenity=address.get("amenity"),
+ house_number=address.get("house_number"),
+ road=address.get("road"),
+ quarter=address.get("quarter"),
+ neighbourhood=address.get("neighbourhood"),
+ suburb=address.get("suburb"),
+ county=address.get("county"),
+ country_code=address.get("country_code"),
+ timezone=await self.timezone(latitude, longitude)
+ )
+
+
+ def load_override_locations(self):
+ if self.named_locs and self.named_locs.exists():
+ with open(self.named_locs, 'r') as file:
+ return yaml.safe_load(file)
+ return []
+
+
+ def haversine(self, lat1, lon1, lat2, lon2):
+ R = 6371 # Earth's radius in kilometers
+
+ lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2])
+ dlat = lat2 - lat1
+ dlon = lon2 - lon1
+
+ a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
+ c = 2 * atan2(sqrt(a), sqrt(1-a))
+
+ return R * c
+
+ async def find_override_location(self, lat: float, lon: float) -> Optional[str]:
+ closest_location = None
+ closest_distance = float('inf')
+
+ for location in self.override_locations:
+ loc_name = location.get("name")
+ loc_lat = location.get("latitude")
+ loc_lon = location.get("longitude")
+ loc_radius = location.get("radius")
+
+ distance = self.haversine(lat, lon, loc_lat, loc_lon)
+
+ if distance <= loc_radius:
+ if distance < closest_distance:
+ closest_distance = distance
+ closest_location = loc_name
+
+ return closest_location
+
+ async def refresh_timezone(self, location: Union[Location, Tuple[float, float]], force: bool = False) -> str:
+ if isinstance(location, Location):
+ lat, lon = location.latitude, location.longitude
+ else:
+ lat, lon = location
+
+ current_time = datetime.now()
+ if (force or
+ not self.last_update or
+ current_time - self.last_update > timedelta(hours=1) or
+ self.last_location != (lat, lon)):
+ new_timezone = await self.timezone(lat, lon)
+ self.last_timezone = new_timezone
+ self.last_update = current_time
+ self.last_location = (lat, lon)
+ await self.tz_save()
+ return self.last_timezone
+
+ async def tz_save(self):
+ cache_data = {
+ 'last_timezone': self.last_timezone,
+ 'last_update': self.last_update.isoformat() if self.last_update else None,
+ 'last_location': self.last_location
+ }
+ async with aiofiles.open(self.cache_file, 'w') as f:
+ await f.write(json.dumps(cache_data))
+
+ async def tz_cached(self):
+ try:
+ async with aiofiles.open(self.cache_file, 'r') as f:
+ cache_data = json.loads(await f.read())
+ self.last_timezone = cache_data.get('last_timezone')
+ self.last_update = datetime.fromisoformat(cache_data['last_update']) if cache_data.get('last_update') else None
+ self.last_location = tuple(cache_data['last_location']) if cache_data.get('last_location') else None
+ except (FileNotFoundError, json.JSONDecodeError):
+ # If file doesn't exist or is invalid, we'll start fresh
+ pass
+
+ async def tz_current(self, location: Union[Location, Tuple[float, float]]) -> str:
+ await self.tz_cached()
+ return await self.refresh_timezone(location)
+
+ async def tz_last(self) -> Optional[str]:
+ await self.tz_cached()
+ return self.last_timezone
+
+ def __del__(self):
+ self.executor.shutdown()
+
+
class Database(BaseModel):
host: str = Field(..., description="Database host")
port: int = Field(5432, description="Database port")
@@ -98,15 +305,6 @@ class Database(BaseModel):
return self.dict(exclude_none=True)
-class AutoResponder(BaseModel):
- name: str
- style: str
- context: str
- ollama_model: str = "llama3"
- whitelist: List[str]
- blacklist: List[str]
- image_prompt: Optional[str] = None
-
class IMAPConfig(BaseModel):
username: str
password: str
@@ -121,6 +319,16 @@ class SMTPConfig(BaseModel):
port: int
encryption: str = None
+class AutoResponder(BaseModel):
+ name: str
+ style: str
+ context: str
+ ollama_model: str = "llama3"
+ whitelist: List[str]
+ blacklist: List[str]
+ image_prompt: Optional[str] = None
+ smtp: SMTPConfig
+
class EmailAccount(BaseModel):
name: str
refresh: int
@@ -129,7 +337,6 @@ class EmailAccount(BaseModel):
summarize: bool = False
podcast: bool = False
imap: IMAPConfig
- smtp: SMTPConfig
autoresponders: Optional[List[AutoResponder]]
class EmailContact(BaseModel):
@@ -143,95 +350,3 @@ class IncomingEmail(BaseModel):
subject: str
body: str
attachments: List[dict] = []
-
-
-class Location(BaseModel):
- latitude: float
- longitude: float
- datetime: datetime
- elevation: Optional[float] = None
- altitude: Optional[float] = None
- zip: Optional[str] = None
- street: Optional[str] = None
- city: Optional[str] = None
- state: Optional[str] = None
- country: Optional[str] = None
- context: Optional[Dict[str, Any]] = None
- class_: Optional[str] = None
- type: Optional[str] = None
- name: Optional[str] = None
- display_name: Optional[str] = None
- boundingbox: Optional[List[str]] = None
- amenity: Optional[str] = None
- house_number: Optional[str] = None
- road: Optional[str] = None
- quarter: Optional[str] = None
- neighbourhood: Optional[str] = None
- suburb: Optional[str] = None
- county: Optional[str] = None
- country_code: Optional[str] = None
-
- class Config:
- json_encoders = {
- datetime: lambda dt: dt.isoformat(),
- }
-
-
-
-class TimezoneTracker:
- def __init__(self, cache_file: Union[str, Path] = 'timezone_cache.json'):
- self.cache_file = Path(cache_file)
- self.last_timezone: str = "America/Los_Angeles"
- self.last_update: Optional[datetime] = None
- self.last_location: Optional[Tuple[float, float]] = None
- self.tf = TimezoneFinder()
-
- def find(self, lat: float, lon: float) -> str:
- timezone = self.tf.timezone_at(lat=lat, lng=lon)
- return timezone if timezone else 'Unknown'
-
- async def refresh(self, location: Union[Location, Tuple[float, float]], force: bool = False) -> str:
- if isinstance(location, Location):
- lat, lon = location.latitude, location.longitude
- else:
- lat, lon = location
-
- current_time = datetime.now()
- if (force or
- not self.last_update or
- current_time - self.last_update > timedelta(hours=1) or
- self.last_location != (lat, lon)):
- new_timezone = self.find(lat, lon)
- self.last_timezone = new_timezone
- self.last_update = current_time
- self.last_location = (lat, lon)
- await self.save_to_cache()
- return self.last_timezone
-
- async def save_to_cache(self):
- cache_data = {
- 'last_timezone': self.last_timezone,
- 'last_update': self.last_update.isoformat() if self.last_update else None,
- 'last_location': self.last_location
- }
- with self.cache_file.open('w') as f:
- json.dump(cache_data, f)
-
- async def load_from_cache(self):
- try:
- with self.cache_file.open('r') as f:
- cache_data = json.load(f)
- self.last_timezone = cache_data.get('last_timezone')
- self.last_update = datetime.fromisoformat(cache_data['last_update']) if cache_data.get('last_update') else None
- self.last_location = tuple(cache_data['last_location']) if cache_data.get('last_location') else None
- except (FileNotFoundError, json.JSONDecodeError):
- # If file doesn't exist or is invalid, we'll start fresh
- pass
-
- async def get_current(self, location: Union[Location, Tuple[float, float]]) -> str:
- await self.load_from_cache()
- return await self.refresh(location)
-
- async def get_last(self) -> Optional[str]:
- await self.load_from_cache()
- return self.last_timezone
diff --git a/sijapi/config/.env-example b/sijapi/config/.env-example
index 5a23da9..3ef3678 100644
--- a/sijapi/config/.env-example
+++ b/sijapi/config/.env-example
@@ -96,7 +96,7 @@ TRUSTED_SUBNETS=127.0.0.1/32,10.13.37.0/24,100.64.64.0/24
# ──────────
#
#─── router selection: ────────────────────────────────────────────────────────────
-ROUTERS=asr,calendar,cf,email,health,llm,locate,note,rag,sd,serve,time,tts,weather
+ROUTERS=asr,cal,cf,email,health,llm,loc,note,rag,sd,serve,time,tts,weather
UNLOADED=ig
#─── notes: ──────────────────────────────────────────────────────────────────────
#
@@ -115,7 +115,7 @@ UNLOADED=ig
# asr: requires faster_whisper — $ pip install faster_whisper — and
# downloading the model file specified in ASR_DEFAULT_MODEL.
#
-# calendar: requires (1) a Microsoft 365 account with a properly configured
+# cal: requires (1) a Microsoft 365 account with a properly configured
# Azure Active Directory app, and/or (2) Calendars on macOS.
#
# cf: interfaces with the Cloudflare API and Caddy to register new
@@ -138,7 +138,7 @@ UNLOADED=ig
# configured separately in the ig_config.json file; relies heavily
# on the llm and sd routers which have their own dependencies.
#
-# locate: some endpoints work as is, but the core location tracking
+# loc: some endpoints work as is, but the core location tracking
# functionality requires Postgresql + PostGIS extension and are
# designed specifically to pair with a mobile device where
# Pythonista is installed and configured to run the
@@ -148,8 +148,8 @@ UNLOADED=ig
# note: designed for use with Obsidian plus the Daily Notes and Tasks
# core extensions; and the Admonitions, Banners, Icons (with the
# Lucide pack), and Make.md community extensions. Moreover `notes`
-# relies heavily on the calendar, llm, locate, sd, summarize, time,
-# tts, and weather routers and accordingly on the external
+# relies heavily on the cal, llm, loc, sd, summarize, time,
+# tts, and weather routers and accordingly on the external
# dependencies of each.
#
# sd: requires ComfyUI plus any modules and StableDiffusion models
@@ -165,7 +165,7 @@ UNLOADED=ig
#
# weather: requires a VisualCrossing API key and is designed for (but doesn't
# itself strictly require) Postgresql with the PostGIS extension;
-# (... but it presently relies on the locate router, which does).
+# (... but it presently relies on the loc router, which does).
#
#
# ... Whew! that was a lot, right? I'm so glad we're in this together...
@@ -217,7 +217,7 @@ TAILSCALE_API_KEY=¿SECRET? # <--- enter your own TS API key
# ░▒ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░ ░ ░
# ░░ ░ ░T̷ O̷ G̷ E̷ T̷ H̷ ░ R̷. ░ ░ ░ ░ ░
# J U S T ░
-#─── frag, or weat,and locate modules:── H O L D M Y H A N D.
+#─── frag, or weat,and loc modules:────── H O L D M Y H A N D.
DB_NAME=db
#
DB_HOST=127.0.0.1
@@ -237,12 +237,12 @@ DB_SSH_PASS=¿SECRET? # <--- enter SSH password for pg server (if not l
# variables allow database access over an SSH tunnel.
#
# In the current implementation, we rely on Postgres to hold:
-# i. user-logged location data (locate module), and
+# i. user-logged location data (loc module), and
# ii. results from past weather forecast checks (weather module).
#
# A future version will hopefully make use of PostGIS's geocoding capabilities,
# and add a vector database for the LLM module. Until then it's up to you if the
-# locate and weather modules are worth the hassle of maintaining Postgres.
+# loc and weather modules are worth the hassle of maintaining Postgres.
# ──────────
#
#─────────────────────────────── 𝐼 𝐵 𝐸 𝑇 𝑌 𝑂 𝑈 ─────────────────────────────────
diff --git a/sijapi/routers/calendar.py b/sijapi/routers/cal.py
similarity index 95%
rename from sijapi/routers/calendar.py
rename to sijapi/routers/cal.py
index 2d0122b..7cca17a 100644
--- a/sijapi/routers/calendar.py
+++ b/sijapi/routers/cal.py
@@ -17,16 +17,16 @@ from datetime import datetime, timedelta
from Foundation import NSDate, NSRunLoop
import EventKit as EK
from sijapi import L, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
-from sijapi.routers.locate import localize_datetime
+from sijapi.routers import loc
-calendar = APIRouter()
+cal = APIRouter()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
timeout = httpx.Timeout(12)
if MS365_TOGGLE is True:
L.CRIT(f"Visit https://api.sij.ai/o365/login to obtain your Microsoft 365 authentication token.")
- @calendar.get("/o365/login")
+ @cal.get("/o365/login")
async def login():
L.DEBUG(f"Received request to /o365/login")
L.DEBUG(f"SCOPE: {MS365_SCOPE}")
@@ -40,7 +40,7 @@ if MS365_TOGGLE is True:
L.INFO(f"Redirecting to authorization URL: {authorization_url}")
return RedirectResponse(authorization_url)
- @calendar.get("/o365/oauth_redirect")
+ @cal.get("/o365/oauth_redirect")
async def oauth_redirect(code: str = None, error: str = None):
L.DEBUG(f"Received request to /o365/oauth_redirect")
if error:
@@ -73,7 +73,7 @@ if MS365_TOGGLE is True:
detail="Failed to obtain access token"
)
- @calendar.get("/o365/me")
+ @cal.get("/o365/me")
async def read_items():
L.DEBUG(f"Received request to /o365/me")
token = await load_token()
@@ -212,10 +212,10 @@ def datetime_to_nsdate(dt: datetime) -> NSDate:
return NSDate.dateWithTimeIntervalSince1970_(dt.timestamp())
-@calendar.get("/events")
+@cal.get("/events")
async def get_events_endpoint(start_date: str, end_date: str):
- start_dt = await localize_datetime(start_date)
- end_dt = await localize_datetime(end_date)
+ start_dt = await loc.dt(start_date)
+ end_dt = await loc.dt(end_date)
datetime.strptime(start_date, "%Y-%m-%d") or datetime.now()
end_dt = datetime.strptime(end_date, "%Y-%m-%d") or datetime.now()
response = await get_events(start_dt, end_dt)
@@ -341,8 +341,8 @@ async def get_ms365_events(start_date: datetime, end_date: datetime):
async def parse_calendar_for_day(range_start: datetime, range_end: datetime, events: List[Dict[str, Any]]):
- range_start = await localize_datetime(range_start)
- range_end = await localize_datetime(range_end)
+ range_start = await loc.dt(range_start)
+ range_end = await loc.dt(range_end)
event_list = []
for event in events:
@@ -361,13 +361,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
L.INFO(f"End date string not a dict")
try:
- start_date = await localize_datetime(start_str) if start_str else None
+ start_date = await loc.dt(start_str) if start_str else None
except (ValueError, TypeError) as e:
L.ERR(f"Invalid start date format: {start_str}, error: {e}")
continue
try:
- end_date = await localize_datetime(end_str) if end_str else None
+ end_date = await loc.dt(end_str) if end_str else None
except (ValueError, TypeError) as e:
L.ERR(f"Invalid end date format: {end_str}, error: {e}")
continue
@@ -376,13 +376,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
if start_date:
# Ensure start_date is timezone-aware
- start_date = await localize_datetime(start_date)
+ start_date = await loc.dt(start_date)
# If end_date is not provided, assume it's the same as start_date
if not end_date:
end_date = start_date
else:
- end_date = await localize_datetime(end_date)
+ end_date = await loc.dt(end_date)
# Check if the event overlaps with the given range
if (start_date < range_end) and (end_date > range_start):
diff --git a/sijapi/routers/email.py b/sijapi/routers/email.py
index f8f5ea9..7c066b0 100644
--- a/sijapi/routers/email.py
+++ b/sijapi/routers/email.py
@@ -3,6 +3,7 @@ Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming
'''
from fastapi import APIRouter
import asyncio
+import aiofiles
from imbox import Imbox
from bs4 import BeautifulSoup
import os
@@ -19,35 +20,20 @@ import yaml
from typing import List, Dict, Optional, Set
from datetime import datetime as dt_datetime
from sijapi import L, PODCAST_DIR, DEFAULT_VOICE, EMAIL_CONFIG, EMAIL_LOGS
-from sijapi.routers import tts, llm, sd, locate
+from sijapi.routers import loc, tts, llm, sd
from sijapi.utilities import clean_text, assemble_journal_path, extract_text, prefix_lines
from sijapi.classes import EmailAccount, IMAPConfig, SMTPConfig, IncomingEmail, EmailContact, AutoResponder
from sijapi.classes import EmailAccount
email = APIRouter(tags=["private"])
+
def load_email_accounts(yaml_path: str) -> List[EmailAccount]:
with open(yaml_path, 'r') as file:
config = yaml.safe_load(file)
return [EmailAccount(**account) for account in config['accounts']]
-def get_account_by_email(this_email: str) -> Optional[EmailAccount]:
- email_accounts = load_email_accounts(EMAIL_CONFIG)
- for account in email_accounts:
- if account.imap.username.lower() == this_email.lower():
- return account
- return None
-
-def get_imap_details(this_email: str) -> Optional[IMAPConfig]:
- account = get_account_by_email(this_email)
- return account.imap if account else None
-
-def get_smtp_details(this_email: str) -> Optional[SMTPConfig]:
- account = get_account_by_email(this_email)
- return account.smtp if account else None
-
-
def get_imap_connection(account: EmailAccount):
return Imbox(account.imap.host,
username=account.imap.username,
@@ -56,79 +42,17 @@ def get_imap_connection(account: EmailAccount):
ssl=account.imap.encryption == 'SSL',
starttls=account.imap.encryption == 'STARTTLS')
-def get_smtp_connection(account: EmailAccount):
+def get_smtp_connection(autoresponder: AutoResponder):
context = ssl._create_unverified_context()
- if account.smtp.encryption == 'SSL':
- return SMTP_SSL(account.smtp.host, account.smtp.port, context=context)
- elif account.smtp.encryption == 'STARTTLS':
- smtp = SMTP(account.smtp.host, account.smtp.port)
+ if autoresponder.smtp.encryption == 'SSL':
+ return SMTP_SSL(autoresponder.smtp.host, autoresponder.smtp.port, context=context)
+ elif autoresponder.smtp.encryption == 'STARTTLS':
+ smtp = SMTP(autoresponder.smtp.host, autoresponder.smtp.port)
smtp.starttls(context=context)
return smtp
else:
- return SMTP(account.smtp.host, account.smtp.port)
-
-
-def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
- L.DEBUG(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
- def matches_list(item: str, this_email: IncomingEmail) -> bool:
- if '@' in item:
- return item in this_email.sender
- else:
- return item.lower() in this_email.subject.lower() or item.lower() in this_email.body.lower()
- matching_profiles = []
- for profile in account.autoresponders:
- whitelist_match = not profile.whitelist or any(matches_list(item, this_email) for item in profile.whitelist)
- blacklist_match = any(matches_list(item, this_email) for item in profile.blacklist)
- if whitelist_match and not blacklist_match:
- L.DEBUG(f"We have a match for {whitelist_match} and no blacklist matches.")
- matching_profiles.append(profile)
- elif whitelist_match and blacklist_match:
- L.DEBUG(f"Matched whitelist for {whitelist_match}, but also matched blacklist for {blacklist_match}")
- else:
- L.DEBUG(f"No whitelist or blacklist matches.")
- return matching_profiles
-
-
-async def generate_auto_response_body(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> str:
- now = await locate.localize_datetime(dt_datetime.now())
- then = await locate.localize_datetime(this_email.datetime_received)
- age = now - then
- usr_prompt = f'''
- Generate a personalized auto-response to the following email:
- From: {this_email.sender}
- Sent: {age} ago
- Subject: "{this_email.subject}"
- Body:
- {this_email.body}
- Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}.
- Keep the response {profile.style} and to the point, but responsive to the sender's inquiry.
- Do not mention or recite this context information in your response.
- '''
- sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"
- try:
- # async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LLM, max_tokens: int = 200):
- response = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
-
- L.DEBUG(f"query_ollama response: {response}")
-
- if isinstance(response, str):
- response += "\n\n"
- return response
- elif isinstance(response, dict):
- if "message" in response and "content" in response["message"]:
- return response["message"]["content"]
- else:
- L.ERR(f"Unexpected response structure from query_ollama: {response}")
- else:
- L.ERR(f"Unexpected response type from query_ollama: {type(response)}")
-
- # If we reach here, we couldn't extract a valid response
- raise ValueError("Could not extract valid response from query_ollama")
-
- except Exception as e:
- L.ERR(f"Error generating auto-response: {str(e)}")
- return f"Thank you for your email regarding '{this_email.subject}'. We are currently experiencing technical difficulties with our auto-response system. We will review your email and respond as soon as possible. We apologize for any inconvenience."
+ return SMTP(autoresponder.smtp.host, autoresponder.smtp.port)
def clean_email_content(html_content):
@@ -156,119 +80,7 @@ async def extract_attachments(attachments) -> List[str]:
return attachment_texts
-
-async def save_email(this_email: IncomingEmail, account: EmailAccount):
- try:
- md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
- tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
- summary = ""
- if account.summarize == True:
- email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
- if this_email.attachments:
- attachment_texts = await extract_attachments(this_email.attachments)
- email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
- summary = await llm.summarize_text(email_content)
- await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = account.podcast, output_path = tts_path)
- summary = prefix_lines(summary, '> ')
-
- # Create the markdown content
- markdown_content = f'''---
-date: {this_email.datetime_received.strftime('%Y-%m-%d')}
-tags:
-- email
----
-| | | |
-| --: | :--: | :--: |
-| *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}** | |
-| *from* | **[[{this_email.sender}]]** | |
-| *to* | {', '.join([f'**[[{recipient.email}]]**' if not recipient.name else f'**[[{recipient.name}|{recipient.email}]]**' for recipient in this_email.recipients])} | |
-| *subject* | **{this_email.subject}** | |
-'''
-
- if summary:
- markdown_content += f'''
-> [!summary] Summary
-> {summary}
-'''
-
- if tts_path.exists():
- markdown_content += f'''
-![[{tts_path}]]
-'''
-
- markdown_content += f'''
----
-{this_email.body}
-'''
-
- with open(md_path, 'w', encoding='utf-8') as md_file:
- md_file.write(markdown_content)
-
- L.DEBUG(f"Saved markdown to {md_path}")
-
- return True
-
- except Exception as e:
- L.ERR(f"Exception: {e}")
- return False
-
-async def autorespond(this_email: IncomingEmail, account: EmailAccount):
- L.DEBUG(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
- matching_profiles = get_matching_autoresponders(this_email, account)
- L.DEBUG(f"Matching profiles: {matching_profiles}")
- for profile in matching_profiles:
- L.INFO(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")
- auto_response_subject = f"Auto-Response Re: {this_email.subject}"
- auto_response_body = await generate_auto_response_body(this_email, profile, account)
- L.DEBUG(f"Auto-response: {auto_response_body}")
- success = await send_auto_response(this_email.sender, auto_response_subject, auto_response_body, profile, account)
- if success == True:
- return True
-
- L.WARN(f"We were not able to successfully auto-respond to {this_email.subject}")
- return False
-
-async def send_auto_response(to_email, subject, body, profile, account):
- try:
- message = MIMEMultipart()
- message['From'] = account.smtp.username
- message['To'] = to_email
- message['Subject'] = subject
- message.attach(MIMEText(body, 'plain'))
-
- if profile.image_prompt:
- jpg_path = await sd.workflow(profile.image_prompt, earlyout=False, downscale_to_fit=True)
- if jpg_path and os.path.exists(jpg_path):
- with open(jpg_path, 'rb') as img_file:
- img = MIMEImage(img_file.read(), name=os.path.basename(jpg_path))
- message.attach(img)
-
- L.DEBUG(f"Sending auto-response {to_email} concerning {subject} from account {account.name}...")
- with get_smtp_connection(account) as server:
- server.login(account.smtp.username, account.smtp.password)
- server.send_message(message)
-
- L.INFO(f"Auto-response sent to {to_email} concerning {subject} from account {account.name}!")
- return True
-
- except Exception as e:
- L.ERR(f"Error in preparing/sending auto-response from account {account.name}: {e}")
- return False
-
-
-
-
-async def load_processed_uids(filename: Path) -> Set[str]:
- if filename.exists():
- with open(filename, 'r') as f:
- return set(line.strip().split(':')[-1] for line in f)
- return set()
-
-async def save_processed_uid(filename: Path, account_name: str, uid: str):
- with open(filename, 'a') as f:
- f.write(f"{account_name}:{uid}\n")
-
-async def process_account_summarization(account: EmailAccount):
+async def process_account_archival(account: EmailAccount):
summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
os.makedirs(summarized_log.parent, exist_ok = True)
@@ -283,7 +95,7 @@ async def process_account_summarization(account: EmailAccount):
uid_str = uid.decode() if isinstance(uid, bytes) else str(uid)
if uid_str not in processed_uids:
recipients = [EmailContact(email=recipient['email'], name=recipient.get('name', '')) for recipient in message.sent_to]
- localized_datetime = await locate.localize_datetime(message.date)
+ localized_datetime = await loc.dt(message.date)
this_email = IncomingEmail(
sender=message.sent_from[0]['email'],
datetime_received=localized_datetime,
@@ -292,15 +104,15 @@ async def process_account_summarization(account: EmailAccount):
body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
attachments=message.attachments
)
- if account.summarize:
- save_success = await save_email(this_email, account)
- if save_success:
- await save_processed_uid(summarized_log, account.name, uid_str)
- L.INFO(f"Summarized email: {uid_str}")
- else:
- L.WARN(f"Failed to summarize {this_email.subject}")
+ md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
+ md_summary = await summarize_single_email(this_email, account.podcast) if account.summarize == True else None
+ md_content = await archive_single_email(this_email, md_summary)
+ save_success = await save_email(md_path, md_content)
+ if save_success:
+ await save_processed_uid(summarized_log, account.name, uid_str)
+ L.INFO(f"Summarized email: {uid_str}")
else:
- L.INFO(f"account.summarize shows as false.")
+ L.WARN(f"Failed to summarize {this_email.subject}")
else:
L.DEBUG(f"Skipping {uid_str} because it was already processed.")
except Exception as e:
@@ -308,51 +120,216 @@ async def process_account_summarization(account: EmailAccount):
await asyncio.sleep(account.refresh)
async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False):
    """Summarize one email via the LLM, render the summary to speech, and
    return a markdown ``ad.summary`` admonition (with a TTS embed if the
    audio file was produced)."""
    tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
    email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
    if this_email.attachments:
        # Fold any extractable attachment text into the content handed to the LLM.
        attachment_texts = await extract_attachments(this_email.attachments)
        email_content += "\n—--\n" + "\n—--\n".join(f"Attachment: {text}" for text in attachment_texts)

    summary = await llm.summarize_text(email_content)
    await tts.local_tts(text_content=summary, speed=1.1, voice=DEFAULT_VOICE, podcast=podcast, output_path=tts_path)

    parts = [
        '```ad.summary\n',
        f'title: {this_email.subject}\n',
        f'{summary}\n',
        '```\n\n',
    ]
    if tts_path.exists():
        parts.append(f'![[{tts_path}]]\n')
    return ''.join(parts)
+
async def archive_single_email(this_email: IncomingEmail, summary: str = None):
    """Render an IncomingEmail as Obsidian-flavored markdown (frontmatter,
    metadata table, optional summary block, then the raw body).

    Returns the markdown string, or False if rendering raised.
    """
    try:
        # One table cell linking every recipient, preferring "name|email" aliases.
        recipient_cell = ', '.join(
            f'**[[{recipient.email}]]**' if not recipient.name
            else f'**[[{recipient.name}|{recipient.email}]]**'
            for recipient in this_email.recipients
        )
        markdown_content = f'''---
date: {this_email.datetime_received.strftime('%Y-%m-%d')}
tags:
- email
---
| | | |
| --: | :--: | :--: |
| *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}** | |
| *from* | **[[{this_email.sender}]]** | |
| *to* | {recipient_cell} | |
| *subject* | **{this_email.subject}** | |
'''
        if summary:
            markdown_content += summary

        markdown_content += f'''
---
{this_email.body}
'''
        return markdown_content

    except Exception as e:
        L.ERR(f"Exception: {e}")
        return False
+
async def save_email(md_path, md_content):
    """Write rendered markdown to *md_path* (UTF-8); True on success, False on failure."""
    try:
        with open(md_path, 'w', encoding='utf-8') as md_file:
            md_file.write(md_content)
        L.DEBUG(f"Saved markdown to {md_path}")
    except Exception as e:
        L.ERR(f"Failed to save email: {e}")
        return False
    return True
+
def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
    """Return the account's autoresponder profiles whose whitelist matches this
    email (or is empty) and whose blacklist does not match it."""
    L.DEBUG(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")

    def matches_list(item: str, this_email: IncomingEmail) -> bool:
        # Entries containing '@' are matched against the sender address;
        # anything else is a case-insensitive subject/body substring.
        if '@' in item:
            return item in this_email.sender
        return item.lower() in this_email.subject.lower() or item.lower() in this_email.body.lower()

    matching_profiles: List[AutoResponder] = []
    for profile in account.autoresponders:
        # An empty whitelist counts as "match everything".
        whitelist_match = not profile.whitelist or any(matches_list(entry, this_email) for entry in profile.whitelist)
        blacklist_match = any(matches_list(entry, this_email) for entry in profile.blacklist)
        if whitelist_match and not blacklist_match:
            L.DEBUG(f"We have a match for {whitelist_match} and no blacklist matches.")
            matching_profiles.append(profile)
        elif whitelist_match and blacklist_match:
            L.DEBUG(f"Matched whitelist for {whitelist_match}, but also matched blacklist for {blacklist_match}")
        else:
            L.DEBUG(f"No whitelist or blacklist matches.")
    return matching_profiles
+
+
async def process_account_autoresponding(account: EmailAccount):
    """Poll one account's inbox forever, auto-responding to unread messages.

    Processed UIDs are appended to a per-account log file so a message is
    never answered twice across restarts. Sleeps ``account.refresh`` seconds
    between polls; any per-cycle exception is logged and the loop continues.
    """
    EMAIL_AUTORESPONSE_LOG = EMAIL_LOGS / account.name / "autoresponded.txt"
    os.makedirs(EMAIL_AUTORESPONSE_LOG.parent, exist_ok=True)
    while True:
        try:
            # UIDs already answered in earlier cycles (or earlier runs).
            processed_uids = await load_processed_uids(EMAIL_AUTORESPONSE_LOG)
            L.DEBUG(f"{len(processed_uids)} emails marked as already responded to are being ignored.")

            with get_imap_connection(account) as inbox:
                unread_messages = inbox.messages(unread=True)
                L.DEBUG(f"There are {len(unread_messages)} unread messages.")

                for uid, message in unread_messages:
                    # IMAP UIDs may arrive as bytes; normalize to str for the log.
                    uid_str = uid.decode() if isinstance(uid, bytes) else str(uid)
                    if uid_str not in processed_uids:
                        await autorespond_single_email(message, uid_str, account, EMAIL_AUTORESPONSE_LOG)
                    else:
                        L.DEBUG(f"Skipping {uid_str} because it was already processed.")

        except Exception as e:
            L.ERR(f"An error occurred during auto-responding for account {account.name}: {e}")

        await asyncio.sleep(account.refresh)
-async def process_all_accounts():
async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path):
    """Generate and send auto-responses for one unread message; record its UID
    in *log_file* only when a response was actually sent."""
    this_email = await create_incoming_email(message)
    L.DEBUG(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
    matching_profiles = get_matching_autoresponders(this_email, account)
    L.DEBUG(f"Matching profiles: {matching_profiles}")

    for profile in matching_profiles:
        response_body = await generate_response(this_email, profile, account)
        if not response_body:
            L.WARN(f"Unable to generate auto-response for {this_email.subject}")
            continue

        subject = f"Re: {this_email.subject}"
        # Optionally attach a generated image when the profile asks for one.
        jpg_path = None
        if profile.image_prompt:
            jpg_path = await sd.workflow(profile.image_prompt, earlyout=False, downscale_to_fit=True)

        success = await send_response(this_email.sender, subject, response_body, profile, account, jpg_path)
        if success:
            L.WARN(f"Auto-responded to email: {this_email.subject}")
            await save_processed_uid(log_file, account.name, uid_str)
        else:
            L.WARN(f"Failed to send auto-response to {this_email.subject}")
+
async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]:
    """Draft an auto-reply body with the profile's LLM; None if generation fails."""
    L.INFO(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")

    # Age of the email, expressed in localized time so the delta is meaningful.
    now = await loc.dt(dt_datetime.now())
    then = await loc.dt(this_email.datetime_received)
    age = now - then
    usr_prompt = f'''
Generate a personalized auto-response to the following email:
From: {this_email.sender}
Sent: {age} ago
Subject: "{this_email.subject}"
Body: {this_email.body}
---
Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}. Keep the response {profile.style} and to the point, but responsive to the sender's inquiry. Do not mention or recite this context information in your response.
    '''
    sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"

    try:
        raw = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
        L.DEBUG(f"query_ollama response: {raw}")

        # Ollama may return either a plain string or a chat-style dict.
        if isinstance(raw, dict) and "message" in raw and "content" in raw["message"]:
            raw = raw["message"]["content"]

        return raw + "\n\n"

    except Exception as e:
        L.ERR(f"Error generating auto-response: {str(e)}")
        return None
+
async def send_response(to_email: str, subject: str, body: str, profile: AutoResponder, account: EmailAccount, image_attachment: Path = None) -> bool:
    """Send an auto-response email via the account's SMTP server.

    Args:
        to_email: recipient address (the original sender).
        subject: reply subject line.
        body: plain-text response body.
        profile: the autoresponder profile that produced the response.
        account: the EmailAccount whose SMTP credentials are used to send.
        image_attachment: optional path to a JPEG to attach.

    Returns:
        True when the message was handed to the SMTP server, False on any error.

    BUG FIX: the caller (autorespond_single_email) passes six arguments
    including ``account``, but this signature previously accepted only five —
    every call raised TypeError. It also read ``profile.smtp`` / ``profile.name``,
    whereas SMTP credentials live on the account (as in the pre-refactor code).
    """
    try:
        message = MIMEMultipart()
        message['From'] = account.smtp.username
        message['To'] = to_email
        message['Subject'] = subject
        message.attach(MIMEText(body, 'plain'))

        if image_attachment and os.path.exists(image_attachment):
            with open(image_attachment, 'rb') as img_file:
                img = MIMEImage(img_file.read(), name=os.path.basename(image_attachment))
                message.attach(img)

        L.DEBUG(f"Sending auto-response to {to_email} concerning {subject} from account {account.name}...")

        with get_smtp_connection(account) as server:
            server.login(account.smtp.username, account.smtp.password)
            server.send_message(message)

        L.INFO(f"Auto-response sent to {to_email} concerning {subject} from account {account.name}!")
        return True

    except Exception as e:
        L.ERR(f"Error in preparing/sending auto-response from account {account.name}: {e}")
        return False
+
async def create_incoming_email(message) -> IncomingEmail:
    """Normalize a raw IMAP message object into the project's IncomingEmail model."""
    recipients = [
        EmailContact(email=recipient['email'], name=recipient.get('name', ''))
        for recipient in message.sent_to
    ]
    # Prefer the HTML body; fall back to plain text, then empty string.
    if message.body['html']:
        body = clean_email_content(message.body['html'][0])
    else:
        body = clean_email_content(message.body['plain'][0]) or ""
    return IncomingEmail(
        sender=message.sent_from[0]['email'],
        datetime_received=await loc.dt(message.date),
        recipients=recipients,
        subject=message.subject,
        body=body,
        attachments=message.attachments,
    )
+
async def load_processed_uids(filename: Path) -> Set[str]:
    """Return the set of UIDs recorded in *filename* (empty set if the file is absent).

    Each line has the form "account_name:uid"; only the uid part is kept.
    """
    if not filename.exists():
        return set()
    async with aiofiles.open(filename, 'r') as f:
        lines = await f.readlines()
    return {line.strip().split(':')[-1] for line in lines}
+
async def save_processed_uid(filename: Path, account_name: str, uid: str):
    """Append one "account_name:uid" record to the processed-UID log."""
    record = f"{account_name}:{uid}\n"
    async with aiofiles.open(filename, 'a') as f:
        await f.write(record)
+
+
async def process_all_accounts():
    """Launch the archival and auto-responder polling loops for every configured account."""
    email_accounts = load_email_accounts(EMAIL_CONFIG)
    tasks = [
        asyncio.create_task(worker(account))
        for account in email_accounts
        for worker in (process_account_archival, process_account_autoresponding)
    ]
    await asyncio.gather(*tasks)
+
@email.on_event("startup")
async def startup_event():
    # Give the rest of the app a moment to finish initializing before touching
    # mail servers, then run the per-account polling loops in the background.
    await asyncio.sleep(5)
    asyncio.create_task(process_all_accounts())
diff --git a/sijapi/routers/llm.py b/sijapi/routers/llm.py
index 0db62f8..d38c6e2 100644
--- a/sijapi/routers/llm.py
+++ b/sijapi/routers/llm.py
@@ -80,14 +80,6 @@ async def generate_response(prompt: str):
)
return {"response": output['response']}
-@llm.post("/llm/query")
-async def llm_query_endpoint(
- message: str = Form(...),
- file: Optional(UploadFile) = Form(...)
-):
- return None
-
-
async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LLM, max_tokens: int = 200):
messages = [{"role": "system", "content": sys},
diff --git a/sijapi/routers/loc.py b/sijapi/routers/loc.py
new file mode 100644
index 0000000..3484533
--- /dev/null
+++ b/sijapi/routers/loc.py
@@ -0,0 +1,385 @@
+'''
+Uses Postgres/PostGIS for for location tracking (data obtained via the companion mobile Pythonista scripts), and for geocoding purposes.
+'''
+from fastapi import APIRouter, HTTPException, Query
+from fastapi.responses import HTMLResponse, JSONResponse
+import yaml
+from typing import List, Tuple, Union
+import traceback
+from datetime import datetime, timezone
+from typing import Union, List
+import folium
+from zoneinfo import ZoneInfo
+from dateutil.parser import parse as dateutil_parse
+from typing import Optional, List, Union
+from datetime import datetime
+from sijapi import L, DB, TZ, NAMED_LOCATIONS, DynamicTZ, GEO
+from sijapi.classes import Location
+from sijapi.utilities import haversine
+
+loc = APIRouter()
+
async def _tz_from_last_location() -> ZoneInfo:
    """Best-effort local timezone derived from the most recent stored location.

    Falls back to UTC when there is no location history or DynamicTZ returns
    an invalid timezone string.
    """
    last_loc = await get_last_location()
    if last_loc is None:
        L.WARN("No location history available to determine local timezone. Falling back to UTC.")
        return ZoneInfo('UTC')
    tz_str = DynamicTZ.find(last_loc.latitude, last_loc.longitude)
    try:
        return ZoneInfo(tz_str)
    except Exception as e:
        L.WARN(f"Invalid timezone string '{tz_str}' from DynamicTZ. Falling back to UTC. Error: {e}")
        return ZoneInfo('UTC')


async def dt(
    date_time: Union[str, datetime],
    tz: Union[str, ZoneInfo, None] = None
) -> datetime:
    """Parse and timezone-normalize a datetime.

    Args:
        date_time: a datetime or a string parseable by dateutil.
        tz: 'local' (infer from last stored location), an IANA tz string,
            a ZoneInfo, or None (only fill in missing tzinfo).

    Returns:
        A timezone-aware datetime.

    Raises:
        ValueError: on unparseable input or an invalid tz argument.

    BUG FIXES vs. previous version: ``get_last_location()`` takes no
    arguments but was called with one (TypeError), and the naive-datetime
    branch called it without ``await`` (attribute access on a coroutine).
    """
    try:
        # Accept strings by parsing them first.
        if isinstance(date_time, str):
            date_time = dateutil_parse(date_time)
            L.DEBUG(f"Converted string '{date_time}' to datetime object.")

        if not isinstance(date_time, datetime):
            raise ValueError("Input must be a string or datetime object.")

        if tz is not None:
            if tz == "local":
                tz = await _tz_from_last_location()
                L.DEBUG(f"Using local timezone: {tz}")
            elif isinstance(tz, str):
                try:
                    tz = ZoneInfo(tz)
                except Exception as e:
                    L.ERR(f"Invalid timezone string '{tz}'. Error: {e}")
                    raise ValueError(f"Invalid timezone string: {tz}")
            elif isinstance(tz, ZoneInfo):
                pass  # already usable as-is
            else:
                raise ValueError("tz must be 'local', a string, or a ZoneInfo object.")

            # Localize to the provided or determined timezone.
            if date_time.tzinfo is None:
                date_time = date_time.replace(tzinfo=tz)
                L.DEBUG(f"Localized naive datetime to timezone: {tz}")
            else:
                date_time = date_time.astimezone(tz)
                L.DEBUG(f"Converted datetime from {date_time.tzinfo} to timezone: {tz}")

        # No tz requested: only fill in missing tzinfo, never convert.
        elif date_time.tzinfo is None:
            tz = await _tz_from_last_location()
            date_time = date_time.replace(tzinfo=tz)
            L.DEBUG(f"Filled in missing timezone info: {tz}")

        else:
            L.DEBUG(f"Datetime already has timezone {date_time.tzinfo}. No changes made.")

        return date_time
    except ValueError as e:
        L.ERR(f"Error in dt: {e}")
        raise
    except Exception as e:
        L.ERR(f"Unexpected error in dt: {e}")
        raise ValueError(f"Failed to localize datetime: {e}")
+
async def get_last_location() -> Optional[Location]:
    """Return the most recently recorded location as of now (server TZ), or None."""
    query_datetime = datetime.now(TZ)
    L.DEBUG(f"Query_datetime: {query_datetime}")

    this_location = await fetch_last_location_before(query_datetime)
    if not this_location:
        return None

    L.DEBUG(f"location: {this_location}")
    return this_location
+
+
async def fetch_locations(start: datetime, end: datetime = None) -> List[Location]:
    """Fetch stored location fixes between *start* and *end* (newest first).

    When *end* is omitted the range covers the rest of *start*'s day. If the
    range itself is empty and spans a single day, falls back to the single
    most recent fix before *start*. Rows lacking lat/long are dropped.
    """
    start_datetime = await dt(start)
    if end is None:
        end_datetime = await dt(start_datetime.replace(hour=23, minute=59, second=59))
    else:
        end_datetime = await dt(end)

    # Two bare dates (midnight times) mean "whole days" — stretch the end bound.
    if start_datetime.time() == datetime.min.time() and end_datetime.time() == datetime.min.time():
        end_datetime = end_datetime.replace(hour=23, minute=59, second=59)

    L.DEBUG(f"Fetching locations between {start_datetime} and {end_datetime}")

    async with DB.get_connection() as conn:
        locations = []
        # Check for records within the specified datetime range.
        # NOTE(review): tzinfo is stripped before querying — presumably the
        # `datetime` column is stored naive; confirm against the schema.
        range_locations = await conn.fetch('''
            SELECT id, datetime,
            ST_X(ST_AsText(location)::geometry) AS longitude,
            ST_Y(ST_AsText(location)::geometry) AS latitude,
            ST_Z(ST_AsText(location)::geometry) AS elevation,
            city, state, zip, street,
            action, device_type, device_model, device_name, device_os
            FROM locations
            WHERE datetime >= $1 AND datetime <= $2
            ORDER BY datetime DESC
            ''', start_datetime.replace(tzinfo=None), end_datetime.replace(tzinfo=None))

        L.DEBUG(f"Range locations query returned: {range_locations}")
        locations.extend(range_locations)

        # Fallback: nothing in range and the request was effectively one day —
        # take the latest fix prior to the start bound instead.
        if not locations and (end is None or start_datetime.date() == end_datetime.date()):
            location_data = await conn.fetchrow('''
                SELECT id, datetime,
                ST_X(ST_AsText(location)::geometry) AS longitude,
                ST_Y(ST_AsText(location)::geometry) AS latitude,
                ST_Z(ST_AsText(location)::geometry) AS elevation,
                city, state, zip, street,
                action, device_type, device_model, device_name, device_os
                FROM locations
                WHERE datetime < $1
                ORDER BY datetime DESC
                LIMIT 1
                ''', start_datetime.replace(tzinfo=None))

            L.DEBUG(f"Fallback query returned: {location_data}")
            if location_data:
                locations.append(location_data)

    L.DEBUG(f"Locations found: {locations}")

    # Sort location_data based on the datetime field in descending order.
    sorted_locations = sorted(locations, key=lambda x: x['datetime'], reverse=True)

    # Create Location objects directly from the location data, skipping rows
    # without coordinates.
    location_objects = [
        Location(
            latitude=location['latitude'],
            longitude=location['longitude'],
            datetime=location['datetime'],
            elevation=location.get('elevation'),
            city=location.get('city'),
            state=location.get('state'),
            zip=location.get('zip'),
            street=location.get('street'),
            context={
                'action': location.get('action'),
                'device_type': location.get('device_type'),
                'device_model': location.get('device_model'),
                'device_name': location.get('device_name'),
                'device_os': location.get('device_os')
            }
        ) for location in sorted_locations if location['latitude'] is not None and location['longitude'] is not None
    ]

    return location_objects if location_objects else []
+
async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
    """Return the most recent stored Location strictly before *datetime*, or None.

    Note: the parameter name shadows the ``datetime`` class inside this body;
    kept as-is so keyword callers don't break.

    BUG FIX: removed ``await conn.close()`` — the connection is owned by the
    ``async with DB.get_connection()`` context manager, and closing it here
    invalidates the managed (likely pooled) connection.
    """
    datetime = await dt(datetime)

    L.DEBUG(f"Fetching last location before {datetime}")

    async with DB.get_connection() as conn:
        # tzinfo stripped to match how rows are stored (see fetch_locations).
        location_data = await conn.fetchrow('''
            SELECT id, datetime,
            ST_X(ST_AsText(location)::geometry) AS longitude,
            ST_Y(ST_AsText(location)::geometry) AS latitude,
            ST_Z(ST_AsText(location)::geometry) AS elevation,
            city, state, zip, street, country,
            action
            FROM locations
            WHERE datetime < $1
            ORDER BY datetime DESC
            LIMIT 1
            ''', datetime.replace(tzinfo=None))

    if location_data:
        L.DEBUG(f"Last location found: {location_data}")
        return Location(**location_data)
    else:
        L.DEBUG("No location found before the specified datetime")
        return None
+
@loc.get("/map/start_date={start_date_str}&end_date={end_date_str}", response_class=HTMLResponse)
async def generate_map_endpoint(start_date_str: str, end_date_str: str):
    """Serve an HTML map of locations between the two given dates."""
    try:
        start_date = await dt(start_date_str)
        end_date = await dt(end_date_str)
    except ValueError:
        # Unparseable date strings surface as a client error.
        raise HTTPException(status_code=400, detail="Invalid date format")

    return HTMLResponse(content=await generate_map(start_date, end_date))
+
+
@loc.get("/map", response_class=HTMLResponse)
async def generate_alltime_map_endpoint():
    """Serve an HTML map of every location recorded since 2022-01-01.

    BUG FIX: ``end_date`` was previously assigned ``dt(datetime.now())``
    without ``await``, passing a coroutine object into generate_map.
    """
    try:
        start_date = await dt(datetime.fromisoformat("2022-01-01"))
        end_date = await dt(datetime.now())
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid date format")

    html_content = await generate_map(start_date, end_date)
    return HTMLResponse(content=html_content)
+
+
async def generate_map(start_date: datetime, end_date: datetime):
    """Build a folium map (as an HTML string) of all stored locations in range.

    Raises:
        HTTPException(404): when no locations exist in the range.

    BUG FIX: the popup f-string contained raw, unescaped line breaks — a
    Python syntax error. Rebuilt as an explicit multi-line literal carrying
    the same text.
    """
    locations = await fetch_locations(start_date, end_date)
    if not locations:
        raise HTTPException(status_code=404, detail="No locations found for the given date range")

    # Center the map on the first (newest) fix.
    map_center = [locations[0].latitude, locations[0].longitude]
    m = folium.Map(location=map_center, zoom_start=5)

    # Add markers for each location.
    for location in locations:
        folium.Marker(
            location=[location.latitude, location.longitude],
            popup=(
                f"{location.city}, {location.state}\n"
                f"Elevation: {location.elevation}m\n"
                f"Date: {location.datetime}"
            ),
            tooltip=f"{location.city}, {location.state}"
        ).add_to(m)

    # Save the map to an HTML file and return the HTML content.
    # NOTE(review): fixed relative path in CWD — concurrent requests race on
    # this file; confirm whether that matters for this deployment.
    map_html = "map.html"
    m.save(map_html)

    with open(map_html, 'r') as file:
        html_content = file.read()

    return html_content
+
async def post_location(location: Location):
    """Insert one Location row into Postgres/PostGIS.

    Returns a dict echoing the stored fields on success, or None on failure
    (errors are logged, not raised, so a bad fix doesn't abort a batch).

    BUG FIX: removed ``await conn.close()`` — the connection belongs to the
    ``async with DB.get_connection()`` context manager; closing it mid-block
    invalidates the managed (likely pooled) connection.
    """
    L.DEBUG(f"post_location called with {location.datetime}")

    async with DB.get_connection() as conn:
        try:
            # Device/context metadata with conservative defaults.
            context = location.context or {}
            action = context.get('action', 'manual')
            device_type = context.get('device_type', 'Unknown')
            device_model = context.get('device_model', 'Unknown')
            device_name = context.get('device_name', 'Unknown')
            device_os = context.get('device_os', 'Unknown')

            # Parse and localize the datetime.
            localized_datetime = await dt(location.datetime)

            await conn.execute('''
                INSERT INTO locations (
                    datetime, location, city, state, zip, street, action, device_type, device_model, device_name, device_os,
                    class_, type, name, display_name, amenity, house_number, road, quarter, neighbourhood,
                    suburb, county, country_code, country
                )
                VALUES ($1, ST_SetSRID(ST_MakePoint($2, $3, $4), 4326), $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15,
                        $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26)
            ''', localized_datetime, location.longitude, location.latitude, location.elevation, location.city, location.state,
                location.zip, location.street, action, device_type, device_model, device_name, device_os,
                location.class_, location.type, location.name, location.display_name,
                location.amenity, location.house_number, location.road, location.quarter, location.neighbourhood,
                location.suburb, location.county, location.country_code, location.country)

            L.INFO(f"Successfully posted location: {location.latitude}, {location.longitude}, {location.elevation} on {localized_datetime}")
            return {
                'datetime': localized_datetime,
                'latitude': location.latitude,
                'longitude': location.longitude,
                'elevation': location.elevation,
                'city': location.city,
                'state': location.state,
                'zip': location.zip,
                'street': location.street,
                'action': action,
                'device_type': device_type,
                'device_model': device_model,
                'device_name': device_name,
                'device_os': device_os,
                'class_': location.class_,
                'type': location.type,
                'name': location.name,
                'display_name': location.display_name,
                'amenity': location.amenity,
                'house_number': location.house_number,
                'road': location.road,
                'quarter': location.quarter,
                'neighbourhood': location.neighbourhood,
                'suburb': location.suburb,
                'county': location.county,
                'country_code': location.country_code,
                'country': location.country
            }
        except Exception as e:
            L.ERR(f"Error posting location {e}")
            L.ERR(traceback.format_exc())
            return None
+
+
@loc.post("/locate")
async def post_locate_endpoint(locations: Union[Location, List[Location]]):
    """Accept one or more raw Location fixes, geocode them, and store them.

    BUG FIX: ``DynamicTZ.find`` returns an IANA timezone *string* (it is fed
    to ``ZoneInfo`` everywhere else in this module); passing that string
    straight to ``datetime.now()`` raised TypeError. It is now wrapped in
    ``ZoneInfo`` first.
    """
    if isinstance(locations, Location):
        locations = [locations]

    # Prepare locations: fill in missing timestamp and context metadata.
    for location in locations:
        if not location.datetime:
            tz_str = DynamicTZ.find(location.latitude, location.longitude)
            location.datetime = datetime.now(ZoneInfo(tz_str)).isoformat()

        if not location.context:
            location.context = {
                "action": "manual",
                "device_type": "Pythonista",
                "device_model": "Unknown",
                "device_name": "Unknown",
                "device_os": "Unknown"
            }
        L.DEBUG(f"Location received for processing: {location}")

    # Enrich with reverse-geocoded address fields before persisting.
    geocoded_locations = await GEO.code(locations)

    responses = []
    for location in geocoded_locations:
        L.DEBUG(f"Final location submitted to database: {location}")

        location_entry = await post_location(location)
        if location_entry:
            responses.append({"location_data": location_entry})
        else:
            L.WARN(f"Posting location to database appears to have failed.")

    return {"message": "Locations and weather updated", "results": responses}
+
+
+
@loc.get("/locate", response_model=Location)
async def get_last_location_endpoint() -> JSONResponse:
    """Return the most recently recorded location as JSON (404 if none exists)."""
    this_location = await get_last_location()
    if not this_location:
        raise HTTPException(status_code=404, detail="No location found before the specified datetime")

    payload = this_location.model_dump()
    # datetime is not JSON-serializable as-is; emit ISO-8601.
    payload["datetime"] = this_location.datetime.isoformat()
    return JSONResponse(content=payload)
+
@loc.get("/locate/{datetime_str}", response_model=List[Location])
async def get_locate(datetime_str: str, all: bool = False):
    """Return location fixes for the given datetime string.

    With ``all=True`` every fix for that day is returned; otherwise only the
    most recent one.
    """
    try:
        date_time = await dt(datetime_str)
    except ValueError as e:
        L.ERR(f"Invalid datetime string provided: {datetime_str}")
        return ["ERROR: INVALID DATETIME PROVIDED. USE YYYYMMDDHHmmss or YYYYMMDD format."]

    locations = await fetch_locations(date_time)
    if not locations:
        raise HTTPException(status_code=404, detail="No nearby data found for this date and time")

    return locations if all else locations[:1]
+
diff --git a/sijapi/routers/locate.py b/sijapi/routers/locate.py
deleted file mode 100644
index e0fb40c..0000000
--- a/sijapi/routers/locate.py
+++ /dev/null
@@ -1,609 +0,0 @@
-'''
-Uses Postgres/PostGIS for for location tracking (data obtained via the companion mobile Pythonista scripts), and for geocoding purposes.
-'''
-from fastapi import APIRouter, HTTPException, Query
-from fastapi.responses import HTMLResponse, JSONResponse
-import requests
-import yaml
-import time
-import pytz
-import traceback
-from datetime import datetime, timezone
-from typing import Union, List
-import asyncio
-import pytz
-import aiohttp
-import folium
-from zoneinfo import ZoneInfo
-import time as timer
-from dateutil.parser import parse as dateutil_parse
-from concurrent.futures import ThreadPoolExecutor
-from functools import partial
-from srtm import get_data
-from pathlib import Path
-from pydantic import BaseModel
-from typing import Optional, Any, Dict, List, Union
-from datetime import datetime, timedelta, time
-from sijapi import L, DB, TZ, NAMED_LOCATIONS, DynamicTZ, GEOLOCATOR
-from sijapi.classes import Location, PyGeolocator
-from sijapi.utilities import haversine
-# from osgeo import gdal
-# import elevation
-
-
-locate = APIRouter()
-
-async def reverse_geocode_local_1(lat, lon, date_time: datetime = datetime.now()) -> Optional[Location]:
- date_time = await localize_datetime(date_time)
- loc = GEOLOCATOR.lookup(lat, lon)
- location = Location(
- latitude = lat,
- longitude = lon,
- elevation = loc['elevation'],
- datetime = date_time,
- city = loc['city'],
- state = loc['state'],
- country = loc['country']
- )
- return location
-
-async def reverse_geocode(latitude: float, longitude: float, elevation: float = None) -> Optional[Location]:
- url = f"https://nominatim.openstreetmap.org/reverse?format=json&lat={latitude}&lon={longitude}"
- L.DEBUG(f"Calling Nominatim API at {url}")
- headers = {
- 'User-Agent': 'sij.law/1.0 (sij@sij.law)', # replace with your app name and email
- }
- try:
- async with aiohttp.ClientSession() as session:
- async with session.get(url, headers=headers) as response:
- response.raise_for_status()
- data = await response.json()
-
- address = data.get("address", {})
- elevation = elevation or await get_elevation(latitude, longitude)
- location = Location(
- latitude=latitude,
- longitude=longitude,
- elevation=elevation,
- datetime=datetime.now(timezone.utc),
- zip=address.get("postcode"),
- street=address.get("road"),
- city=address.get("city"),
- state=address.get("state"),
- country=address.get("country"),
- context={},
- class_=data.get("class"),
- type=data.get("type"),
- name=data.get("name"),
- display_name=data.get("display_name"),
- amenity=address.get("amenity"),
- house_number=address.get("house_number"),
- road=address.get("road"),
- quarter=address.get("quarter"),
- neighbourhood=address.get("neighbourhood"),
- suburb=address.get("suburb"),
- county=address.get("county"),
- country_code=address.get("country_code")
- )
- L.DEBUG(f"Created Location object: {location}")
- return location
- except aiohttp.ClientError as e:
- L.ERR(f"Error: {e}")
- return None
-
-
-
-## NOT YET IMPLEMENTED
-async def geocode(zip_code: Optional[str] = None, latitude: Optional[float] = None, longitude: Optional[float] = None, city: Optional[str] = None, state: Optional[str] = None, country_code: str = 'US') -> Location:
- if (latitude is None or longitude is None) and (zip_code is None) and (city is None or state is None):
- L.ERR(f"Must provide sufficient information for geocoding!")
- return None
-
- try:
- # Establish the database connection
- async with DB.get_connection() as conn:
-
- # Build the SQL query based on the provided parameters
- query = "SELECT id, street, city, state, country, latitude, longitude, zip, elevation, datetime, date, ST_Distance(geom, ST_SetSRID(ST_MakePoint($1, $2), 4326)) AS distance FROM Locations"
-
- conditions = []
- params = []
-
- if latitude is not None and longitude is not None:
- conditions.append("ST_DWithin(geom, ST_SetSRID(ST_MakePoint($1, $2), 4326), 50000)") # 50 km radius
- params.extend([longitude, latitude])
-
- if zip_code:
- conditions.append("zip = $3 AND country = $4")
- params.extend([zip_code, country_code])
-
- if city and state:
- conditions.append("city ILIKE $5 AND state ILIKE $6 AND country = $7")
- params.extend([city, state, country_code])
-
- if conditions:
- query += " WHERE " + " OR ".join(conditions)
-
- query += " ORDER BY distance LIMIT 1;"
-
- L.DEBUG(f"Executing query: {query} with params: {params}")
-
- # Execute the query with the provided parameters
- result = await conn.fetchrow(query, *params)
-
- # Close the connection
- await conn.close()
-
- if result:
- location_info = Location(
- latitude=result['latitude'],
- longitude=result['longitude'],
- datetime=result.get['datetime'],
- zip=result['zip'],
- street=result.get('street', ''),
- city=result['city'],
- state=result['state'],
- country=result['country'],
- elevation=result.get('elevation', 0),
- distance=result.get('distance')
- )
- L.DEBUG(f"Found location: {location_info}")
- return location_info
- else:
- L.DEBUG("No location found with provided parameters.")
- return Location()
-
- except Exception as e:
- L.ERR(f"Error occurred: {e}")
- raise Exception("An error occurred while processing your request")
-
-
-
-
-async def localize_datetime(dt: Union[str, datetime], fetch_loc: bool = False) -> datetime:
- try:
- # Convert string to datetime if necessary
- if isinstance(dt, str):
- dt = dateutil_parse(dt)
- L.DEBUG(f"Converted string '{dt}' to datetime object.")
-
- if not isinstance(dt, datetime):
- raise ValueError("Input must be a string or datetime object.")
-
- # Fetch timezone string
- if fetch_loc:
- loc = await get_last_location()
- tz_str = DynamicTZ.find(loc[0], loc[1]) # Assuming loc is (lat, lon)
- else:
- tz_str = DynamicTZ.last_timezone
-
- L.DEBUG(f"Retrieved timezone string: {tz_str}")
-
- # Convert timezone string to ZoneInfo object
- try:
- tz = ZoneInfo(tz_str)
- except Exception as e:
- L.WARN(f"Invalid timezone string '{tz_str}'. Falling back to UTC. Error: {e}")
- tz = ZoneInfo('UTC')
-
- L.DEBUG(f"Using timezone: {tz}")
-
- # Localize datetime
- if dt.tzinfo is None:
- dt = dt.replace(tzinfo=tz)
- L.DEBUG(f"Localized naive datetime to {tz}")
- elif dt.tzinfo != tz:
- dt = dt.astimezone(tz)
- L.DEBUG(f"Converted datetime from {dt.tzinfo} to {tz}")
-
- return dt
- except ValueError as e:
- L.ERR(f"Error parsing datetime: {e}")
- raise
- except Exception as e:
- L.ERR(f"Unexpected error in localize_datetime: {e}")
- raise ValueError(f"Failed to localize datetime: {e}")
-
-
-async def find_override_locations(lat: float, lon: float) -> Optional[str]:
- # Load the JSON file
- with open(NAMED_LOCATIONS, 'r') as file:
- locations = yaml.safe_load(file)
-
- closest_location = None
- closest_distance = float('inf')
-
- # Iterate through each location entry in the JSON
- for location in locations:
- loc_name = location.get("name")
- loc_lat = location.get("latitude")
- loc_lon = location.get("longitude")
- loc_radius = location.get("radius")
-
- # Calculate distance using haversine
- distance = haversine(lat, lon, loc_lat, loc_lon)
-
- # Check if the distance is within the specified radius
- if distance <= loc_radius:
- if distance < closest_distance:
- closest_distance = distance
- closest_location = loc_name
-
- return closest_location
-
-
-async def get_elevation(latitude: float, longitude: float, unit: str = "m") -> float:
- loop = asyncio.get_running_loop()
-
- # Create a thread pool executor
- with ThreadPoolExecutor() as pool:
- # Run get_data() in a separate thread
- srtm_data = await loop.run_in_executor(pool, get_data)
-
- # Run get_elevation() in a separate thread
- elevation = await loop.run_in_executor(
- pool,
- partial(srtm_data.get_elevation, latitude, longitude)
- )
- if unit == "m":
- return elevation
- elif unit == "km":
- return elevation / 1000
- elif unit == "ft" or unit == "'":
- return elevation * 3.280839895
- else:
- raise ValueError(f"Unsupported unit: {unit}")
-
-async def fetch_locations(start: datetime, end: datetime = None) -> List[Location]:
- start_datetime = await localize_datetime(start)
- if end is None:
- end_datetime = await localize_datetime(start_datetime.replace(hour=23, minute=59, second=59))
- else:
- end_datetime = await localize_datetime(end)
-
- if start_datetime.time() == datetime.min.time() and end_datetime.time() == datetime.min.time():
- end_datetime = end_datetime.replace(hour=23, minute=59, second=59)
-
- L.DEBUG(f"Fetching locations between {start_datetime} and {end_datetime}")
-
- async with DB.get_connection() as conn:
- locations = []
- # Check for records within the specified datetime range
- range_locations = await conn.fetch('''
- SELECT id, datetime,
- ST_X(ST_AsText(location)::geometry) AS longitude,
- ST_Y(ST_AsText(location)::geometry) AS latitude,
- ST_Z(ST_AsText(location)::geometry) AS elevation,
- city, state, zip, street,
- action, device_type, device_model, device_name, device_os
- FROM locations
- WHERE datetime >= $1 AND datetime <= $2
- ORDER BY datetime DESC
- ''', start_datetime.replace(tzinfo=None), end_datetime.replace(tzinfo=None))
-
- L.DEBUG(f"Range locations query returned: {range_locations}")
- locations.extend(range_locations)
-
- if not locations and (end is None or start_datetime.date() == end_datetime.date()):
- location_data = await conn.fetchrow('''
- SELECT id, datetime,
- ST_X(ST_AsText(location)::geometry) AS longitude,
- ST_Y(ST_AsText(location)::geometry) AS latitude,
- ST_Z(ST_AsText(location)::geometry) AS elevation,
- city, state, zip, street,
- action, device_type, device_model, device_name, device_os
- FROM locations
- WHERE datetime < $1
- ORDER BY datetime DESC
- LIMIT 1
- ''', start_datetime.replace(tzinfo=None))
-
- L.DEBUG(f"Fallback query returned: {location_data}")
- if location_data:
- locations.append(location_data)
-
- L.DEBUG(f"Locations found: {locations}")
-
- # Sort location_data based on the datetime field in descending order
- sorted_locations = sorted(locations, key=lambda x: x['datetime'], reverse=True)
-
- # Create Location objects directly from the location data
- location_objects = [
- Location(
- latitude=loc['latitude'],
- longitude=loc['longitude'],
- datetime=loc['datetime'],
- elevation=loc.get('elevation'),
- city=loc.get('city'),
- state=loc.get('state'),
- zip=loc.get('zip'),
- street=loc.get('street'),
- context={
- 'action': loc.get('action'),
- 'device_type': loc.get('device_type'),
- 'device_model': loc.get('device_model'),
- 'device_name': loc.get('device_name'),
- 'device_os': loc.get('device_os')
- }
- ) for loc in sorted_locations if loc['latitude'] is not None and loc['longitude'] is not None
- ]
-
- return location_objects if location_objects else []
-
-# Function to fetch the last location before the specified datetime
-async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
- datetime = await localize_datetime(datetime)
-
- L.DEBUG(f"Fetching last location before {datetime}")
-
- async with DB.get_connection() as conn:
-
- location_data = await conn.fetchrow('''
- SELECT id, datetime,
- ST_X(ST_AsText(location)::geometry) AS longitude,
- ST_Y(ST_AsText(location)::geometry) AS latitude,
- ST_Z(ST_AsText(location)::geometry) AS elevation,
- city, state, zip, street, country,
- action
- FROM locations
- WHERE datetime < $1
- ORDER BY datetime DESC
- LIMIT 1
- ''', datetime.replace(tzinfo=None))
-
- await conn.close()
-
- if location_data:
- L.DEBUG(f"Last location found: {location_data}")
- return Location(**location_data)
- else:
- L.DEBUG("No location found before the specified datetime")
- return None
-
-
-
-@locate.get("/map/start_date={start_date_str}&end_date={end_date_str}", response_class=HTMLResponse)
-async def generate_map_endpoint(start_date_str: str, end_date_str: str):
- try:
- start_date = await localize_datetime(start_date_str)
- end_date = await localize_datetime(end_date_str)
- except ValueError:
- raise HTTPException(status_code=400, detail="Invalid date format")
-
- html_content = await generate_map(start_date, end_date)
- return HTMLResponse(content=html_content)
-
-
-@locate.get("/map", response_class=HTMLResponse)
-async def generate_alltime_map_endpoint():
- try:
- start_date = await localize_datetime(datetime.fromisoformat("2022-01-01"))
- end_date = localize_datetime(datetime.now())
- except ValueError:
- raise HTTPException(status_code=400, detail="Invalid date format")
-
- html_content = await generate_map(start_date, end_date)
- return HTMLResponse(content=html_content)
-
-
-async def generate_map(start_date: datetime, end_date: datetime):
- locations = await fetch_locations(start_date, end_date)
- if not locations:
- raise HTTPException(status_code=404, detail="No locations found for the given date range")
-
- # Create a folium map centered around the first location
- map_center = [locations[0].latitude, locations[0].longitude]
- m = folium.Map(location=map_center, zoom_start=5)
-
- # Add markers for each location
- for loc in locations:
- folium.Marker(
- location=[loc.latitude, loc.longitude],
-            popup=f"{loc.city}, {loc.state}<br>Elevation: {loc.elevation}m<br>Date: {loc.datetime}",
- tooltip=f"{loc.city}, {loc.state}"
- ).add_to(m)
-
- # Save the map to an HTML file and return the HTML content
- map_html = "map.html"
- m.save(map_html)
-
- with open(map_html, 'r') as file:
- html_content = file.read()
-
- return html_content
-
-async def post_location(location: Location):
- L.DEBUG(f"post_location called with {location.datetime}")
-
- async with DB.get_connection() as conn:
- try:
- context = location.context or {}
- action = context.get('action', 'manual')
- device_type = context.get('device_type', 'Unknown')
- device_model = context.get('device_model', 'Unknown')
- device_name = context.get('device_name', 'Unknown')
- device_os = context.get('device_os', 'Unknown')
-
- # Parse and localize the datetime
- localized_datetime = await localize_datetime(location.datetime)
-
-
- await conn.execute('''
- INSERT INTO locations (
- datetime, location, city, state, zip, street, action, device_type, device_model, device_name, device_os,
- class_, type, name, display_name, amenity, house_number, road, quarter, neighbourhood,
- suburb, county, country_code, country
- )
- VALUES ($1, ST_SetSRID(ST_MakePoint($2, $3, $4), 4326), $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15,
- $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26)
- ''', localized_datetime, location.longitude, location.latitude, location.elevation, location.city, location.state,
- location.zip, location.street, action, device_type, device_model, device_name, device_os,
- location.class_, location.type, location.name, location.display_name,
- location.amenity, location.house_number, location.road, location.quarter, location.neighbourhood,
- location.suburb, location.county, location.country_code, location.country)
-
- await conn.close()
- L.INFO(f"Successfully posted location: {location.latitude}, {location.longitude}, {location.elevation} on {localized_datetime}")
- return {
- 'datetime': localized_datetime,
- 'latitude': location.latitude,
- 'longitude': location.longitude,
- 'elevation': location.elevation,
- 'city': location.city,
- 'state': location.state,
- 'zip': location.zip,
- 'street': location.street,
- 'action': action,
- 'device_type': device_type,
- 'device_model': device_model,
- 'device_name': device_name,
- 'device_os': device_os,
- 'class_': location.class_,
- 'type': location.type,
- 'name': location.name,
- 'display_name': location.display_name,
- 'amenity': location.amenity,
- 'house_number': location.house_number,
- 'road': location.road,
- 'quarter': location.quarter,
- 'neighbourhood': location.neighbourhood,
- 'suburb': location.suburb,
- 'county': location.county,
- 'country_code': location.country_code,
- 'country': location.country
- }
- except Exception as e:
- L.ERR(f"Error posting location {e}")
- L.ERR(traceback.format_exc())
- return None
-
-
-
-@locate.post("/locate")
-async def post_locate_endpoint(locations: Union[Location, List[Location]]):
- responses = []
- if isinstance(locations, Location):
- locations = [locations]
-
- for location in locations:
- if not location.datetime:
- location.datetime = datetime.now(timezone.utc).isoformat()
-
- if not location.elevation:
- location.elevation = location.altitude if location.altitude else await get_elevation(location.latitude, location.longitude)
-
- # Ensure context is a dictionary with default values if not provided
- if not location.context:
- location.context = {
- "action": "manual",
- "device_type": "Pythonista",
- "device_model": "Unknown",
- "device_name": "Unknown",
- "device_os": "Unknown"
- }
-
- # L.DEBUG(f"datetime before localization: {location.datetime}")
- location.datetime = await localize_datetime(location.datetime)
- # L.DEBUG(f"datetime after localization: {location.datetime}")
- L.DEBUG(f"Location received for processing: {location}")
- # Perform reverse geocoding
- geocoded_location = await reverse_geocode(location.latitude, location.longitude)
- if geocoded_location:
- L.DEBUG(f"Reverse geocoding result: {geocoded_location}")
- for field in location.__fields__:
- if getattr(location, field) is None:
- setattr(location, field, getattr(geocoded_location, field))
- else:
- L.WARN(f"Geocoding failed!")
- L.DEBUG(f"Final location submitted to database: {location}")
-
- location_entry = await post_location(location)
- if location_entry:
- responses.append({"location_data": location_entry}) # Add weather data if necessary
- else:
- L.WARN(f"Posing location to database appears to have failed.")
-
- return {"message": "Locations and weather updated", "results": responses}
-
-
-
-async def get_last_location() -> Optional[Location]:
- query_datetime = datetime.now(TZ)
- L.DEBUG(f"Query_datetime: {query_datetime}")
-
- this_location = await fetch_last_location_before(query_datetime)
-
- if this_location:
- L.DEBUG(f"location: {this_location}")
- return this_location
-
- return None
-
-@locate.get("/locate", response_model=Location)
-async def get_last_location_endpoint() -> JSONResponse:
- this_location = await get_last_location()
-
- if this_location:
- location_dict = this_location.model_dump()
- location_dict["datetime"] = this_location.datetime.isoformat()
- return JSONResponse(content=location_dict)
- else:
- raise HTTPException(status_code=404, detail="No location found before the specified datetime")
-
-@locate.get("/locate/{datetime_str}", response_model=List[Location])
-async def get_locate(datetime_str: str, all: bool = False):
- try:
- date_time = await localize_datetime(datetime_str)
- except ValueError as e:
- L.ERR(f"Invalid datetime string provided: {datetime_str}")
- return ["ERROR: INVALID DATETIME PROVIDED. USE YYYYMMDDHHmmss or YYYYMMDD format."]
-
- locations = await fetch_locations(date_time)
- if not locations:
- raise HTTPException(status_code=404, detail="No nearby data found for this date and time")
-
- return locations if all else [locations[0]]
-
-
-
-
-
-
-future_elevation = """
-def get_elevation_srtm(latitude, longitude, srtm_file):
- try:
- # Open the SRTM dataset
- dataset = gdal.Open(srtm_file)
-
- # Get the geotransform and band information
- geotransform = dataset.GetGeoTransform()
- band = dataset.GetRasterBand(1)
-
- # Calculate the pixel coordinates from the latitude and longitude
- x = int((longitude - geotransform[0]) / geotransform[1])
- y = int((latitude - geotransform[3]) / geotransform[5])
-
- # Read the elevation value from the SRTM dataset
- elevation = band.ReadAsArray(x, y, 1, 1)[0][0]
-
- # Close the dataset
- dataset = None
-
- return elevation
-
- except Exception as e:
- L.ERR(f"Error: {e}")
- return None
-"""
-
-def get_elevation2(latitude: float, longitude: float) -> float:
- url = f"https://nationalmap.gov/epqs/pqs.php?x={longitude}&y={latitude}&units=Meters&output=json"
-
- try:
- response = requests.get(url)
- data = response.json()
- elevation = data["USGS_Elevation_Point_Query_Service"]["Elevation_Query"]["Elevation"]
- return float(elevation)
- except Exception as e:
- # Handle exceptions (e.g., network errors, API changes) appropriately
- raise RuntimeError(f"Error getting elevation data: {str(e)}")
diff --git a/sijapi/routers/note.py b/sijapi/routers/note.py
index a3d99eb..8aadcec 100644
--- a/sijapi/routers/note.py
+++ b/sijapi/routers/note.py
@@ -18,20 +18,21 @@ from markdownify import markdownify as md
from typing import Optional, Union, Dict, List, Tuple
from urllib.parse import urlparse
from urllib3.util.retry import Retry
+import newspaper
from newspaper import Article
import trafilatura
from readability import Document
from requests.adapters import HTTPAdapter
import re
import os
-from datetime import timedelta, datetime, time as dt_time, date as dt_date
+from datetime import timedelta, datetime as dt_datetime, time as dt_time, date as dt_date
from dateutil.parser import parse as dateutil_parse
from fastapi import HTTPException, status
from pathlib import Path
from fastapi import APIRouter, Query, HTTPException
-from sijapi import L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, ARCHIVE_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, TZ, DynamicTZ
-from sijapi.routers import tts, llm, time, sd, locate, weather, asr, calendar
-from sijapi.routers.locate import Location
+from sijapi import L, OBSIDIAN_VAULT_DIR, OBSIDIAN_RESOURCES_DIR, ARCHIVE_DIR, BASE_URL, OBSIDIAN_BANNER_SCENE, DEFAULT_11L_VOICE, DEFAULT_VOICE, TZ, DynamicTZ, GEO
+from sijapi.routers import cal, loc, tts, llm, time, sd, weather, asr
+from sijapi.routers.loc import Location
from sijapi.utilities import assemble_journal_path, assemble_archive_path, convert_to_12_hour_format, sanitize_filename, convert_degrees_to_cardinal, HOURLY_COLUMNS_MAPPING
@@ -39,16 +40,17 @@ note = APIRouter()
+### Daily Note Builder ###
@note.get("/note/bulk/{dt_start}/{dt_end}")
async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
- start_date = datetime.strptime(dt_start, "%Y-%m-%d")
- end_date = datetime.strptime(dt_end, "%Y-%m-%d")
+ start_date = dt_datetime.strptime(dt_start, "%Y-%m-%d")
+ end_date = dt_datetime.strptime(dt_end, "%Y-%m-%d")
results = []
current_date = start_date
while current_date <= end_date:
- formatted_date = await locate.localize_datetime(current_date)
+ formatted_date = await loc.dt(current_date)
result = await build_daily_note(formatted_date)
results.append(result)
current_date += timedelta(days=1)
@@ -59,13 +61,13 @@ async def build_daily_note_range_endpoint(dt_start: str, dt_end: str):
@note.post("/note/create")
async def build_daily_note_endpoint(
- date_str: Optional[str] = Form(datetime.now().strftime("%Y-%m-%d")),
+ date_str: Optional[str] = Form(dt_datetime.now().strftime("%Y-%m-%d")),
location: Optional[str] = Form(None)
):
lat, lon = None, None
try:
if not date_str:
- date_str = datetime.now().strftime("%Y-%m-%d")
+ date_str = dt_datetime.now().strftime("%Y-%m-%d")
if location:
lat, lon = map(float, location.split(','))
tz = ZoneInfo(DynamicTZ.find(lat, lon))
@@ -75,31 +77,35 @@ async def build_daily_note_endpoint(
except (ValueError, AttributeError, TypeError) as e:
L.WARN(f"Falling back to localized datetime due to error: {e}")
try:
- date_time = locate.localize_datetime(date_str)
- places = await locate.fetch_locations(date_time)
+            date_time = await loc.dt(date_str)
+ places = await loc.fetch_locations(date_time)
lat, lon = places[0].latitude, places[0].longitude
except Exception as e:
return JSONResponse(content={"error": str(e)}, status_code=400)
path = await build_daily_note(date_time, lat, lon)
- return JSONResponse(content={"path": path}, status_code=200)
-async def build_daily_note(date_time: datetime, lat: float = None, lon: float = None):
+ path_str = str(path) # Convert PosixPath to string
+
+ return JSONResponse(content={"path": path_str}, status_code=200)
+
+
+async def build_daily_note(date_time: dt_datetime, lat: float = None, lon: float = None):
'''
Obsidian helper. Takes a datetime and creates a new daily note. Note: it uses the sijapi configuration file to place the daily note and does NOT presently interface with Obsidian's daily note or periodic notes extensions. It is your responsibility to ensure they match.
'''
absolute_path, _ = assemble_journal_path(date_time)
-
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in build_daily_note.")
formatted_day = date_time.strftime("%A %B %d, %Y") # Monday May 27, 2024 formatting
day_before = (date_time - timedelta(days=1)).strftime("%Y-%m-%d %A") # 2024-05-26 Sunday formatting
day_after = (date_time + timedelta(days=1)).strftime("%Y-%m-%d %A") # 2024-05-28 Tuesday formatting
header = f"# [[{day_before}|← ]] {formatted_day} [[{day_after}| →]]\n\n"
if not lat or not lon:
- places = await locate.fetch_locations(date_time)
+ places = await loc.fetch_locations(date_time)
lat, lon = places[0].latitude, places[0].longitude
- location = await locate.reverse_geocode(lat, lon)
+ location = await GEO.code(lat, lon)
timeslips = await build_daily_timeslips(date_time)
@@ -126,7 +132,7 @@ date: "{fm_day}"
banner: "![[{banner_path}]]"
tags:
- daily-note
-created: "{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
+created: "{dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
---
{header}
@@ -153,6 +159,8 @@ created: "{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}"
return absolute_path
+### Daily Note Component Builders ###
+
async def build_daily_timeslips(date):
'''
@@ -166,469 +174,13 @@ async def build_daily_timeslips(date):
return f"![[{relative_path}]]"
-### CLIPPER ###
-@note.post("/clip")
-async def clip_post(
- bg_tasks: BackgroundTasks,
- url: Optional[str] = Form(None),
- source: Optional[str] = Form(None),
- title: Optional[str] = Form(None),
- tts: str = Form('summary'),
- voice: str = Form(DEFAULT_VOICE),
- encoding: str = Form('utf-8')
-):
- markdown_filename = await process_article(bg_tasks, url, title, encoding, source, tts, voice)
- return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
-
-@note.post("/archive")
-async def archive_post(
- url: Optional[str] = Form(None),
- source: Optional[str] = Form(None),
- title: Optional[str] = Form(None),
- encoding: str = Form('utf-8')
-):
- markdown_filename = await process_archive(url, title, encoding, source)
- return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
-
-@note.get("/clip")
-async def clip_get(
- bg_tasks: BackgroundTasks,
- url: str,
- title: Optional[str] = Query(None),
- encoding: str = Query('utf-8'),
- tts: str = Query('summary'),
- voice: str = Query(DEFAULT_VOICE)
-):
- markdown_filename = await process_article(bg_tasks, url, title, encoding, tts=tts, voice=voice)
- return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
-
-@note.post("/note/add")
-async def note_add_endpoint(file: Optional[UploadFile] = File(None), text: Optional[str] = Form(None), source: Optional[str] = Form(None), bg_tasks: BackgroundTasks = None):
- L.DEBUG(f"Received request on /note/add...")
- if not file and not text:
- L.WARN(f"... without any file or text!")
- raise HTTPException(status_code=400, detail="Either text or a file must be provided")
- else:
- result = await process_for_daily_note(file, text, source, bg_tasks)
- L.INFO(f"Result on /note/add: {result}")
- return JSONResponse(result, status_code=204)
-
-async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None):
- now = datetime.now()
- transcription_entry = ""
- file_entry = ""
- if file:
- L.DEBUG("File received...")
- file_content = await file.read()
- audio_io = BytesIO(file_content)
-
- # Improve error handling for file type guessing
- guessed_type = mimetypes.guess_type(file.filename)
- file_type = guessed_type[0] if guessed_type[0] else "application/octet-stream"
-
- L.DEBUG(f"Processing as {file_type}...")
-
- # Extract the main type (e.g., 'audio', 'image', 'video')
- main_type = file_type.split('/')[0]
- subdir = main_type.title() if main_type else "Documents"
-
- absolute_path, relative_path = assemble_journal_path(now, subdir=subdir, filename=file.filename)
- L.DEBUG(f"Destination path: {absolute_path}")
-
- with open(absolute_path, 'wb') as f:
- f.write(file_content)
- L.DEBUG(f"Processing {f.name}...")
-
- if main_type == 'audio':
- transcription = await asr.transcribe_audio(file_path=absolute_path, params=asr.TranscribeParams(model="small-en", language="en", threads=6))
- file_entry = f"![[{relative_path}]]"
- elif main_type == 'image':
- file_entry = f"![[{relative_path}]]"
- else:
- file_entry = f"[Source]({relative_path})"
-
- text_entry = text if text else ""
- L.DEBUG(f"transcription: {transcription_entry}\nfile_entry: {file_entry}\ntext_entry: {text_entry}")
- return await add_to_daily_note(transcription_entry, file_entry, text_entry, now)
-
-async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: datetime = None):
- date_time = date_time or datetime.now()
- note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True)
- time_str = date_time.strftime("%H:%M")
-
- entry_lines = []
- if additional_text and additional_text.strip():
- entry_lines.append(f"\t* {additional_text.strip()}")
- if transcription and transcription.strip():
- entry_lines.append(f"\t* {transcription.strip()}")
- if file_link and file_link.strip():
- entry_lines.append(f"\t\t {file_link.strip()}")
-
- entry = f"\n* **{time_str}**\n" + "\n".join(entry_lines)
-
- # Write the entry to the end of the file
- if note_path.exists():
- with open(note_path, 'a', encoding='utf-8') as note_file:
- note_file.write(entry)
- else:
- date_str = date_time.strftime("%Y-%m-%d")
- frontmatter = f"""---
-date: {date_str}
-tags:
- - notes
----
-
-"""
- content = frontmatter + entry
- # If the file doesn't exist, create it and start with "Notes"
- with open(note_path, 'w', encoding='utf-8') as note_file:
- note_file.write(content)
-
- return entry
-
-async def handle_text(title:str, summary:str, extracted_text:str, date_time: datetime = None):
- date_time = date_time if date_time else datetime.now()
- absolute_path, relative_path = assemble_journal_path(date_time, filename=title, extension=".md", no_timestamp = True)
- with open(absolute_path, "w") as file:
- file.write(f"# {title}\n\n## Summary\n{summary}\n\n## Transcript\n{extracted_text}")
-
- # add_to_daily_note(f"**Uploaded [[{title}]]**: *{summary}*", absolute_path)
-
- return True
-
-
-async def process_document(
- bg_tasks: BackgroundTasks,
- document: File,
- title: Optional[str] = None,
- tts_mode: str = "summary",
- voice: str = DEFAULT_VOICE
-):
- timestamp = datetime.now().strftime('%b %d, %Y at %H:%M')
-
- # Save the document to OBSIDIAN_RESOURCES_DIR
- document_content = await document.read()
- file_path = Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR / document.filename
- with open(file_path, 'wb') as f:
- f.write(document_content)
-
- parsed_content = await llm.extract_text(file_path) # Ensure extract_text is awaited
-
- llm_title, summary = await llm.title_and_summary(parsed_content)
- try:
- readable_title = sanitize_filename(title if title else document.filename)
-
- if tts_mode == "full" or tts_mode == "content" or tts_mode == "body":
- tts_text = parsed_content
- elif tts_mode == "summary" or tts_mode == "excerpt":
- tts_text = summary
- else:
- tts_text = None
-
- frontmatter = f"""---
-title: {readable_title}
-added: {timestamp}
----
-"""
- body = f"# {readable_title}\n\n"
-
- if tts_text:
- try:
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
- audio_filename = f"{datetime_str} {readable_title}"
- audio_path = await tts.generate_speech(
- bg_tasks=bg_tasks,
- text=tts_text,
- voice=voice,
- model="eleven_turbo_v2",
- podcast=True,
- title=audio_filename,
- output_dir=Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR
- )
- audio_ext = Path(audio_path).suffix
- obsidian_link = f"![[{OBSIDIAN_RESOURCES_DIR}/{audio_filename}{audio_ext}]]"
- body += f"{obsidian_link}\n\n"
- except Exception as e:
- L.ERR(f"Failed in the TTS portion of clipping: {e}")
-
- body += f"> [!summary]+\n"
- body += f"> {summary}\n\n"
- body += parsed_content
- markdown_content = frontmatter + body
-
- markdown_filename = f"{readable_title}.md"
- encoding = 'utf-8'
-
- with open(markdown_filename, 'w', encoding=encoding) as md_file:
- md_file.write(markdown_content)
-
- L.INFO(f"Successfully saved to {markdown_filename}")
-
- return markdown_filename
-
- except Exception as e:
- L.ERR(f"Failed to clip: {str(e)}")
- raise HTTPException(status_code=500, detail=str(e))
-
-
-async def process_article(
- bg_tasks: BackgroundTasks,
- url: str,
- title: Optional[str] = None,
- encoding: str = 'utf-8',
- source: Optional[str] = None,
- tts_mode: str = "summary",
- voice: str = DEFAULT_11L_VOICE
-):
-
- timestamp = datetime.now().strftime('%b %d, %Y at %H:%M')
-
- parsed_content = await parse_article(url, source)
- if parsed_content is None:
- return {"error": "Failed to retrieve content"}
-
- readable_title = sanitize_filename(title or parsed_content.get("title") or timestamp)
- markdown_filename, relative_path = assemble_journal_path(datetime.now(), subdir="Articles", filename=readable_title, extension=".md")
-
- try:
- summary = await llm.summarize_text(parsed_content["content"], "Summarize the provided text. Respond with the summary and nothing else. Do not otherwise acknowledge the request. Just provide the requested summary.")
- summary = summary.replace('\n', ' ') # Remove line breaks
-
- if tts_mode == "full" or tts_mode == "content":
- tts_text = parsed_content["content"]
- elif tts_mode == "summary" or tts_mode == "excerpt":
- tts_text = summary
- else:
- tts_text = None
-
- banner_markdown = ''
- try:
- banner_url = parsed_content.get('image', '')
- if banner_url != '':
- banner_image = download_file(banner_url, Path(OBSIDIAN_VAULT_DIR / OBSIDIAN_RESOURCES_DIR))
- if banner_image:
- banner_markdown = f"![[{OBSIDIAN_RESOURCES_DIR}/{banner_image}]]"
-
- except Exception as e:
- L.ERR(f"No image found in article")
-
- authors = ', '.join('[[{}]]'.format(author) for author in parsed_content.get('authors', ['Unknown']))
-
- frontmatter = f"""---
-title: {readable_title}
-authors: {', '.join('[[{}]]'.format(author) for author in parsed_content.get('authors', ['Unknown']))}
-published: {parsed_content.get('date_published', 'Unknown')}
-added: {timestamp}
-excerpt: {parsed_content.get('excerpt', '')}
-banner: "{banner_markdown}"
-tags:
-
-"""
- frontmatter += '\n'.join(f" - {tag}" for tag in parsed_content.get('tags', []))
- frontmatter += '\n---\n'
-
- body = f"# {readable_title}\n\n"
-
- if tts_text:
- datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
- audio_filename = f"{datetime_str} {readable_title}"
- try:
- audio_path = await tts.generate_speech(bg_tasks=bg_tasks, text=tts_text, voice=voice, model="eleven_turbo_v2", podcast=True, title=audio_filename,
- output_dir=Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR)
- audio_ext = Path(audio_path).suffix
- obsidian_link = f"![[{OBSIDIAN_RESOURCES_DIR}/{audio_filename}{audio_ext}]]"
- body += f"{obsidian_link}\n\n"
- except Exception as e:
- L.ERR(f"Failed to generate TTS for np3k. {e}")
-
- try:
- body += f"by {authors} in [{parsed_content.get('domain', urlparse(url).netloc.replace('www.', ''))}]({url}).\n\n"
- body += f"> [!summary]+\n"
- body += f"> {summary}\n\n"
- body += parsed_content["content"]
- markdown_content = frontmatter + body
-
- except Exception as e:
- L.ERR(f"Failed to combine elements of article markdown.")
-
- try:
- with open(markdown_filename, 'w', encoding=encoding) as md_file:
- md_file.write(markdown_content)
-
- L.INFO(f"Successfully saved to {markdown_filename}")
- add_to_daily_note
- return markdown_filename
-
- except Exception as e:
- L.ERR(f"Failed to write markdown file")
- raise HTTPException(status_code=500, detail=str(e))
-
- except Exception as e:
- L.ERR(f"Failed to clip {url}: {str(e)}")
- raise HTTPException(status_code=500, detail=str(e))
-
-
-async def parse_article(url: str, source: Optional[str] = None):
- source = source if source else trafilatura.fetch_url(url)
- traf = trafilatura.extract_metadata(filecontent=source, default_url=url)
-
- # Pass the HTML content to newspaper3k:
- np3k = Article(url)
- np3k.set_html(source)
- np3k.parse()
-
- L.INFO(f"Parsed {np3k.title}")
-
-
- title = (np3k.title or traf.title) or url
- authors = np3k.authors or traf.author
- authors = (authors if isinstance(authors, List) else [authors])
- date = np3k.publish_date or traf.date
- try:
- date = await locate.localize_datetime(date)
- except:
- L.DEBUG(f"Failed to localize {date}")
- date = await locate.localize_datetime(datetime.now())
- excerpt = np3k.meta_description or traf.description
- content = trafilatura.extract(source, output_format="markdown", include_comments=False) or np3k.text
- image = np3k.top_image or traf.image
- domain = traf.sitename or urlparse(url).netloc.replace('www.', '').title()
- tags = np3k.meta_keywords or traf.categories or traf.tags
- tags = tags if isinstance(tags, List) else [tags]
-
- return {
- 'title': title.replace(" ", " "),
- 'authors': authors,
- 'date': date.strftime("%b %d, %Y at %H:%M"),
- 'excerpt': excerpt,
- 'content': content,
- 'image': image,
- 'url': url,
- 'domain': domain,
- 'tags': np3k.meta_keywords
- }
-
-
-async def html_to_markdown(url: str = None, source: str = None) -> Optional[str]:
- if source:
- html_content = source
- elif url:
- async with aiohttp.ClientSession() as session:
- async with session.get(url) as response:
- html_content = await response.text()
- else:
- L.ERR(f"Unable to convert nothing to markdown.")
- return None
-
- # Use readability to extract the main content
- doc = Document(html_content)
- cleaned_html = doc.summary()
-
- # Parse the cleaned HTML with BeautifulSoup for any additional processing
- soup = BeautifulSoup(cleaned_html, 'html.parser')
-
- # Remove any remaining unwanted elements
- for element in soup(['script', 'style']):
- element.decompose()
-
- # Convert to markdown
- markdown_content = md(str(soup), heading_style="ATX")
-
- return markdown_content
-
-
-async def process_archive(
- url: str,
- title: Optional[str] = None,
- encoding: str = 'utf-8',
- source: Optional[str] = None,
-) -> Path:
- timestamp = datetime.now().strftime('%b %d, %Y at %H:%M')
- readable_title = title if title else f"{url} - {timestamp}"
-
- content = await html_to_markdown(url, source)
- if content is None:
- raise HTTPException(status_code=400, detail="Failed to convert content to markdown")
-
- markdown_path, relative_path = assemble_archive_path(readable_title, ".md")
-
- markdown_content = f"---\n"
- markdown_content += f"title: {readable_title}\n"
- markdown_content += f"added: {timestamp}\n"
- markdown_content += f"url: {url}"
- markdown_content += f"date: {datetime.now().strftime('%Y-%m-%d')}"
- markdown_content += f"---\n\n"
- markdown_content += f"# {readable_title}\n\n"
- markdown_content += f"Clipped from [{url}]({url}) on {timestamp}"
- markdown_content += content
-
- try:
- markdown_path.parent.mkdir(parents=True, exist_ok=True)
- with open(markdown_path, 'w', encoding=encoding) as md_file:
- md_file.write(markdown_content)
- L.DEBUG(f"Successfully saved to {markdown_path}")
- return markdown_path
- except Exception as e:
- L.WARN(f"Failed to write markdown file: {str(e)}")
- return None
-
-def download_file(url, folder):
- os.makedirs(folder, exist_ok=True)
- filename = str(uuid.uuid4()) + os.path.splitext(urlparse(url).path)[-1]
- filepath = os.path.join(folder, filename)
-
- session = requests.Session()
- retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504])
- session.mount('http://', HTTPAdapter(max_retries=retries))
- session.mount('https://', HTTPAdapter(max_retries=retries))
-
- headers = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
- }
-
- try:
- response = session.get(url, headers=headers, timeout=10)
- if response.status_code == 200:
- if 'image' in response.headers.get('Content-Type', ''):
- with open(filepath, 'wb') as f:
- f.write(response.content)
- else:
- L.ERR(f"Failed to download image: {url}, invalid content type: {response.headers.get('Content-Type')}")
- return None
- else:
- L.ERR(f"Failed to download image: {url}, status code: {response.status_code}")
- return None
- except Exception as e:
- L.ERR(f"Failed to download image: {url}, error: {str(e)}")
- return None
- return filename
-
-def copy_file(local_path, folder):
- os.makedirs(folder, exist_ok=True)
- filename = os.path.basename(local_path)
- destination_path = os.path.join(folder, filename)
- shutil.copy(local_path, destination_path)
- return filename
-
-
-async def save_file(file: UploadFile, folder: Path) -> Path:
- file_path = folder / f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_{file.filename}"
- with open(file_path, 'wb') as f:
- shutil.copyfileobj(file.file, f)
- return file_path
-
-
-
-
-### FRONTMATTER, BANNER
-
@note.put("/note/update_frontmatter")
async def update_frontmatter_endpoint(date: str, key: str, value: str):
- date_time = datetime.strptime(date, "%Y-%m-%d")
+ date_time = dt_datetime.strptime(date, "%Y-%m-%d")
result = await update_frontmatter(date_time, key, value)
return result
-async def update_frontmatter(date_time: datetime, key: str, value: str):
+async def update_frontmatter(date_time: dt_datetime, key: str, value: str):
# Parse the date and format paths
file_path, relative_path = assemble_journal_path(date_time)
@@ -682,14 +234,18 @@ async def banner_endpoint(dt: str, location: str = None, mood: str = None, other
Endpoint (POST) that generates a new banner image for the Obsidian daily note for a specified date, taking into account optional additional information, then updates the frontmatter if necessary.
'''
L.DEBUG(f"banner_endpoint requested with date: {dt} ({type(dt)})")
- date_time = await locate.localize_datetime(dt)
+ date_time = await loc.dt(dt)
L.DEBUG(f"date_time after localization: {date_time} ({type(date_time)})")
jpg_path = await generate_banner(date_time, location, mood=mood, other_context=other_context)
return jpg_path
-async def get_note(date_time: datetime):
- date_time = await locate.localize_datetime(date_time);
+
+
+
+
+async def get_note(date_time: dt_datetime):
+ date_time = await loc.dt(date_time);
absolute_path, local_path = assemble_journal_path(date_time, filename = "Notes", extension = ".md", no_timestamp = True)
if absolute_path.is_file():
@@ -697,7 +253,7 @@ async def get_note(date_time: datetime):
content = file.read()
return content if content else None
-async def sentiment_analysis(date_time: datetime):
+async def sentiment_analysis(date_time: dt_datetime):
most_recent_note = await get_note(date_time)
most_recent_note = most_recent_note or await get_note(date_time - timedelta(days=1))
if most_recent_note:
@@ -711,20 +267,20 @@ async def sentiment_analysis(date_time: datetime):
async def generate_banner(dt, location: Location = None, forecast: str = None, mood: str = None, other_context: str = None):
# L.DEBUG(f"Location: {location}, forecast: {forecast}, mood: {mood}, other_context: {other_context}")
- date_time = await locate.localize_datetime(dt)
+ date_time = await loc.dt(dt)
L.DEBUG(f"generate_banner called with date_time: {date_time}")
destination_path, local_path = assemble_journal_path(date_time, filename="Banner", extension=".jpg", no_timestamp = True)
L.DEBUG(f"destination path generated: {destination_path}")
if not location:
- locations = await locate.fetch_locations(date_time)
+ locations = await loc.fetch_locations(date_time)
if locations:
location = locations[0]
display_name = "Location: "
if location:
lat, lon = location.latitude, location.longitude
- override_location = await locate.find_override_locations(lat, lon)
+ override_location = await loc.find_override_locations(lat, lon)
display_name += f"{override_location}, " if override_location else ""
if location.display_name:
display_name += f"{location.display_name}"
@@ -739,7 +295,7 @@ async def generate_banner(dt, location: Location = None, forecast: str = None, m
display_name += f"{location.country} " if location.country else ""
if display_name == "Location: ":
- geocoded_location = await locate.reverse_geocode(lat, lon)
+ geocoded_location = await GEO.code(lat, lon)
if geocoded_location.display_name or geocoded_location.city or geocoded_location.country:
return await generate_banner(dt, geocoded_location, forecast, mood, other_context)
else:
@@ -757,7 +313,7 @@ async def generate_banner(dt, location: Location = None, forecast: str = None, m
elif sentiment and not mood: mood = f"Mood: {sentiment}"
else: mood = ""
- events = await calendar.get_events(date_time, date_time)
+ events = await cal.get_events(date_time, date_time)
formatted_events = []
for event in events:
event_str = event.get('name')
@@ -792,7 +348,8 @@ async def note_weather_get(
):
try:
- date_time = datetime.now() if date == "0" else await locate.localize_datetime(date)
+ date_time = dt_datetime.now() if date == "0" else await loc.dt(date)
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our dt_datetime in note_weather_get.")
L.DEBUG(f"date: {date} .. date_time: {date_time}")
content = await update_dn_weather(date_time) #, lat, lon)
return JSONResponse(content={"forecast": content}, status_code=200)
@@ -807,26 +364,27 @@ async def note_weather_get(
@note.post("/update/note/{date}")
async def post_update_daily_weather_and_calendar_and_timeslips(date: str) -> PlainTextResponse:
- date_time = await locate.localize_datetime(date)
+ date_time = await loc.dt(date)
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our dt_datetime in post_update_daily_weather_and_calendar_and_timeslips.")
await update_dn_weather(date_time)
await update_daily_note_events(date_time)
await build_daily_timeslips(date_time)
return f"[Refresh]({BASE_URL}/update/note/{date_time.strftime('%Y-%m-%d')}"
-async def update_dn_weather(date_time: datetime, lat: float = None, lon: float = None):
+async def update_dn_weather(date_time: dt_datetime, lat: float = None, lon: float = None):
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in update_dn_weather.")
try:
if lat and lon:
- place = locate.reverse_geocode(lat, lon)
+ place = GEO.code(lat, lon)
else:
L.DEBUG(f"Updating weather for {date_time}")
-
- places = await locate.fetch_locations(date_time)
+ places = await loc.fetch_locations(date_time)
place = places[0]
lat = place.latitude
lon = place.longitude
- city = await locate.find_override_locations(lat, lon)
+ city = await loc.find_override_locations(lat, lon)
if city:
L.INFO(f"Using override location: {city}")
@@ -836,7 +394,7 @@ async def update_dn_weather(date_time: datetime, lat: float = None, lon: float =
L.INFO(f"City in data: {city}")
else:
- loc = await locate.reverse_geocode(lat, lon)
+ loc = await GEO.code(lat, lon)
L.DEBUG(f"loc: {loc}")
city = loc.name
city = city if city else loc.city
@@ -849,7 +407,7 @@ async def update_dn_weather(date_time: datetime, lat: float = None, lon: float =
L.DEBUG(f"Journal path: absolute_path={absolute_path}, relative_path={relative_path}")
try:
- L.DEBUG(f"passing date_time {date_time}, {lat}/{lon} into fetch_and_store")
+ L.DEBUG(f"passing date_time {date_time.strftime('%Y-%m-%d %H:%M:%S')}, {lat}/{lon} into fetch_and_store")
day = await weather.get_weather(date_time, lat, lon)
L.DEBUG(f"day information obtained from get_weather: {day}")
if day:
@@ -887,13 +445,13 @@ async def update_dn_weather(date_time: datetime, lat: float = None, lon: float =
date_str = date_time.strftime("%Y-%m-%d")
- now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+ now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
detailed_forecast = (
f"---\n"
f"date: {date_str}\n"
- f"latitude: {lat}"
- f"longitude: {lon}"
+ f"latitude: {lat}\n"
+ f"longitude: {lon}\n"
f"tags:\n"
f" - weather\n"
f"updated: {now}\n"
@@ -951,7 +509,6 @@ async def update_dn_weather(date_time: datetime, lat: float = None, lon: float =
L.ERR(traceback.format_exc())
raise HTTPException(status_code=999, detail=f"Error: {e}")
-
except ValueError as ve:
L.ERR(f"Value error in update_dn_weather: {str(ve)}")
L.ERR(traceback.format_exc())
@@ -970,7 +527,6 @@ def format_hourly_time(hour):
L.ERR(f"Error in format_hourly_time: {str(e)}")
L.ERR(traceback.format_exc())
return ""
-
def format_hourly_icon(hour, sunrise, sunset):
try:
@@ -1121,10 +677,6 @@ def get_weather_emoji(weather_condition):
return "⛅"
else:
return "🌡️" # Default emoji for unclassified weather
-
-
-### CALENDAR ###
-
async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[str, str]]]]) -> str:
def remove_characters(s: str) -> str:
@@ -1134,10 +686,10 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
return s
date_str = event_data["date"]
- now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+ now = dt_datetime.now().strftime("%Y-%m-%d %H:%M:%S")
events_markdown = []
- event_data["events"] = sorted(event_data["events"], key=lambda event: (not event['all_day'], datetime.strptime(event['start'], "%H:%M")), reverse=False)
+ event_data["events"] = sorted(event_data["events"], key=lambda event: (not event['all_day'], dt_datetime.strptime(event['start'], "%H:%M")), reverse=False)
total_events = len(event_data["events"])
event_markdown = f"```ad-events"
@@ -1156,7 +708,7 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
event_name = event['name'][:80]
markdown_name = f"[{event_name}]({url})"
- if (event['all_day']) or (event['start'] == event['end'] == "00:00") or (datetime.combine(dt_date.min, datetime.strptime(event['end'], "%H:%M").time()) - datetime.combine(dt_date.min, datetime.strptime(event['start'], "%H:%M").time()) >= timedelta(hours=23, minutes=59)):
+ if (event['all_day']) or (event['start'] == event['end'] == "00:00") or (dt_datetime.combine(dt_date.min, dt_datetime.strptime(event['end'], "%H:%M").time()) - dt_datetime.combine(dt_date.min, dt_datetime.strptime(event['start'], "%H:%M").time()) >= timedelta(hours=23, minutes=59)):
event_markdown += f"\n - [ ] **{markdown_name}** (All day)"
else:
@@ -1215,14 +767,14 @@ async def format_events_as_markdown(event_data: Dict[str, Union[str, List[Dict[s
@note.get("/note/events", response_class=PlainTextResponse)
async def note_events_endpoint(date: str = Query(None)):
- date_time = await locate.localize_datetime(date) if date else datetime.now(TZ)
+ date_time = await loc.dt(date) if date else await loc.dt(dt_datetime.now())
response = await update_daily_note_events(date_time)
return PlainTextResponse(content=response, status_code=200)
-async def update_daily_note_events(date_time: datetime):
+async def update_daily_note_events(date_time: dt_datetime):
L.DEBUG(f"Looking up events on date: {date_time.strftime('%Y-%m-%d')}")
try:
- events = await calendar.get_events(date_time, date_time)
+ events = await cal.get_events(date_time, date_time)
L.DEBUG(f"Raw events: {events}")
event_data = {
"date": date_time.strftime('%Y-%m-%d'),
@@ -1242,3 +794,537 @@ async def update_daily_note_events(date_time: datetime):
L.ERR(f"Error processing events: {e}")
raise HTTPException(status_code=500, detail=str(e))
+
+
+
+
+### CLIPPER ###
+@note.post("/clip")
+async def clip_post(
+    bg_tasks: BackgroundTasks,
+    url: Optional[str] = Form(None),
+    source: Optional[str] = Form(None),
+    title: Optional[str] = Form(None),
+    tts: str = Form('summary'),
+    voice: str = Form(DEFAULT_VOICE),
+    encoding: str = Form('utf-8')
+):
+    markdown_filename = await process_article2(bg_tasks, url, title, encoding, source, tts, voice)  # process_article2 takes (url, title, encoding, source, ...); process_article now takes a parsed Article
+    return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
+
+@note.post("/archive")
+async def archive_post(
+ url: Optional[str] = Form(None),
+ source: Optional[str] = Form(None),
+ title: Optional[str] = Form(None),
+ encoding: str = Form('utf-8')
+):
+ markdown_filename = await process_archive(url, title, encoding, source)
+ return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
+
+@note.get("/clip")
+async def clip_get(
+    bg_tasks: BackgroundTasks,
+    url: str,
+    title: Optional[str] = Query(None),
+    encoding: str = Query('utf-8'),
+    tts: str = Query('summary'),
+    voice: str = Query(DEFAULT_VOICE)
+):
+    markdown_filename = await process_article2(bg_tasks, url, title, encoding, tts_mode=tts, voice=voice)  # keyword is tts_mode, not tts; URL-based clipping goes through process_article2
+    return {"message": "Clip saved successfully", "markdown_filename": markdown_filename}
+
+@note.post("/note/add")
+async def note_add_endpoint(file: Optional[UploadFile] = File(None), text: Optional[str] = Form(None), source: Optional[str] = Form(None), bg_tasks: BackgroundTasks = None):
+    L.DEBUG(f"Received request on /note/add...")
+    if not file and not text:
+        L.WARN(f"... without any file or text!")
+        raise HTTPException(status_code=400, detail="Either text or a file must be provided")
+    else:
+        result = await process_for_daily_note(file, text, source, bg_tasks)
+        L.INFO(f"Result on /note/add: {result}")
+        return JSONResponse(result, status_code=200)  # 204 No Content must not carry a body; 200 lets the JSON result reach the client
+
+async def process_for_daily_note(file: Optional[UploadFile] = File(None), text: Optional[str] = None, source: Optional[str] = None, bg_tasks: BackgroundTasks = None):
+    now = dt_datetime.now()
+    transcription_entry = ""
+    file_entry = ""
+    if file:
+        L.DEBUG("File received...")
+        file_content = await file.read()
+        audio_io = BytesIO(file_content)
+
+        # Improve error handling for file type guessing
+        guessed_type = mimetypes.guess_type(file.filename)
+        file_type = guessed_type[0] if guessed_type[0] else "application/octet-stream"
+
+        L.DEBUG(f"Processing as {file_type}...")
+
+        # Extract the main type (e.g., 'audio', 'image', 'video')
+        main_type = file_type.split('/')[0]
+        subdir = main_type.title() if main_type else "Documents"
+
+        absolute_path, relative_path = assemble_journal_path(now, subdir=subdir, filename=file.filename)
+        L.DEBUG(f"Destination path: {absolute_path}")
+
+        with open(absolute_path, 'wb') as f:
+            f.write(file_content)
+        L.DEBUG(f"Processing {f.name}...")
+
+        if main_type == 'audio':
+            transcription_entry = await asr.transcribe_audio(file_path=absolute_path, params=asr.TranscribeParams(model="small-en", language="en", threads=6))  # assign to transcription_entry so the transcript reaches the note (was dropped into an unused local)
+            file_entry = f"![[{relative_path}]]"
+        elif main_type == 'image':
+            file_entry = f"![[{relative_path}]]"
+        else:
+            file_entry = f"[Source]({relative_path})"
+
+    text_entry = text if text else ""
+    L.DEBUG(f"transcription: {transcription_entry}\nfile_entry: {file_entry}\ntext_entry: {text_entry}")
+    return await add_to_daily_note(transcription_entry, file_entry, text_entry, now)
+
+async def add_to_daily_note(transcription: str = None, file_link: str = None, additional_text: str = None, date_time: dt_datetime = None):
+ date_time = date_time or dt_datetime.now()
+ note_path, _ = assemble_journal_path(date_time, filename='Notes', extension=".md", no_timestamp = True)
+ time_str = date_time.strftime("%H:%M")
+
+ entry_lines = []
+ if additional_text and additional_text.strip():
+ entry_lines.append(f"\t* {additional_text.strip()}")
+ if transcription and transcription.strip():
+ entry_lines.append(f"\t* {transcription.strip()}")
+ if file_link and file_link.strip():
+ entry_lines.append(f"\t\t {file_link.strip()}")
+
+ entry = f"\n* **{time_str}**\n" + "\n".join(entry_lines)
+
+ # Write the entry to the end of the file
+ if note_path.exists():
+ with open(note_path, 'a', encoding='utf-8') as note_file:
+ note_file.write(entry)
+ else:
+ date_str = date_time.strftime("%Y-%m-%d")
+ frontmatter = f"""---
+date: {date_str}
+tags:
+ - notes
+---
+
+"""
+ content = frontmatter + entry
+ # If the file doesn't exist, create it and start with "Notes"
+ with open(note_path, 'w', encoding='utf-8') as note_file:
+ note_file.write(content)
+
+ return entry
+
+
+
+async def process_document(
+ bg_tasks: BackgroundTasks,
+ document: File,
+ title: Optional[str] = None,
+ tts_mode: str = "summary",
+ voice: str = DEFAULT_VOICE
+):
+ timestamp = dt_datetime.now().strftime('%b %d, %Y at %H:%M')
+
+ # Save the document to OBSIDIAN_RESOURCES_DIR
+ document_content = await document.read()
+ file_path = Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR / document.filename
+ with open(file_path, 'wb') as f:
+ f.write(document_content)
+
+ parsed_content = await llm.extract_text(file_path) # Ensure extract_text is awaited
+
+ llm_title, summary = await llm.title_and_summary(parsed_content)
+ try:
+ readable_title = sanitize_filename(title if title else document.filename)
+
+ if tts_mode == "full" or tts_mode == "content" or tts_mode == "body":
+ tts_text = parsed_content
+ elif tts_mode == "summary" or tts_mode == "excerpt":
+ tts_text = summary
+ else:
+ tts_text = None
+
+ frontmatter = f"""---
+title: {readable_title}
+added: {timestamp}
+---
+"""
+ body = f"# {readable_title}\n\n"
+
+ if tts_text:
+ try:
+ datetime_str = dt_datetime.now().strftime("%Y%m%d%H%M%S")
+ audio_filename = f"{datetime_str} {readable_title}"
+ audio_path = await tts.generate_speech(
+ bg_tasks=bg_tasks,
+ text=tts_text,
+ voice=voice,
+ model="eleven_turbo_v2",
+ podcast=True,
+ title=audio_filename,
+ output_dir=Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR
+ )
+ audio_ext = Path(audio_path).suffix
+ obsidian_link = f"![[{OBSIDIAN_RESOURCES_DIR}/{audio_filename}{audio_ext}]]"
+ body += f"{obsidian_link}\n\n"
+ except Exception as e:
+ L.ERR(f"Failed in the TTS portion of clipping: {e}")
+
+ body += f"> [!summary]+\n"
+ body += f"> {summary}\n\n"
+ body += parsed_content
+ markdown_content = frontmatter + body
+
+ markdown_filename = f"{readable_title}.md"
+ encoding = 'utf-8'
+
+ with open(markdown_filename, 'w', encoding=encoding) as md_file:
+ md_file.write(markdown_content)
+
+ L.INFO(f"Successfully saved to {markdown_filename}")
+
+ return markdown_filename
+
+ except Exception as e:
+ L.ERR(f"Failed to clip: {str(e)}")
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+
+async def process_article(
+    bg_tasks: BackgroundTasks,
+    parsed_content: Article,
+    tts_mode: str = "summary",
+    voice: str = DEFAULT_11L_VOICE
+):
+    timestamp = dt_datetime.now().strftime('%b %d, %Y at %H:%M')
+
+    readable_title = sanitize_filename(parsed_content.title or timestamp)
+    markdown_filename, relative_path = assemble_journal_path(dt_datetime.now(), subdir="Articles", filename=readable_title, extension=".md")
+
+    try:
+        summary = await llm.summarize_text(parsed_content.clean_doc, "Summarize the provided text. Respond with the summary and nothing else. Do not otherwise acknowledge the request. Just provide the requested summary.")
+        summary = summary.replace('\n', ' ') # Remove line breaks
+
+        if tts_mode == "full" or tts_mode == "content":
+            tts_text = parsed_content.clean_doc
+        elif tts_mode == "summary" or tts_mode == "excerpt":
+            tts_text = summary
+        else:
+            tts_text = None
+
+        banner_markdown = ''
+        try:
+            banner_url = parsed_content.top_image
+            if banner_url != '':
+                banner_image = download_file(banner_url, Path(OBSIDIAN_VAULT_DIR / OBSIDIAN_RESOURCES_DIR))
+                if banner_image:
+                    banner_markdown = f"![[{OBSIDIAN_RESOURCES_DIR}/{banner_image}]]"
+
+        except Exception as e:
+            L.ERR(f"No image found in article")
+
+        authors = ', '.join('[[{}]]'.format(author) for author in parsed_content.authors)
+        published_date = parsed_content.publish_date
+        frontmatter = f"""---
+title: {readable_title}
+authors: {authors}
+published: {published_date}
+added: {timestamp}
+banner: "{banner_markdown}"
+tags:
+
+"""
+        frontmatter += '\n'.join(f" - {tag}" for tag in parsed_content.tags)
+        frontmatter += '\n---\n'
+
+        body = f"# {readable_title}\n\n"
+        if tts_text:
+            audio_filename = f"{published_date} {readable_title}"  # NOTE(review): str(datetime) contains ':' — may be an invalid filename on some filesystems; confirm
+            try:
+                audio_path = await tts.generate_speech(bg_tasks=bg_tasks, text=tts_text, voice=voice, model="eleven_turbo_v2", podcast=True, title=audio_filename,
+                output_dir=Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR)
+                audio_ext = Path(audio_path).suffix
+                obsidian_link = f"![[{OBSIDIAN_RESOURCES_DIR}/{audio_filename}{audio_ext}]]"
+                body += f"{obsidian_link}\n\n"
+            except Exception as e:
+                L.ERR(f"Failed to generate TTS for np3k. {e}")
+
+        try:
+            body += f"by {authors} in {parsed_content.canonical_link}.\n\n" # update with method for getting the newspaper name
+            body += f"> [!summary]+\n"
+            body += f"> {summary}\n\n"
+            body += parsed_content.text  # Article is not subscriptable; .text holds the markdown body set in parse_article
+            markdown_content = frontmatter + body
+
+        except Exception as e:
+            L.ERR(f"Failed to combine elements of article markdown.")
+
+        try:
+            with open(markdown_filename, 'w') as md_file:
+                md_file.write(markdown_content)
+
+            L.INFO(f"Successfully saved to {markdown_filename}")
+            # add_to_daily_note  # bare name was a no-op expression; left disabled pending a real call
+            return markdown_filename
+
+        except Exception as e:
+            L.ERR(f"Failed to write markdown file")
+            raise HTTPException(status_code=500, detail=str(e))
+
+    except Exception as e:
+        L.ERR(f"Failed to clip: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+async def process_article2(
+    bg_tasks: BackgroundTasks,
+    url: str,
+    title: Optional[str] = None,
+    encoding: str = 'utf-8',
+    source: Optional[str] = None,
+    tts_mode: str = "summary",
+    voice: str = DEFAULT_11L_VOICE
+):
+
+    timestamp = dt_datetime.now().strftime('%b %d, %Y at %H:%M')
+
+    parsed_content = await parse_article(url, source)
+    if parsed_content is None:
+        return {"error": "Failed to retrieve content"}
+
+    readable_title = sanitize_filename(title or parsed_content.title or timestamp)  # parse_article now returns an Article, not a dict — attribute access throughout
+    markdown_filename, relative_path = assemble_journal_path(dt_datetime.now(), subdir="Articles", filename=readable_title, extension=".md")
+
+    try:
+        summary = await llm.summarize_text(parsed_content.text, "Summarize the provided text. Respond with the summary and nothing else. Do not otherwise acknowledge the request. Just provide the requested summary.")
+        summary = summary.replace('\n', ' ') # Remove line breaks
+
+        if tts_mode == "full" or tts_mode == "content":
+            tts_text = parsed_content.text
+        elif tts_mode == "summary" or tts_mode == "excerpt":
+            tts_text = summary
+        else:
+            tts_text = None
+
+        banner_markdown = ''
+        try:
+            banner_url = parsed_content.top_image
+            if banner_url != '':
+                banner_image = download_file(banner_url, Path(OBSIDIAN_VAULT_DIR / OBSIDIAN_RESOURCES_DIR))
+                if banner_image:
+                    banner_markdown = f"![[{OBSIDIAN_RESOURCES_DIR}/{banner_image}]]"
+
+        except Exception as e:
+            L.ERR(f"No image found in article")
+
+        authors = ', '.join('[[{}]]'.format(author) for author in (parsed_content.authors or ['Unknown']))
+
+        frontmatter = f"""---
+title: {readable_title}
+authors: {authors}
+published: {parsed_content.publish_date}
+added: {timestamp}
+excerpt: {parsed_content.meta_description or ''}
+banner: "{banner_markdown}"
+tags:
+
+"""
+        frontmatter += '\n'.join(f" - {tag}" for tag in (parsed_content.tags or []))
+        frontmatter += '\n---\n'
+
+        body = f"# {readable_title}\n\n"
+
+        if tts_text:
+            datetime_str = dt_datetime.now().strftime("%Y%m%d%H%M%S")
+            audio_filename = f"{datetime_str} {readable_title}"
+            try:
+                audio_path = await tts.generate_speech(bg_tasks=bg_tasks, text=tts_text, voice=voice, model="eleven_turbo_v2", podcast=True, title=audio_filename,
+                output_dir=Path(OBSIDIAN_VAULT_DIR) / OBSIDIAN_RESOURCES_DIR)
+                audio_ext = Path(audio_path).suffix
+                obsidian_link = f"![[{OBSIDIAN_RESOURCES_DIR}/{audio_filename}{audio_ext}]]"
+                body += f"{obsidian_link}\n\n"
+            except Exception as e:
+                L.ERR(f"Failed to generate TTS for np3k. {e}")
+
+        try:
+            body += f"by {authors} in [{parsed_content.source_url or urlparse(url).netloc.replace('www.', '')}]({url}).\n\n"
+            body += f"> [!summary]+\n"
+            body += f"> {summary}\n\n"
+            body += parsed_content.text  # Article is not subscriptable; .text holds the markdown body set in parse_article
+            markdown_content = frontmatter + body
+
+        except Exception as e:
+            L.ERR(f"Failed to combine elements of article markdown.")
+
+        try:
+            with open(markdown_filename, 'w', encoding=encoding) as md_file:
+                md_file.write(markdown_content)
+
+            L.INFO(f"Successfully saved to {markdown_filename}")
+            # add_to_daily_note  # bare name was a no-op expression; left disabled pending a real call
+            return markdown_filename
+
+        except Exception as e:
+            L.ERR(f"Failed to write markdown file")
+            raise HTTPException(status_code=500, detail=str(e))
+
+    except Exception as e:
+        L.ERR(f"Failed to clip {url}: {str(e)}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+
async def parse_article(url: str, source: Optional[str] = None) -> Article:
    """Parse an article with newspaper3k, backfilling metadata via trafilatura.

    Parameters:
        url: canonical URL of the article; also used as a title fallback.
        source: optional pre-fetched HTML; when absent the page is fetched
            with trafilatura.fetch_url.

    Returns:
        A newspaper3k Article whose metadata fields have been merged with
        trafilatura's extraction, plus an `additional_data` dict carrying
        excerpt/domain/tags/content for downstream markdown assembly.
    """
    source = source if source else trafilatura.fetch_url(url)
    traf = trafilatura.extract_metadata(filecontent=source, default_url=url)

    # Create and parse the newspaper3k Article
    article = Article(url)
    article.set_html(source)
    article.parse()

    L.INFO(f"Parsed {article.title}")

    # Update or set properties based on trafilatura and additional processing.
    # newspaper3k values win; trafilatura fills the gaps.
    article.title = article.title or traf.title or url
    article.authors = article.authors or (traf.author if isinstance(traf.author, list) else [traf.author])

    article.publish_date = article.publish_date or traf.date
    try:
        article.publish_date = await loc.dt(article.publish_date, "UTC")
    except Exception:
        # A bare `except:` here would also trap SystemExit/KeyboardInterrupt;
        # only ordinary failures should fall back to "now".
        L.DEBUG(f"Failed to localize {article.publish_date}")
        article.publish_date = await loc.dt(dt_datetime.now(), "UTC")

    article.meta_description = article.meta_description or traf.description
    # Prefer trafilatura's markdown rendering of the body text when available.
    article.text = trafilatura.extract(source, output_format="markdown", include_comments=False) or article.text
    article.top_image = article.top_image or traf.image
    article.source_url = traf.sitename or urlparse(url).netloc.replace('www.', '').title()
    article.meta_keywords = article.meta_keywords or traf.categories or traf.tags
    article.meta_keywords = article.meta_keywords if isinstance(article.meta_keywords, list) else [article.meta_keywords]

    # Set additional data in the additional_data dictionary
    article.additional_data = {
        'excerpt': article.meta_description,
        'domain': article.source_url,
        'tags': article.meta_keywords,
        'content': article.text  # Store the markdown content here
    }

    return article
+
+
async def html_to_markdown(url: str = None, source: str = None) -> Optional[str]:
    """Render a web page as markdown.

    Accepts either raw HTML (`source`, preferred when both are given) or a
    `url` to fetch. Returns the markdown string, or None when neither input
    was provided.
    """
    if source:
        page_html = source
    elif url:
        async with aiohttp.ClientSession() as client:
            async with client.get(url) as resp:
                page_html = await resp.text()
    else:
        L.ERR(f"Unable to convert nothing to markdown.")
        return None

    # Readability isolates the main article content from boilerplate
    readable_html = Document(page_html).summary()

    # Run the cleaned HTML through BeautifulSoup for final scrubbing
    soup = BeautifulSoup(readable_html, 'html.parser')
    for tag in soup(['script', 'style']):
        tag.decompose()

    # Convert to markdown with ATX-style (#) headings
    return md(str(soup), heading_style="ATX")
+
+
async def process_archive(
    url: str,
    title: Optional[str] = None,
    encoding: str = 'utf-8',
    source: Optional[str] = None,
) -> Path:
    """Archive a web page as a markdown file with YAML frontmatter.

    Parameters:
        url: page to archive (also fetched if `source` is not supplied).
        title: optional human-readable title; defaults to "<url> - <timestamp>".
        encoding: encoding used when writing the markdown file.
        source: optional pre-fetched HTML.

    Returns:
        Path of the written markdown file, or None if the write failed.

    Raises:
        HTTPException(400) when the page could not be converted to markdown.
    """
    timestamp = dt_datetime.now().strftime('%b %d, %Y at %H:%M')
    readable_title = title if title else f"{url} - {timestamp}"

    content = await html_to_markdown(url, source)
    if content is None:
        raise HTTPException(status_code=400, detail="Failed to convert content to markdown")

    markdown_path, relative_path = assemble_archive_path(readable_title, ".md")

    # BUGFIX: the previous version omitted the trailing "\n" on the url/date
    # frontmatter lines and on the byline, producing invalid YAML frontmatter
    # ("url: ...date: ...---") and fusing the byline into the content.
    markdown_content = (
        f"---\n"
        f"title: {readable_title}\n"
        f"added: {timestamp}\n"
        f"url: {url}\n"
        f"date: {dt_datetime.now().strftime('%Y-%m-%d')}\n"
        f"---\n\n"
        f"# {readable_title}\n\n"
        f"Clipped from [{url}]({url}) on {timestamp}\n\n"
    )
    markdown_content += content

    try:
        markdown_path.parent.mkdir(parents=True, exist_ok=True)
        with open(markdown_path, 'w', encoding=encoding) as md_file:
            md_file.write(markdown_content)
        L.DEBUG(f"Successfully saved to {markdown_path}")
        return markdown_path
    except Exception as e:
        # Best-effort archive: log and signal failure rather than raising.
        L.WARN(f"Failed to write markdown file: {str(e)}")
        return None
+
def download_file(url, folder):
    """Download an image from `url` into `folder` under a random UUID name.

    Returns the bare filename on success, or None when the request fails,
    the response is not an image, or any exception occurs.
    """
    os.makedirs(folder, exist_ok=True)
    extension = os.path.splitext(urlparse(url).path)[-1]
    filename = str(uuid.uuid4()) + extension
    filepath = os.path.join(folder, filename)

    # Retry transient gateway errors on both schemes with backoff
    adapter = HTTPAdapter(max_retries=Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504]))
    session = requests.Session()
    session.mount('http://', adapter)
    session.mount('https://', adapter)

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
    }

    try:
        response = session.get(url, headers=headers, timeout=10)
        if response.status_code != 200:
            L.ERR(f"Failed to download image: {url}, status code: {response.status_code}")
            return None
        if 'image' not in response.headers.get('Content-Type', ''):
            L.ERR(f"Failed to download image: {url}, invalid content type: {response.headers.get('Content-Type')}")
            return None
        with open(filepath, 'wb') as f:
            f.write(response.content)
    except Exception as e:
        L.ERR(f"Failed to download image: {url}, error: {str(e)}")
        return None
    return filename
+
def copy_file(local_path, folder):
    """Copy `local_path` into `folder` (created if missing).

    Returns the bare filename of the copied file.
    """
    os.makedirs(folder, exist_ok=True)
    base_name = os.path.basename(local_path)
    shutil.copy(local_path, os.path.join(folder, base_name))
    return base_name
+
+
async def save_file(file: UploadFile, folder: Path) -> Path:
    """Persist an uploaded file under `folder`, prefixing its name with a timestamp.

    Returns the full path of the saved file.
    """
    stamped_name = f"{dt_datetime.now().strftime('%Y%m%d_%H%M%S')}_{file.filename}"
    destination = folder / stamped_name
    with open(destination, 'wb') as out:
        shutil.copyfileobj(file.file, out)
    return destination
+
diff --git a/sijapi/routers/serve.py b/sijapi/routers/serve.py
index eea8725..3c88a8d 100644
--- a/sijapi/routers/serve.py
+++ b/sijapi/routers/serve.py
@@ -33,7 +33,7 @@ from sijapi import (
MAC_UN, MAC_PW, MAC_ID, TS_TAILNET, DATA_DIR, SD_IMAGE_DIR, PUBLIC_KEY, OBSIDIAN_VAULT_DIR
)
from sijapi.utilities import bool_convert, sanitize_filename, assemble_journal_path
-from sijapi.routers import note, locate
+from sijapi.routers import loc, note
serve = APIRouter(tags=["public"])
@@ -69,7 +69,7 @@ def is_valid_date(date_str: str) -> bool:
@serve.get("/notes/{file_path:path}")
async def get_file_endpoint(file_path: str):
try:
- date_time = await locate.localize_datetime(file_path);
+ date_time = await loc.dt(file_path);
absolute_path, local_path = assemble_journal_path(date_time, no_timestamp = True)
except ValueError as e:
L.DEBUG(f"Unable to parse {file_path} as a date, now trying to use it as a local path")
diff --git a/sijapi/routers/time.py b/sijapi/routers/time.py
index e04e893..78c01c6 100644
--- a/sijapi/routers/time.py
+++ b/sijapi/routers/time.py
@@ -26,7 +26,7 @@ from collections import defaultdict
from dotenv import load_dotenv
from traceback import format_exc
from sijapi import L, HOME_DIR, TIMING_API_KEY, TIMING_API_URL
-from sijapi.routers.locate import localize_datetime
+from sijapi.routers import loc
### INITIALIZATIONS ###
time = APIRouter(tags=["private"])
@@ -102,8 +102,8 @@ def truncate_project_title(title):
async def fetch_and_prepare_timing_data(start: datetime, end: Optional[datetime] = None) -> List[Dict]:
- # start_date = await localize_datetime(start)
- # end_date = await localize_datetime(end) if end else None
+ # start_date = await loc.dt(start)
+ # end_date = await loc.dt(end) if end else None
# Adjust the start date to include the day before and format the end date
start_date_adjusted = (start - timedelta(days=1)).strftime("%Y-%m-%dT00:00:00")
end_date_formatted = f"{datetime.strftime(end, '%Y-%m-%d')}T23:59:59" if end else f"{datetime.strftime(start, '%Y-%m-%d')}T23:59:59"
@@ -317,8 +317,8 @@ async def get_timing_markdown3(
):
# Fetch and process timing data
- start = await localize_datetime(start_date)
- end = await localize_datetime(end_date) if end_date else None
+ start = await loc.dt(start_date)
+ end = await loc.dt(end_date) if end_date else None
timing_data = await fetch_and_prepare_timing_data(start, end)
# Retain these for processing Markdown data with the correct timezone
@@ -377,8 +377,8 @@ async def get_timing_markdown(
start: str = Query(..., regex=r"\d{4}-\d{2}-\d{2}"),
end: Optional[str] = Query(None, regex=r"\d{4}-\d{2}-\d{2}")
):
- start_date = await localize_datetime(start)
- end_date = await localize_datetime(end)
+ start_date = await loc.dt(start)
+ end_date = await loc.dt(end)
markdown_formatted_data = await process_timing_markdown(start_date, end_date)
return Response(content=markdown_formatted_data, media_type="text/markdown")
@@ -446,8 +446,8 @@ async def get_timing_json(
):
# Fetch and process timing data
- start = await localize_datetime(start_date)
- end = await localize_datetime(end_date)
+ start = await loc.dt(start_date)
+ end = await loc.dt(end_date)
timing_data = await fetch_and_prepare_timing_data(start, end)
# Convert processed data to the required JSON structure
diff --git a/sijapi/routers/weather.py b/sijapi/routers/weather.py
index 6f2ab5d..a9068ca 100644
--- a/sijapi/routers/weather.py
+++ b/sijapi/routers/weather.py
@@ -10,9 +10,9 @@ from typing import Dict
from datetime import datetime
from shapely.wkb import loads
from binascii import unhexlify
-from sijapi import L, VISUALCROSSING_API_KEY, TZ, DB
+from sijapi import L, VISUALCROSSING_API_KEY, TZ, DB, GEO
from sijapi.utilities import haversine
-from sijapi.routers import locate
+from sijapi.routers import loc
weather = APIRouter()
@@ -20,19 +20,19 @@ weather = APIRouter()
async def get_weather(date_time: datetime, latitude: float, longitude: float):
# request_date_str = date_time.strftime("%Y-%m-%d")
L.DEBUG(f"Called get_weather with lat: {latitude}, lon: {longitude}, date_time: {date_time}")
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in get_weather.")
daily_weather_data = await get_weather_from_db(date_time, latitude, longitude)
fetch_new_data = True
if daily_weather_data:
try:
L.DEBUG(f"Daily weather data from db: {daily_weather_data}")
last_updated = str(daily_weather_data['DailyWeather'].get('last_updated'))
- last_updated = await locate.localize_datetime(last_updated)
+ last_updated = await loc.dt(last_updated)
stored_loc_data = unhexlify(daily_weather_data['DailyWeather'].get('location'))
stored_loc = loads(stored_loc_data)
stored_lat = stored_loc.y
stored_lon = stored_loc.x
stored_ele = stored_loc.z
-
hourly_weather = daily_weather_data.get('HourlyWeather')
@@ -53,6 +53,7 @@ async def get_weather(date_time: datetime, latitude: float, longitude: float):
if fetch_new_data:
L.DEBUG(f"We require new data!")
request_date_str = date_time.strftime("%Y-%m-%d")
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d')} as our datetime for fetching new data.")
url = f"https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{latitude},{longitude}/{request_date_str}/{request_date_str}?unitGroup=us&key={VISUALCROSSING_API_KEY}"
try:
async with AsyncClient() as client:
@@ -85,6 +86,7 @@ async def get_weather(date_time: datetime, latitude: float, longitude: float):
async def store_weather_to_db(date_time: datetime, weather_data: dict):
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in store_weather_to_db")
async with DB.get_connection() as conn:
try:
day_data = weather_data.get('days')[0]
@@ -95,16 +97,21 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
stations_array = day_data.get('stations', []) or []
date_str = date_time.strftime("%Y-%m-%d")
+ L.WARN(f"Using {date_str} in our query in store_weather_to_db.")
# Get location details from weather data if available
longitude = weather_data.get('longitude')
latitude = weather_data.get('latitude')
- elevation = await locate.get_elevation(latitude, longitude) # 152.4 # default until we add a geocoder that can look up actual elevation; weather_data.get('elevation') # assuming 'elevation' key, replace if different
+ elevation = await GEO.elevation(latitude, longitude)
location_point = f"POINTZ({longitude} {latitude} {elevation})" if longitude and latitude and elevation else None
# Correct for the datetime objects
- day_data['datetime'] = await locate.localize_datetime(day_data.get('datetime')) #day_data.get('datetime'))
+ L.WARN(f"Uncorrected datetime in store_weather_to_db: {day_data['datetime']}")
+ day_data['datetime'] = await loc.dt(day_data.get('datetime')) #day_data.get('datetime'))
+ L.WARN(f"Corrected datetime in store_weather_to_db with localized datetime: {day_data['datetime']}")
+ L.WARN(f"Uncorrected sunrise time in store_weather_to_db: {day_data['sunrise']}")
day_data['sunrise'] = day_data['datetime'].replace(hour=int(day_data.get('sunrise').split(':')[0]), minute=int(day_data.get('sunrise').split(':')[1]))
+ L.WARN(f"Corrected sunrise time in store_weather_to_db with localized datetime: {day_data['sunrise']}")
day_data['sunset'] = day_data['datetime'].replace(hour=int(day_data.get('sunset').split(':')[0]), minute=int(day_data.get('sunset').split(':')[1]))
daily_weather_params = (
@@ -160,7 +167,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
await asyncio.sleep(0.1)
# hour_data['datetime'] = parse_date(hour_data.get('datetime'))
hour_timestamp = date_str + ' ' + hour_data['datetime']
- hour_data['datetime'] = await locate.localize_datetime(hour_timestamp)
+ hour_data['datetime'] = await loc.dt(hour_timestamp)
L.DEBUG(f"Processing hours now...")
# L.DEBUG(f"Processing {hour_data['datetime']}")
@@ -226,6 +233,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
async def get_weather_from_db(date_time: datetime, latitude: float, longitude: float):
+ L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in get_weather_from_db.")
async with DB.get_connection() as conn:
query_date = date_time.date()
try:
@@ -238,25 +246,38 @@ async def get_weather_from_db(date_time: datetime, latitude: float, longitude: f
LIMIT 1
'''
- daily_weather_data = await conn.fetchrow(query, query_date, longitude, latitude, longitude, latitude)
+
+ daily_weather_record = await conn.fetchrow(query, query_date, longitude, latitude, longitude, latitude)
- if daily_weather_data is None:
+ if daily_weather_record is None:
L.DEBUG(f"No daily weather data retrieved from database.")
return None
- # else:
- # L.DEBUG(f"Daily_weather_data: {daily_weather_data}")
+
+ # Convert asyncpg.Record to a mutable dictionary
+ daily_weather_data = dict(daily_weather_record)
+
+ # Now we can modify the dictionary
+ daily_weather_data['datetime'] = await loc.dt(daily_weather_data.get('datetime'))
+
# Query to get hourly weather data
query = '''
SELECT HW.* FROM HourlyWeather HW
WHERE HW.daily_weather_id = $1
'''
- hourly_weather_data = await conn.fetch(query, daily_weather_data['id'])
+
+ hourly_weather_records = await conn.fetch(query, daily_weather_data['id'])
+
+ hourly_weather_data = []
+ for record in hourly_weather_records:
+ hour_data = dict(record)
+ hour_data['datetime'] = await loc.dt(hour_data.get('datetime'))
+ hourly_weather_data.append(hour_data)
- day: Dict = {
- 'DailyWeather': dict(daily_weather_data),
- 'HourlyWeather': [dict(row) for row in hourly_weather_data],
+ day = {
+ 'DailyWeather': daily_weather_data,
+ 'HourlyWeather': hourly_weather_data,
}
- # L.DEBUG(f"day: {day}")
+ L.DEBUG(f"day: {day}")
return day
except Exception as e:
L.ERR(f"Unexpected error occurred: {e}")