Auto-update: Fri Jun 28 22:22:58 PDT 2024

This commit is contained in:
sanj 2024-06-28 22:22:58 -07:00
parent d26dac7775
commit 0660455eea
13 changed files with 1513 additions and 1547 deletions

View file

@ -4,7 +4,7 @@ import json
filename = 'location_log.json'
server = '!{!{ ENTER A PUBLIC URL TO YOUR SIJAPI INSTANCE }!}!'
-api_key = !{!{ ENTER YOUR GLOBAL_API_KEY HERE }!}!
+api_key = '!{!{ ENTER YOUR GLOBAL_API_KEY HERE }!}!'
def upload_location_data(data):
headers = {
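The hunk above cuts off inside the upload helper. For orientation, here is a minimal sketch of what the full Pythonista-side script plausibly does, posting the local log to the new /locate endpoint; the requests usage and Bearer-token header are assumptions, not the commit's verbatim code:

import json
import requests

filename = 'location_log.json'
server = 'https://your-sijapi-instance.example'  # public URL to your sijapi instance
api_key = 'YOUR_GLOBAL_API_KEY'

def upload_location_data(data):
    # auth scheme is an assumption; match it to how GLOBAL_API_KEY is checked server-side
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {api_key}',
    }
    response = requests.post(f'{server}/locate', json=data, headers=headers)
    response.raise_for_status()
    return response.json()

with open(filename) as f:
    upload_location_data(json.load(f))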

View file

@ -12,7 +12,7 @@ from typing import List, Optional
import traceback
import logging
from .logs import Logger
-from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, TimezoneTracker, Database, PyGeolocator
+from .classes import AutoResponder, IMAPConfig, SMTPConfig, EmailAccount, EmailContact, IncomingEmail, TimezoneTracker, Database, Geocoder
# from sijapi.config.config import load_config
# cfg = load_config()
@ -68,7 +68,7 @@ GEONAMES_TXT = DATA_DIR / "geonames.txt"
LOCATIONS_CSV = DATA_DIR / "US.csv"
TZ = tz.gettz(os.getenv("TZ", "America/Los_Angeles"))
TZ_CACHE = DATA_DIR / "tzcache.json"
-DynamicTZ = TimezoneTracker(TZ_CACHE)
+GEO = Geocoder(NAMED_LOCATIONS, TZ_CACHE)
### Obsidian & notes
ALLOWED_FILENAME_CHARS = r'[^\w \.-]'
@ -90,7 +90,6 @@ YEAR_FMT = os.getenv("YEAR_FMT")
MONTH_FMT = os.getenv("MONTH_FMT")
DAY_FMT = os.getenv("DAY_FMT")
DAY_SHORT_FMT = os.getenv("DAY_SHORT_FMT")
-GEOLOCATOR = PyGeolocator
### Large language model
LLM_URL = os.getenv("LLM_URL", "http://localhost:11434")

View file

@ -1,54 +1,88 @@
import asyncio
import json
import os
from concurrent.futures import ThreadPoolExecutor
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from math import radians, sin, cos, atan2, sqrt  # needed by Geocoder.haversine
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union
import aiofiles
import aiohttp
import asyncpg
import reverse_geocoder as rg
import yaml  # needed by Geocoder.load_override_locations
from pydantic import BaseModel, Field
from srtm import get_data
from timezonefinder import TimezoneFinder
-class PyGeolocator:
-def __init__(self):
class Location(BaseModel):
latitude: float
longitude: float
datetime: datetime
elevation: Optional[float] = None
altitude: Optional[float] = None
zip: Optional[str] = None
street: Optional[str] = None
city: Optional[str] = None
state: Optional[str] = None
country: Optional[str] = None
context: Optional[Dict[str, Any]] = None
class_: Optional[str] = None
type: Optional[str] = None
name: Optional[str] = None
display_name: Optional[str] = None
boundingbox: Optional[List[str]] = None
amenity: Optional[str] = None
house_number: Optional[str] = None
road: Optional[str] = None
quarter: Optional[str] = None
neighbourhood: Optional[str] = None
suburb: Optional[str] = None
county: Optional[str] = None
country_code: Optional[str] = None
class Config:
json_encoders = {
datetime: lambda dt: dt.isoformat(),
}
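Because `Config.json_encoders` maps datetimes to ISO-8601 strings, serializing a Location with pydantic v1's `.json()` yields portable output. A small illustration (the coordinate values are placeholders):

from datetime import datetime, timezone

loc = Location(latitude=45.0, longitude=-122.0, datetime=datetime.now(timezone.utc))
print(loc.json(exclude_none=True))  # the datetime field comes out as an ISO-8601 string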
class Geocoder:
def __init__(self, named_locs: Union[str, Path] = None, cache_file: Union[str, Path] = 'timezone_cache.json'):
self.tf = TimezoneFinder()
self.srtm_data = get_data()
self.named_locs = Path(named_locs) if named_locs else None
self.cache_file = Path(cache_file)
self.last_timezone: str = "America/Los_Angeles"
self.last_update: Optional[datetime] = None
self.last_location: Optional[Tuple[float, float]] = None
self.executor = ThreadPoolExecutor()
self.override_locations = self.load_override_locations()  # find_override_location reads this attribute
-def get_location(self, lat, lon):
-result = rg.search((lat, lon))
-return result[0]['name'], result[0]['admin1'], result[0]['cc']
+async def location(self, lat: float, lon: float):
+loop = asyncio.get_running_loop()
+return await loop.run_in_executor(self.executor, rg.search, [(lat, lon)])
-def get_elevation(self, lat, lon):
-return self.srtm_data.get_elevation(lat, lon)
+async def elevation(self, latitude: float, longitude: float, unit: str = "m") -> float:
+loop = asyncio.get_running_loop()
+elevation = await loop.run_in_executor(self.executor, self.srtm_data.get_elevation, latitude, longitude)
+if unit == "m":
+return elevation
+elif unit == "km":
+return elevation / 1000
+elif unit == "ft" or unit == "'":
+return elevation * 3.280839895
+else:
+raise ValueError(f"Unsupported unit: {unit}")
-def get_timezone(self, lat, lon):
-return self.tf.timezone_at(lat=lat, lng=lon)
-def lookup(self, lat, lon):
-city, state, country = self.get_location(lat, lon)
-elevation = self.get_elevation(lat, lon)
-timezone = self.get_timezone(lat, lon)
+async def timezone(self, lat: float, lon: float):
+loop = asyncio.get_running_loop()
+# timezone_at takes keyword-only arguments, so wrap the call in a lambda
+timezone = await loop.run_in_executor(self.executor, lambda: self.tf.timezone_at(lat=lat, lng=lon))
+return timezone if timezone else 'Unknown'
+async def lookup(self, lat: float, lon: float):
+result = (await self.location(lat, lon))[0]  # geocode once and reuse the result
+city, state, country = result['name'], result['admin1'], result['cc']
+elevation = await self.elevation(lat, lon)
+timezone = await self.timezone(lat, lon)
return {
"city": city,
@ -58,6 +92,179 @@ class PyGeolocator:
"timezone": timezone
}
async def code(self, locations: Union[Location, Tuple[float, float], List[Union[Location, Tuple[float, float]]]]) -> Union[Location, List[Location]]:
if isinstance(locations, (Location, tuple)):
locations = [locations]
processed_locations = []
for loc in locations:
if isinstance(loc, tuple):
# datetime is a required field on Location, so bare coordinates get a timestamp
processed_locations.append(Location(latitude=loc[0], longitude=loc[1], datetime=datetime.now(timezone.utc)))
elif isinstance(loc, Location):
processed_locations.append(loc)
else:
raise ValueError(f"Unsupported location type: {type(loc)}")
coordinates = [(location.latitude, location.longitude) for location in processed_locations]
# batch reverse-geocode in one call: rg.search accepts a list of (lat, lon) tuples
loop = asyncio.get_running_loop()
geocode_results = await loop.run_in_executor(self.executor, rg.search, coordinates)
elevations = await asyncio.gather(*[self.elevation(lat, lon) for lat, lon in coordinates])
timezones = await asyncio.gather(*[self.timezone(lat, lon) for lat, lon in coordinates])
geocoded_locations = []
for location, result, elevation, tz_name in zip(processed_locations, geocode_results, elevations, timezones):
# named tz_name so it does not shadow datetime.timezone, which is used just below
geocoded_location = Location(
latitude=location.latitude,
longitude=location.longitude,
elevation=elevation,
datetime=location.datetime or datetime.now(timezone.utc),
zip=result.get("admin2"),
city=result.get("name"),
state=result.get("admin1"),
country=result.get("cc"),
context=location.context or {},
name=result.get("name"),
display_name=f"{result.get('name')}, {result.get('admin1')}, {result.get('cc')}",
country_code=result.get("cc"),
timezone=tz_name
)
# Merge original location data with geocoded data
for field in location.__fields__:
if getattr(location, field) is None:
setattr(location, field, getattr(geocoded_location, field))
geocoded_locations.append(location)
return geocoded_locations[0] if len(geocoded_locations) == 1 else geocoded_locations
async def geocode_osm(self, latitude: float, longitude: float, email: str):
url = f"https://nominatim.openstreetmap.org/reverse?format=json&lat={latitude}&lon={longitude}"
headers = {
'User-Agent': f'sijapi/1.0 ({email})', # replace with your app name and email
}
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as response:
response.raise_for_status()
data = await response.json()
address = data.get("address", {})
elevation = await self.elevation(latitude, longitude)
return Location(
latitude=latitude,
longitude=longitude,
elevation=elevation,
datetime=datetime.now(timezone.utc),
zip=address.get("postcode"),
street=address.get("road"),
city=address.get("city"),
state=address.get("state"),
country=address.get("country"),
context={},
class_=data.get("class"),
type=data.get("type"),
name=data.get("name"),
display_name=data.get("display_name"),
amenity=address.get("amenity"),
house_number=address.get("house_number"),
road=address.get("road"),
quarter=address.get("quarter"),
neighbourhood=address.get("neighbourhood"),
suburb=address.get("suburb"),
county=address.get("county"),
country_code=address.get("country_code"),
timezone=await self.timezone(latitude, longitude)
)
def load_override_locations(self):
if self.named_locs and self.named_locs.exists():
with open(self.named_locs, 'r') as file:
return yaml.safe_load(file)
return []
def haversine(self, lat1, lon1, lat2, lon2):
R = 6371 # Earth's radius in kilometers
lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2])
dlat = lat2 - lat1
dlon = lon2 - lon1
a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
c = 2 * atan2(sqrt(a), sqrt(1-a))
return R * c
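Since `haversine` is a pure function it is easy to sanity-check in isolation. Restated as a free function under the same formula (the coordinates below are arbitrary test points):

from math import radians, sin, cos, atan2, sqrt

def haversine_km(lat1, lon1, lat2, lon2):
    # great-circle distance on a sphere of radius 6371 km
    lat1, lon1, lat2, lon2 = map(radians, (lat1, lon1, lat2, lon2))
    a = sin((lat2 - lat1) / 2)**2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2)**2
    return 6371 * 2 * atan2(sqrt(a), sqrt(1 - a))

print(round(haversine_km(45.5152, -122.6784, 44.9429, -123.0351)))  # ~70, roughly Portland to Salem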
async def find_override_location(self, lat: float, lon: float) -> Optional[str]:
closest_location = None
closest_distance = float('inf')
for location in self.override_locations:
loc_name = location.get("name")
loc_lat = location.get("latitude")
loc_lon = location.get("longitude")
loc_radius = location.get("radius")
distance = self.haversine(lat, lon, loc_lat, loc_lon)
if distance <= loc_radius:
if distance < closest_distance:
closest_distance = distance
closest_location = loc_name
return closest_location
async def refresh_timezone(self, location: Union[Location, Tuple[float, float]], force: bool = False) -> str:
if isinstance(location, Location):
lat, lon = location.latitude, location.longitude
else:
lat, lon = location
current_time = datetime.now()
if (force or
not self.last_update or
current_time - self.last_update > timedelta(hours=1) or
self.last_location != (lat, lon)):
new_timezone = await self.timezone(lat, lon)
self.last_timezone = new_timezone
self.last_update = current_time
self.last_location = (lat, lon)
await self.tz_save()
return self.last_timezone
async def tz_save(self):
cache_data = {
'last_timezone': self.last_timezone,
'last_update': self.last_update.isoformat() if self.last_update else None,
'last_location': self.last_location
}
async with aiofiles.open(self.cache_file, 'w') as f:
await f.write(json.dumps(cache_data))
async def tz_cached(self):
try:
async with aiofiles.open(self.cache_file, 'r') as f:
cache_data = json.loads(await f.read())
self.last_timezone = cache_data.get('last_timezone')
self.last_update = datetime.fromisoformat(cache_data['last_update']) if cache_data.get('last_update') else None
self.last_location = tuple(cache_data['last_location']) if cache_data.get('last_location') else None
except (FileNotFoundError, json.JSONDecodeError):
# If file doesn't exist or is invalid, we'll start fresh
pass
async def tz_current(self, location: Union[Location, Tuple[float, float]]) -> str:
await self.tz_cached()
return await self.refresh_timezone(location)
async def tz_last(self) -> Optional[str]:
await self.tz_cached()
return self.last_timezone
def __del__(self):
self.executor.shutdown()
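Taken together, the new `Geocoder` folds the old `PyGeolocator` and `TimezoneTracker` into one class. A minimal usage sketch (file paths are whatever your deployment uses; first use downloads reverse_geocoder and SRTM datasets):

import asyncio
from sijapi.classes import Geocoder

async def main():
    geo = Geocoder('named_locations.yaml', 'timezone_cache.json')
    # code() accepts a (lat, lon) tuple, a Location, or a list of either
    enriched = await geo.code((45.5152, -122.6784))
    print(enriched.city, enriched.state, enriched.country, enriched.elevation)
    # cached timezone lookup with the one-hour refresh policy
    print(await geo.tz_current((45.5152, -122.6784)))

asyncio.run(main())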
class Database(BaseModel):
host: str = Field(..., description="Database host")
port: int = Field(5432, description="Database port")
@ -98,15 +305,6 @@ class Database(BaseModel):
return self.dict(exclude_none=True)
-class AutoResponder(BaseModel):
-name: str
-style: str
-context: str
-ollama_model: str = "llama3"
-whitelist: List[str]
-blacklist: List[str]
-image_prompt: Optional[str] = None
class IMAPConfig(BaseModel):
username: str
password: str
@ -121,6 +319,16 @@ class SMTPConfig(BaseModel):
port: int
encryption: str = None
+class AutoResponder(BaseModel):
+name: str
+style: str
+context: str
+ollama_model: str = "llama3"
+whitelist: List[str]
+blacklist: List[str]
+image_prompt: Optional[str] = None
+smtp: SMTPConfig
class EmailAccount(BaseModel):
name: str
refresh: int
@ -129,7 +337,6 @@ class EmailAccount(BaseModel):
summarize: bool = False
podcast: bool = False
imap: IMAPConfig
-smtp: SMTPConfig
autoresponders: Optional[List[AutoResponder]]
class EmailContact(BaseModel):
@ -143,95 +350,3 @@ class IncomingEmail(BaseModel):
subject: str
body: str
attachments: List[dict] = []
class Location(BaseModel):
latitude: float
longitude: float
datetime: datetime
elevation: Optional[float] = None
altitude: Optional[float] = None
zip: Optional[str] = None
street: Optional[str] = None
city: Optional[str] = None
state: Optional[str] = None
country: Optional[str] = None
context: Optional[Dict[str, Any]] = None
class_: Optional[str] = None
type: Optional[str] = None
name: Optional[str] = None
display_name: Optional[str] = None
boundingbox: Optional[List[str]] = None
amenity: Optional[str] = None
house_number: Optional[str] = None
road: Optional[str] = None
quarter: Optional[str] = None
neighbourhood: Optional[str] = None
suburb: Optional[str] = None
county: Optional[str] = None
country_code: Optional[str] = None
class Config:
json_encoders = {
datetime: lambda dt: dt.isoformat(),
}
class TimezoneTracker:
def __init__(self, cache_file: Union[str, Path] = 'timezone_cache.json'):
self.cache_file = Path(cache_file)
self.last_timezone: str = "America/Los_Angeles"
self.last_update: Optional[datetime] = None
self.last_location: Optional[Tuple[float, float]] = None
self.tf = TimezoneFinder()
def find(self, lat: float, lon: float) -> str:
timezone = self.tf.timezone_at(lat=lat, lng=lon)
return timezone if timezone else 'Unknown'
async def refresh(self, location: Union[Location, Tuple[float, float]], force: bool = False) -> str:
if isinstance(location, Location):
lat, lon = location.latitude, location.longitude
else:
lat, lon = location
current_time = datetime.now()
if (force or
not self.last_update or
current_time - self.last_update > timedelta(hours=1) or
self.last_location != (lat, lon)):
new_timezone = self.find(lat, lon)
self.last_timezone = new_timezone
self.last_update = current_time
self.last_location = (lat, lon)
await self.save_to_cache()
return self.last_timezone
async def save_to_cache(self):
cache_data = {
'last_timezone': self.last_timezone,
'last_update': self.last_update.isoformat() if self.last_update else None,
'last_location': self.last_location
}
with self.cache_file.open('w') as f:
json.dump(cache_data, f)
async def load_from_cache(self):
try:
with self.cache_file.open('r') as f:
cache_data = json.load(f)
self.last_timezone = cache_data.get('last_timezone')
self.last_update = datetime.fromisoformat(cache_data['last_update']) if cache_data.get('last_update') else None
self.last_location = tuple(cache_data['last_location']) if cache_data.get('last_location') else None
except (FileNotFoundError, json.JSONDecodeError):
# If file doesn't exist or is invalid, we'll start fresh
pass
async def get_current(self, location: Union[Location, Tuple[float, float]]) -> str:
await self.load_from_cache()
return await self.refresh(location)
async def get_last(self) -> Optional[str]:
await self.load_from_cache()
return self.last_timezone

View file

@ -96,7 +96,7 @@ TRUSTED_SUBNETS=127.0.0.1/32,10.13.37.0/24,100.64.64.0/24
# ──────────
#
#─── router selection: ────────────────────────────────────────────────────────────
-ROUTERS=asr,calendar,cf,email,health,llm,locate,note,rag,sd,serve,time,tts,weather
+ROUTERS=asr,cal,cf,email,health,llm,loc,note,rag,sd,serve,time,tts,weather
UNLOADED=ig
#─── notes: ──────────────────────────────────────────────────────────────────────
#
@ -115,7 +115,7 @@ UNLOADED=ig
# asr: requires faster_whisper — $ pip install faster_whisper — and
# downloading the model file specified in ASR_DEFAULT_MODEL.
#
-# calendar: requires (1) a Microsoft 365 account with a properly configured
+# cal: requires (1) a Microsoft 365 account with a properly configured
# Azure Active Directory app, and/or (2) Calendars on macOS.
#
# cf: interfaces with the Cloudflare API and Caddy to register new
@ -138,7 +138,7 @@ UNLOADED=ig
# configured separately in the ig_config.json file; relies heavily
# on the llm and sd routers which have their own dependencies.
#
-# locate: some endpoints work as is, but the core location tracking
+# loc: some endpoints work as is, but the core location tracking
# functionality requires Postgresql + PostGIS extension and are
# designed specifically to pair with a mobile device where
# Pythonista is installed and configured to run the
@ -148,8 +148,8 @@ UNLOADED=ig
# note: designed for use with Obsidian plus the Daily Notes and Tasks
# core extensions; and the Admonitions, Banners, Icons (with the
# Lucide pack), and Make.md community extensions. Moreover `notes`
-# relies heavily on the calendar, llm, locate, sd, summarize, time,
-# tts, and weather routers and accordingly on the external
+# relies heavily on the cal, llm, loc, sd, summarize, time, tts,
+# and weather routers and accordingly on the external
# dependencies of each.
#
# sd: requires ComfyUI plus any modules and StableDiffusion models
@ -165,7 +165,7 @@ UNLOADED=ig
#
# weather: requires a VisualCrossing API key and is designed for (but doesn't
# itself strictly require) Postgresql with the PostGIS extension;
-# (... but it presently relies on the locate router, which does).
+# (... but it presently relies on the loc router, which does).
#
#
# ... Whew! that was a lot, right? I'm so glad we're in this together...
@ -217,7 +217,7 @@ TAILSCALE_API_KEY=¿SECRET? # <--- enter your own TS API key
# ░▒ ░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ░ ░░ ░ ░ ░ ░ ░
# ░░ ░ ░T̷ O̷ G̷ E̷ T̷ H̷ ░ R̷. ░ ░ ░ ░ ░
#
-#─── frag, or weat,and locate modules:── .
+#─── frag, or weat, and loc modules:───── .
DB_NAME=db
#
DB_HOST=127.0.0.1
@ -237,12 +237,12 @@ DB_SSH_PASS=¿SECRET? # <--- enter SSH password for pg server (if not l
# variables allow database access over an SSH tunnel.
#
# In the current implementation, we rely on Postgres to hold:
-# i. user-logged location data (locate module), and
+# i. user-logged location data (loc module), and
# ii. results from past weather forecast checks (weather module).
#
# A future version will hopefully make use of PostGIS's geocoding capabilities,
# and add a vector database for the LLM module. Until then it's up to you if the
-# locate and weather modules are worth the hassle of maintaining Postgres.
+# loc and weather modules are worth the hassle of maintaining Postgres.
# ──────────
#
#─────────────────────────────── 𝐼 𝐵 𝐸 𝑇 𝑌 𝑂 𝑈 ─────────────────────────────────

View file

@ -17,16 +17,16 @@ from datetime import datetime, timedelta
from Foundation import NSDate, NSRunLoop
import EventKit as EK
from sijapi import L, ICAL_TOGGLE, ICALENDARS, MS365_TOGGLE, MS365_CLIENT_ID, MS365_SECRET, MS365_AUTHORITY_URL, MS365_SCOPE, MS365_REDIRECT_PATH, MS365_TOKEN_PATH
-from sijapi.routers.locate import localize_datetime
+from sijapi.routers import loc
-calendar = APIRouter()
+cal = APIRouter()
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
timeout = httpx.Timeout(12)
if MS365_TOGGLE is True:
L.CRIT(f"Visit https://api.sij.ai/o365/login to obtain your Microsoft 365 authentication token.")
@calendar.get("/o365/login")
@cal.get("/o365/login")
async def login():
L.DEBUG(f"Received request to /o365/login")
L.DEBUG(f"SCOPE: {MS365_SCOPE}")
@ -40,7 +40,7 @@ if MS365_TOGGLE is True:
L.INFO(f"Redirecting to authorization URL: {authorization_url}")
return RedirectResponse(authorization_url)
@calendar.get("/o365/oauth_redirect")
@cal.get("/o365/oauth_redirect")
async def oauth_redirect(code: str = None, error: str = None):
L.DEBUG(f"Received request to /o365/oauth_redirect")
if error:
@ -73,7 +73,7 @@ if MS365_TOGGLE is True:
detail="Failed to obtain access token"
)
@calendar.get("/o365/me")
@cal.get("/o365/me")
async def read_items():
L.DEBUG(f"Received request to /o365/me")
token = await load_token()
@ -212,10 +212,10 @@ def datetime_to_nsdate(dt: datetime) -> NSDate:
return NSDate.dateWithTimeIntervalSince1970_(dt.timestamp())
@calendar.get("/events")
@cal.get("/events")
async def get_events_endpoint(start_date: str, end_date: str):
-start_dt = await localize_datetime(start_date)
-end_dt = await localize_datetime(end_date)
+start_dt = await loc.dt(start_date)
+end_dt = await loc.dt(end_date)
datetime.strptime(start_date, "%Y-%m-%d") or datetime.now()
end_dt = datetime.strptime(end_date, "%Y-%m-%d") or datetime.now()
response = await get_events(start_dt, end_dt)
@ -341,8 +341,8 @@ async def get_ms365_events(start_date: datetime, end_date: datetime):
async def parse_calendar_for_day(range_start: datetime, range_end: datetime, events: List[Dict[str, Any]]):
-range_start = await localize_datetime(range_start)
-range_end = await localize_datetime(range_end)
+range_start = await loc.dt(range_start)
+range_end = await loc.dt(range_end)
event_list = []
for event in events:
@ -361,13 +361,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
L.INFO(f"End date string not a dict")
try:
-start_date = await localize_datetime(start_str) if start_str else None
+start_date = await loc.dt(start_str) if start_str else None
except (ValueError, TypeError) as e:
L.ERR(f"Invalid start date format: {start_str}, error: {e}")
continue
try:
-end_date = await localize_datetime(end_str) if end_str else None
+end_date = await loc.dt(end_str) if end_str else None
except (ValueError, TypeError) as e:
L.ERR(f"Invalid end date format: {end_str}, error: {e}")
continue
@ -376,13 +376,13 @@ async def parse_calendar_for_day(range_start: datetime, range_end: datetime, eve
if start_date:
# Ensure start_date is timezone-aware
-start_date = await localize_datetime(start_date)
+start_date = await loc.dt(start_date)
# If end_date is not provided, assume it's the same as start_date
if not end_date:
end_date = start_date
else:
-end_date = await localize_datetime(end_date)
+end_date = await loc.dt(end_date)
# Check if the event overlaps with the given range
if (start_date < range_end) and (end_date > range_start):

View file

@ -3,6 +3,7 @@ Uses IMAP and SMTP login credentials to monitor an inbox and summarize incoming
'''
from fastapi import APIRouter
import asyncio
import aiofiles
from imbox import Imbox
from bs4 import BeautifulSoup
import os
@ -19,35 +20,20 @@ import yaml
from typing import List, Dict, Optional, Set
from datetime import datetime as dt_datetime
from sijapi import L, PODCAST_DIR, DEFAULT_VOICE, EMAIL_CONFIG, EMAIL_LOGS
-from sijapi.routers import tts, llm, sd, locate
+from sijapi.routers import loc, tts, llm, sd
from sijapi.utilities import clean_text, assemble_journal_path, extract_text, prefix_lines
-from sijapi.classes import EmailAccount, IMAPConfig, SMTPConfig, IncomingEmail, EmailContact, AutoResponder
+from sijapi.classes import EmailAccount
email = APIRouter(tags=["private"])
def load_email_accounts(yaml_path: str) -> List[EmailAccount]:
with open(yaml_path, 'r') as file:
config = yaml.safe_load(file)
return [EmailAccount(**account) for account in config['accounts']]
def get_account_by_email(this_email: str) -> Optional[EmailAccount]:
email_accounts = load_email_accounts(EMAIL_CONFIG)
for account in email_accounts:
if account.imap.username.lower() == this_email.lower():
return account
return None
def get_imap_details(this_email: str) -> Optional[IMAPConfig]:
account = get_account_by_email(this_email)
return account.imap if account else None
def get_smtp_details(this_email: str) -> Optional[SMTPConfig]:
account = get_account_by_email(this_email)
return account.smtp if account else None
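For reference, `load_email_accounts` expects the EMAIL_CONFIG YAML to parse into a structure like the following, shown here as the equivalent Python dict. Field names come from the EmailAccount, IMAPConfig, SMTPConfig, and AutoResponder models (plus `fullname` and `bio`, which the auto-response prompts read); every concrete value is a placeholder:

config = {
    'accounts': [{
        'name': 'personal',
        'fullname': 'Jane Doe',
        'bio': 'an attorney who travels often',
        'refresh': 300,          # seconds between inbox polls
        'summarize': True,
        'podcast': False,
        'imap': {'username': 'jane@example.com', 'password': 'hunter2',
                 'host': 'imap.example.com', 'port': 993, 'encryption': 'SSL'},
        'autoresponders': [{
            'name': 'vacation',
            'style': 'warm but brief',
            'context': 'she is out of the office until July',
            'whitelist': ['@example.com'],
            'blacklist': ['unsubscribe', 'newsletter'],
            'smtp': {'username': 'jane@example.com', 'password': 'hunter2',
                     'host': 'smtp.example.com', 'port': 587, 'encryption': 'STARTTLS'},
        }],
    }],
}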
def get_imap_connection(account: EmailAccount):
return Imbox(account.imap.host,
username=account.imap.username,
@ -56,79 +42,17 @@ def get_imap_connection(account: EmailAccount):
ssl=account.imap.encryption == 'SSL',
starttls=account.imap.encryption == 'STARTTLS')
-def get_smtp_connection(account: EmailAccount):
+def get_smtp_connection(autoresponder: AutoResponder):
context = ssl._create_unverified_context()
-if account.smtp.encryption == 'SSL':
-return SMTP_SSL(account.smtp.host, account.smtp.port, context=context)
-elif account.smtp.encryption == 'STARTTLS':
-smtp = SMTP(account.smtp.host, account.smtp.port)
+if autoresponder.smtp.encryption == 'SSL':
+return SMTP_SSL(autoresponder.smtp.host, autoresponder.smtp.port, context=context)
+elif autoresponder.smtp.encryption == 'STARTTLS':
+smtp = SMTP(autoresponder.smtp.host, autoresponder.smtp.port)
smtp.starttls(context=context)
return smtp
else:
-return SMTP(account.smtp.host, account.smtp.port)
def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
L.DEBUG(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
def matches_list(item: str, this_email: IncomingEmail) -> bool:
if '@' in item:
return item in this_email.sender
else:
return item.lower() in this_email.subject.lower() or item.lower() in this_email.body.lower()
matching_profiles = []
for profile in account.autoresponders:
whitelist_match = not profile.whitelist or any(matches_list(item, this_email) for item in profile.whitelist)
blacklist_match = any(matches_list(item, this_email) for item in profile.blacklist)
if whitelist_match and not blacklist_match:
L.DEBUG(f"We have a match for {whitelist_match} and no blacklist matches.")
matching_profiles.append(profile)
elif whitelist_match and blacklist_match:
L.DEBUG(f"Matched whitelist for {whitelist_match}, but also matched blacklist for {blacklist_match}")
else:
L.DEBUG(f"No whitelist or blacklist matches.")
return matching_profiles
async def generate_auto_response_body(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> str:
now = await locate.localize_datetime(dt_datetime.now())
then = await locate.localize_datetime(this_email.datetime_received)
age = now - then
usr_prompt = f'''
Generate a personalized auto-response to the following email:
From: {this_email.sender}
Sent: {age} ago
Subject: "{this_email.subject}"
Body:
{this_email.body}
Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}.
Keep the response {profile.style} and to the point, but responsive to the sender's inquiry.
Do not mention or recite this context information in your response.
'''
sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"
try:
# async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LLM, max_tokens: int = 200):
response = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
L.DEBUG(f"query_ollama response: {response}")
if isinstance(response, str):
response += "\n\n"
return response
elif isinstance(response, dict):
if "message" in response and "content" in response["message"]:
return response["message"]["content"]
else:
L.ERR(f"Unexpected response structure from query_ollama: {response}")
else:
L.ERR(f"Unexpected response type from query_ollama: {type(response)}")
# If we reach here, we couldn't extract a valid response
raise ValueError("Could not extract valid response from query_ollama")
except Exception as e:
L.ERR(f"Error generating auto-response: {str(e)}")
return f"Thank you for your email regarding '{this_email.subject}'. We are currently experiencing technical difficulties with our auto-response system. We will review your email and respond as soon as possible. We apologize for any inconvenience."
+return SMTP(autoresponder.smtp.host, autoresponder.smtp.port)
def clean_email_content(html_content):
@ -156,119 +80,7 @@ async def extract_attachments(attachments) -> List[str]:
return attachment_texts
async def save_email(this_email: IncomingEmail, account: EmailAccount):
try:
md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
summary = ""
if account.summarize == True:
email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
if this_email.attachments:
attachment_texts = await extract_attachments(this_email.attachments)
email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
summary = await llm.summarize_text(email_content)
await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = account.podcast, output_path = tts_path)
summary = prefix_lines(summary, '> ')
# Create the markdown content
markdown_content = f'''---
date: {this_email.datetime_received.strftime('%Y-%m-%d')}
tags:
- email
---
| | | |
| --: | :--: | :--: |
| *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}** | |
| *from* | **[[{this_email.sender}]]** | |
| *to* | {', '.join([f'**[[{recipient.email}]]**' if not recipient.name else f'**[[{recipient.name}|{recipient.email}]]**' for recipient in this_email.recipients])} | |
| *subject* | **{this_email.subject}** | |
'''
if summary:
markdown_content += f'''
> [!summary] Summary
> {summary}
'''
if tts_path.exists():
markdown_content += f'''
![[{tts_path}]]
'''
markdown_content += f'''
---
{this_email.body}
'''
with open(md_path, 'w', encoding='utf-8') as md_file:
md_file.write(markdown_content)
L.DEBUG(f"Saved markdown to {md_path}")
return True
except Exception as e:
L.ERR(f"Exception: {e}")
return False
async def autorespond(this_email: IncomingEmail, account: EmailAccount):
L.DEBUG(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
matching_profiles = get_matching_autoresponders(this_email, account)
L.DEBUG(f"Matching profiles: {matching_profiles}")
for profile in matching_profiles:
L.INFO(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")
auto_response_subject = f"Auto-Response Re: {this_email.subject}"
auto_response_body = await generate_auto_response_body(this_email, profile, account)
L.DEBUG(f"Auto-response: {auto_response_body}")
success = await send_auto_response(this_email.sender, auto_response_subject, auto_response_body, profile, account)
if success == True:
return True
L.WARN(f"We were not able to successfully auto-respond to {this_email.subject}")
return False
async def send_auto_response(to_email, subject, body, profile, account):
try:
message = MIMEMultipart()
message['From'] = account.smtp.username
message['To'] = to_email
message['Subject'] = subject
message.attach(MIMEText(body, 'plain'))
if profile.image_prompt:
jpg_path = await sd.workflow(profile.image_prompt, earlyout=False, downscale_to_fit=True)
if jpg_path and os.path.exists(jpg_path):
with open(jpg_path, 'rb') as img_file:
img = MIMEImage(img_file.read(), name=os.path.basename(jpg_path))
message.attach(img)
L.DEBUG(f"Sending auto-response {to_email} concerning {subject} from account {account.name}...")
with get_smtp_connection(account) as server:
server.login(account.smtp.username, account.smtp.password)
server.send_message(message)
L.INFO(f"Auto-response sent to {to_email} concerning {subject} from account {account.name}!")
return True
except Exception as e:
L.ERR(f"Error in preparing/sending auto-response from account {account.name}: {e}")
return False
async def load_processed_uids(filename: Path) -> Set[str]:
if filename.exists():
with open(filename, 'r') as f:
return set(line.strip().split(':')[-1] for line in f)
return set()
async def save_processed_uid(filename: Path, account_name: str, uid: str):
with open(filename, 'a') as f:
f.write(f"{account_name}:{uid}\n")
-async def process_account_summarization(account: EmailAccount):
+async def process_account_archival(account: EmailAccount):
summarized_log = EMAIL_LOGS / account.name / "summarized.txt"
os.makedirs(summarized_log.parent, exist_ok = True)
@ -283,7 +95,7 @@ async def process_account_summarization(account: EmailAccount):
uid_str = uid.decode() if isinstance(uid, bytes) else str(uid)
if uid_str not in processed_uids:
recipients = [EmailContact(email=recipient['email'], name=recipient.get('name', '')) for recipient in message.sent_to]
-localized_datetime = await locate.localize_datetime(message.date)
+localized_datetime = await loc.dt(message.date)
this_email = IncomingEmail(
sender=message.sent_from[0]['email'],
datetime_received=localized_datetime,
@ -292,15 +104,15 @@ async def process_account_summarization(account: EmailAccount):
body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
attachments=message.attachments
)
-if account.summarize:
-save_success = await save_email(this_email, account)
+md_path, md_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".md")
+md_summary = await summarize_single_email(this_email, account.podcast) if account.summarize == True else None
+md_content = await archive_single_email(this_email, md_summary)
+save_success = await save_email(md_path, md_content)
if save_success:
await save_processed_uid(summarized_log, account.name, uid_str)
L.INFO(f"Summarized email: {uid_str}")
else:
L.WARN(f"Failed to summarize {this_email.subject}")
-else:
-L.INFO(f"account.summarize shows as false.")
else:
L.DEBUG(f"Skipping {uid_str} because it was already processed.")
except Exception as e:
@ -308,23 +120,189 @@ async def process_account_summarization(account: EmailAccount):
await asyncio.sleep(account.refresh)
async def summarize_single_email(this_email: IncomingEmail, podcast: bool = False):
tts_path, tts_relative = assemble_journal_path(this_email.datetime_received, "Emails", this_email.subject, ".wav")
summary = ""
email_content = f'At {this_email.datetime_received}, {this_email.sender} sent an email with the subject line "{this_email.subject}". The email in its entirety reads: \n\n{this_email.body}\n"'
if this_email.attachments:
attachment_texts = await extract_attachments(this_email.attachments)
email_content += "\n—--\n" + "\n—--\n".join([f"Attachment: {text}" for text in attachment_texts])
summary = await llm.summarize_text(email_content)
await tts.local_tts(text_content = summary, speed = 1.1, voice = DEFAULT_VOICE, podcast = podcast, output_path = tts_path)
md_summary = f'```ad.summary\n'
md_summary += f'title: {this_email.subject}\n'
md_summary += f'{summary}\n'
md_summary += f'```\n\n'
md_summary += f'![[{tts_path}]]\n' if tts_path.exists() else ''
return md_summary
async def archive_single_email(this_email: IncomingEmail, summary: str = None):
try:
markdown_content = f'''---
date: {this_email.datetime_received.strftime('%Y-%m-%d')}
tags:
- email
---
| | | |
| --: | :--: | :--: |
| *received* | **{this_email.datetime_received.strftime('%B %d, %Y at %H:%M:%S %Z')}** | |
| *from* | **[[{this_email.sender}]]** | |
| *to* | {', '.join([f'**[[{recipient.email}]]**' if not recipient.name else f'**[[{recipient.name}|{recipient.email}]]**' for recipient in this_email.recipients])} | |
| *subject* | **{this_email.subject}** | |
'''
if summary:
markdown_content += summary
markdown_content += f'''
---
{this_email.body}
'''
return markdown_content
except Exception as e:
L.ERR(f"Exception: {e}")
return False
async def save_email(md_path, md_content):
try:
with open(md_path, 'w', encoding='utf-8') as md_file:
md_file.write(md_content)
L.DEBUG(f"Saved markdown to {md_path}")
return True
except Exception as e:
L.ERR(f"Failed to save email: {e}")
return False
def get_matching_autoresponders(this_email: IncomingEmail, account: EmailAccount) -> List[AutoResponder]:
L.DEBUG(f"Called get_matching_autoresponders for email \"{this_email.subject},\" account name \"{account.name}\"")
def matches_list(item: str, this_email: IncomingEmail) -> bool:
if '@' in item:
return item in this_email.sender
else:
return item.lower() in this_email.subject.lower() or item.lower() in this_email.body.lower()
matching_profiles = []
for profile in account.autoresponders:
whitelist_match = not profile.whitelist or any(matches_list(item, this_email) for item in profile.whitelist)
blacklist_match = any(matches_list(item, this_email) for item in profile.blacklist)
if whitelist_match and not blacklist_match:
L.DEBUG(f"We have a match for {whitelist_match} and no blacklist matches.")
matching_profiles.append(profile)
elif whitelist_match and blacklist_match:
L.DEBUG(f"Matched whitelist for {whitelist_match}, but also matched blacklist for {blacklist_match}")
else:
L.DEBUG(f"No whitelist or blacklist matches.")
return matching_profiles
async def process_account_autoresponding(account: EmailAccount):
-autoresponded_log = EMAIL_LOGS / account.name / "autoresponded.txt"
-os.makedirs(autoresponded_log.parent, exist_ok = True)
+EMAIL_AUTORESPONSE_LOG = EMAIL_LOGS / account.name / "autoresponded.txt"
+os.makedirs(EMAIL_AUTORESPONSE_LOG.parent, exist_ok=True)
while True:
try:
-processed_uids = await load_processed_uids(autoresponded_log)
+processed_uids = await load_processed_uids(EMAIL_AUTORESPONSE_LOG)
L.DEBUG(f"{len(processed_uids)} emails marked as already responded to are being ignored.")
with get_imap_connection(account) as inbox:
unread_messages = inbox.messages(unread=True)
L.DEBUG(f"There are {len(unread_messages)} unread messages.")
for uid, message in unread_messages:
uid_str = uid.decode() if isinstance(uid, bytes) else str(uid)
if uid_str not in processed_uids:
await autorespond_single_email(message, uid_str, account, EMAIL_AUTORESPONSE_LOG)
else:
L.DEBUG(f"Skipping {uid_str} because it was already processed.")
except Exception as e:
L.ERR(f"An error occurred during auto-responding for account {account.name}: {e}")
await asyncio.sleep(account.refresh)
async def autorespond_single_email(message, uid_str: str, account: EmailAccount, log_file: Path):
this_email = await create_incoming_email(message)
L.DEBUG(f"Evaluating {this_email.subject} for autoresponse-worthiness...")
matching_profiles = get_matching_autoresponders(this_email, account)
L.DEBUG(f"Matching profiles: {matching_profiles}")
for profile in matching_profiles:
response_body = await generate_response(this_email, profile, account)
if response_body:
subject = f"Re: {this_email.subject}"
jpg_path = await sd.workflow(profile.image_prompt, earlyout=False, downscale_to_fit=True) if profile.image_prompt else None
success = await send_response(this_email.sender, subject, response_body, profile, account, jpg_path)
if success:
L.WARN(f"Auto-responded to email: {this_email.subject}")
await save_processed_uid(log_file, account.name, uid_str)
else:
L.WARN(f"Failed to send auto-response to {this_email.subject}")
else:
L.WARN(f"Unable to generate auto-response for {this_email.subject}")
async def generate_response(this_email: IncomingEmail, profile: AutoResponder, account: EmailAccount) -> Optional[str]:
L.INFO(f"Generating auto-response to {this_email.subject} with profile: {profile.name}")
now = await loc.dt(dt_datetime.now())
then = await loc.dt(this_email.datetime_received)
age = now - then
usr_prompt = f'''
Generate a personalized auto-response to the following email:
From: {this_email.sender}
Sent: {age} ago
Subject: "{this_email.subject}"
Body: {this_email.body}
---
Respond on behalf of {account.fullname}, who is unable to respond personally because {profile.context}. Keep the response {profile.style} and to the point, but responsive to the sender's inquiry. Do not mention or recite this context information in your response.
'''
sys_prompt = f"You are an AI assistant helping {account.fullname} with email responses. {account.fullname} is described as: {account.bio}"
try:
response = await llm.query_ollama(usr_prompt, sys_prompt, profile.ollama_model, 400)
L.DEBUG(f"query_ollama response: {response}")
if isinstance(response, dict) and "message" in response and "content" in response["message"]:
response = response["message"]["content"]
return response + "\n\n"
except Exception as e:
L.ERR(f"Error generating auto-response: {str(e)}")
return None
async def send_response(to_email: str, subject: str, body: str, profile: AutoResponder, account: EmailAccount, image_attachment: Path = None) -> bool:  # account restored to match the call site above
try:
message = MIMEMultipart()
message['From'] = profile.smtp.username
message['To'] = to_email
message['Subject'] = subject
message.attach(MIMEText(body, 'plain'))
if image_attachment and os.path.exists(image_attachment):
with open(image_attachment, 'rb') as img_file:
img = MIMEImage(img_file.read(), name=os.path.basename(image_attachment))
message.attach(img)
L.DEBUG(f"Sending auto-response to {to_email} concerning {subject} from account {profile.name}...")
with get_smtp_connection(profile) as server:
server.login(profile.smtp.username, profile.smtp.password)
server.send_message(message)
L.INFO(f"Auto-response sent to {to_email} concerning {subject} from account {profile.name}!")
return True
except Exception as e:
L.ERR(f"Error in preparing/sending auto-response from account {profile.name}: {e}")
return False
async def create_incoming_email(message) -> IncomingEmail:
recipients = [EmailContact(email=recipient['email'], name=recipient.get('name', '')) for recipient in message.sent_to]
-localized_datetime = await locate.localize_datetime(message.date)
-this_email = IncomingEmail(
+localized_datetime = await loc.dt(message.date)
+return IncomingEmail(
sender=message.sent_from[0]['email'],
datetime_received=localized_datetime,
recipients=recipients,
@ -332,26 +310,25 @@ async def process_account_autoresponding(account: EmailAccount):
body=clean_email_content(message.body['html'][0]) if message.body['html'] else clean_email_content(message.body['plain'][0]) or "",
attachments=message.attachments
)
L.DEBUG(f"Attempting autoresponse on {this_email.subject}")
respond_success = await autorespond(this_email, account)
if respond_success:
await save_processed_uid(autoresponded_log, account.name, uid_str)
L.WARN(f"Auto-responded to email: {this_email.subject}")
else:
L.WARN(f"Failed auto-response to {this_email.subject}")
L.DEBUG(f"Skipping {uid_str} because it was already processed.")
except Exception as e:
L.ERR(f"An error occurred during auto-responding for account {account.name}: {e}")
await asyncio.sleep(account.refresh)
async def load_processed_uids(filename: Path) -> Set[str]:
if filename.exists():
async with aiofiles.open(filename, 'r') as f:
return set(line.strip().split(':')[-1] for line in await f.readlines())
return set()
async def save_processed_uid(filename: Path, account_name: str, uid: str):
async with aiofiles.open(filename, 'a') as f:
await f.write(f"{account_name}:{uid}\n")
async def process_all_accounts():
email_accounts = load_email_accounts(EMAIL_CONFIG)
-summarization_tasks = [asyncio.create_task(process_account_summarization(account)) for account in email_accounts]
+summarization_tasks = [asyncio.create_task(process_account_archival(account)) for account in email_accounts]
autoresponding_tasks = [asyncio.create_task(process_account_autoresponding(account)) for account in email_accounts]
await asyncio.gather(*summarization_tasks, *autoresponding_tasks)
@email.on_event("startup")
async def startup_event():
await asyncio.sleep(5)

View file

@ -80,14 +80,6 @@ async def generate_response(prompt: str):
)
return {"response": output['response']}
@llm.post("/llm/query")
async def llm_query_endpoint(
message: str = Form(...),
file: Optional[UploadFile] = Form(None)  # Optional takes brackets, not a call; None makes the field genuinely optional
):
return None
async def query_ollama(usr: str, sys: str = LLM_SYS_MSG, model: str = DEFAULT_LLM, max_tokens: int = 200):
messages = [{"role": "system", "content": sys},

sijapi/routers/loc.py Normal file (385 additions)
View file

@ -0,0 +1,385 @@
'''
Uses Postgres/PostGIS for location tracking (data obtained via the companion mobile Pythonista scripts) and for geocoding purposes.
'''
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import HTMLResponse, JSONResponse
import traceback
import yaml
import folium
from datetime import datetime, timezone
from typing import List, Optional, Tuple, Union
from zoneinfo import ZoneInfo
from dateutil.parser import parse as dateutil_parse
from sijapi import L, DB, TZ, NAMED_LOCATIONS, GEO  # DynamicTZ was replaced by GEO in this commit
from sijapi.classes import Location
from sijapi.utilities import haversine
loc = APIRouter()
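The queries below assume PostGIS and a locations table whose geometry column holds a 3D point (longitude, latitude, elevation). A hedged bootstrap sketch, with columns inferred from the INSERT and SELECT statements in this file; the real schema may differ:

import asyncio
import asyncpg

async def bootstrap(dsn: str):
    conn = await asyncpg.connect(dsn)
    await conn.execute('CREATE EXTENSION IF NOT EXISTS postgis;')
    await conn.execute('''
        CREATE TABLE IF NOT EXISTS locations (
            id SERIAL PRIMARY KEY,
            datetime TIMESTAMP,
            location GEOMETRY(POINTZ, 4326),
            city TEXT, state TEXT, zip TEXT, street TEXT, country TEXT,
            action TEXT, device_type TEXT, device_model TEXT,
            device_name TEXT, device_os TEXT,
            class_ TEXT, type TEXT, name TEXT, display_name TEXT,
            amenity TEXT, house_number TEXT, road TEXT, quarter TEXT,
            neighbourhood TEXT, suburb TEXT, county TEXT, country_code TEXT
        );
    ''')
    await conn.close()

asyncio.run(bootstrap('postgresql://user:pass@127.0.0.1:5432/db'))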
async def dt(
date_time: Union[str, datetime],
tz: Union[str, ZoneInfo, None] = None
) -> datetime:
try:
# Convert string to datetime if necessary
if isinstance(date_time, str):
date_time = dateutil_parse(date_time)
L.DEBUG(f"Converted string '{date_time}' to datetime object.")
if not isinstance(date_time, datetime):
raise ValueError("Input must be a string or datetime object.")
# Handle provided timezone
if tz is not None:
if tz == "local":
last_loc = await get_last_location()  # takes no arguments
tz_str = await GEO.timezone(last_loc.latitude, last_loc.longitude)
try:
tz = ZoneInfo(tz_str)
except Exception as e:
L.WARN(f"Invalid timezone string '{tz_str}' from DynamicTZ. Falling back to UTC. Error: {e}")
tz = ZoneInfo('UTC')
L.DEBUG(f"Using local timezone: {tz}")
elif isinstance(tz, str):
try:
tz = ZoneInfo(tz)
except Exception as e:
L.ERR(f"Invalid timezone string '{tz}'. Error: {e}")
raise ValueError(f"Invalid timezone string: {tz}")
elif isinstance(tz, ZoneInfo):
pass # tz is already a ZoneInfo object
else:
raise ValueError("tz must be 'local', a string, or a ZoneInfo object.")
# Localize to the provided or determined timezone
if date_time.tzinfo is None:
date_time = date_time.replace(tzinfo=tz)
L.DEBUG(f"Localized naive datetime to timezone: {tz}")
else:
date_time = date_time.astimezone(tz)
L.DEBUG(f"Converted datetime from {date_time.tzinfo} to timezone: {tz}")
# If no timezone provided, only fill in missing timezone info
elif date_time.tzinfo is None:
last_loc = await get_last_location()  # await was missing, and the function takes no arguments
tz_str = await GEO.timezone(last_loc.latitude, last_loc.longitude)
try:
tz = ZoneInfo(tz_str)
except Exception as e:
L.WARN(f"Invalid timezone string '{tz_str}' from DynamicTZ. Falling back to UTC. Error: {e}")
tz = ZoneInfo('UTC')
date_time = date_time.replace(tzinfo=tz)
L.DEBUG(f"Filled in missing timezone info: {tz}")
# If datetime already has timezone and no new timezone provided, do nothing
else:
L.DEBUG(f"Datetime already has timezone {date_time.tzinfo}. No changes made.")
return date_time
except ValueError as e:
L.ERR(f"Error in dt: {e}")
raise
except Exception as e:
L.ERR(f"Unexpected error in dt: {e}")
raise ValueError(f"Failed to localize datetime: {e}")
async def get_last_location() -> Optional[Location]:
query_datetime = datetime.now(TZ)
L.DEBUG(f"Query_datetime: {query_datetime}")
this_location = await fetch_last_location_before(query_datetime)
if this_location:
L.DEBUG(f"location: {this_location}")
return this_location
return None
async def fetch_locations(start: datetime, end: datetime = None) -> List[Location]:
start_datetime = await dt(start)
if end is None:
end_datetime = await dt(start_datetime.replace(hour=23, minute=59, second=59))
else:
end_datetime = await dt(end)
if start_datetime.time() == datetime.min.time() and end_datetime.time() == datetime.min.time():
end_datetime = end_datetime.replace(hour=23, minute=59, second=59)
L.DEBUG(f"Fetching locations between {start_datetime} and {end_datetime}")
async with DB.get_connection() as conn:
locations = []
# Check for records within the specified datetime range
range_locations = await conn.fetch('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street,
action, device_type, device_model, device_name, device_os
FROM locations
WHERE datetime >= $1 AND datetime <= $2
ORDER BY datetime DESC
''', start_datetime.replace(tzinfo=None), end_datetime.replace(tzinfo=None))
L.DEBUG(f"Range locations query returned: {range_locations}")
locations.extend(range_locations)
if not locations and (end is None or start_datetime.date() == end_datetime.date()):
location_data = await conn.fetchrow('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street,
action, device_type, device_model, device_name, device_os
FROM locations
WHERE datetime < $1
ORDER BY datetime DESC
LIMIT 1
''', start_datetime.replace(tzinfo=None))
L.DEBUG(f"Fallback query returned: {location_data}")
if location_data:
locations.append(location_data)
L.DEBUG(f"Locations found: {locations}")
# Sort location_data based on the datetime field in descending order
sorted_locations = sorted(locations, key=lambda x: x['datetime'], reverse=True)
# Create Location objects directly from the location data
location_objects = [
Location(
latitude=location['latitude'],
longitude=location['longitude'],
datetime=location['datetime'],
elevation=location.get('elevation'),
city=location.get('city'),
state=location.get('state'),
zip=location.get('zip'),
street=location.get('street'),
context={
'action': location.get('action'),
'device_type': location.get('device_type'),
'device_model': location.get('device_model'),
'device_name': location.get('device_name'),
'device_os': location.get('device_os')
}
) for location in sorted_locations if location['latitude'] is not None and location['longitude'] is not None
]
return location_objects if location_objects else []
# Function to fetch the last location before the specified datetime
async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
datetime = await dt(datetime)
L.DEBUG(f"Fetching last location before {datetime}")
async with DB.get_connection() as conn:
location_data = await conn.fetchrow('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street, country,
action
FROM locations
WHERE datetime < $1
ORDER BY datetime DESC
LIMIT 1
''', datetime.replace(tzinfo=None))
# no explicit close needed: DB.get_connection() is a context manager and releases the connection
if location_data:
L.DEBUG(f"Last location found: {location_data}")
return Location(**location_data)
else:
L.DEBUG("No location found before the specified datetime")
return None
@loc.get("/map/start_date={start_date_str}&end_date={end_date_str}", response_class=HTMLResponse)
async def generate_map_endpoint(start_date_str: str, end_date_str: str):
try:
start_date = await dt(start_date_str)
end_date = await dt(end_date_str)
except ValueError:
raise HTTPException(status_code=400, detail="Invalid date format")
html_content = await generate_map(start_date, end_date)
return HTMLResponse(content=html_content)
@loc.get("/map", response_class=HTMLResponse)
async def generate_alltime_map_endpoint():
try:
start_date = await dt(datetime.fromisoformat("2022-01-01"))
end_date = await dt(datetime.now())  # await was missing
except ValueError:
raise HTTPException(status_code=400, detail="Invalid date format")
html_content = await generate_map(start_date, end_date)
return HTMLResponse(content=html_content)
async def generate_map(start_date: datetime, end_date: datetime):
locations = await fetch_locations(start_date, end_date)
if not locations:
raise HTTPException(status_code=404, detail="No locations found for the given date range")
# Create a folium map centered around the first location
map_center = [locations[0].latitude, locations[0].longitude]
m = folium.Map(location=map_center, zoom_start=5)
# Add markers for each location
for location in locations:
folium.Marker(
location=[location.latitude, location.longitude],
popup=f"{location.city}, {location.state}<br>Elevation: {location.elevation}m<br>Date: {location.datetime}",
tooltip=f"{location.city}, {location.state}"
).add_to(m)
# Save the map to an HTML file and return the HTML content
map_html = "map.html"
m.save(map_html)
with open(map_html, 'r') as file:
html_content = file.read()
return html_content
async def post_location(location: Location):
L.DEBUG(f"post_location called with {location.datetime}")
async with DB.get_connection() as conn:
try:
context = location.context or {}
action = context.get('action', 'manual')
device_type = context.get('device_type', 'Unknown')
device_model = context.get('device_model', 'Unknown')
device_name = context.get('device_name', 'Unknown')
device_os = context.get('device_os', 'Unknown')
# Parse and localize the datetime
localized_datetime = await dt(location.datetime)
await conn.execute('''
INSERT INTO locations (
datetime, location, city, state, zip, street, action, device_type, device_model, device_name, device_os,
class_, type, name, display_name, amenity, house_number, road, quarter, neighbourhood,
suburb, county, country_code, country
)
VALUES ($1, ST_SetSRID(ST_MakePoint($2, $3, $4), 4326), $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15,
$16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26)
''', localized_datetime, location.longitude, location.latitude, location.elevation, location.city, location.state,
location.zip, location.street, action, device_type, device_model, device_name, device_os,
location.class_, location.type, location.name, location.display_name,
location.amenity, location.house_number, location.road, location.quarter, location.neighbourhood,
location.suburb, location.county, location.country_code, location.country)
# no explicit close needed: DB.get_connection() is a context manager and releases the connection
L.INFO(f"Successfully posted location: {location.latitude}, {location.longitude}, {location.elevation} on {localized_datetime}")
return {
'datetime': localized_datetime,
'latitude': location.latitude,
'longitude': location.longitude,
'elevation': location.elevation,
'city': location.city,
'state': location.state,
'zip': location.zip,
'street': location.street,
'action': action,
'device_type': device_type,
'device_model': device_model,
'device_name': device_name,
'device_os': device_os,
'class_': location.class_,
'type': location.type,
'name': location.name,
'display_name': location.display_name,
'amenity': location.amenity,
'house_number': location.house_number,
'road': location.road,
'quarter': location.quarter,
'neighbourhood': location.neighbourhood,
'suburb': location.suburb,
'county': location.county,
'country_code': location.country_code,
'country': location.country
}
except Exception as e:
L.ERR(f"Error posting location {e}")
L.ERR(traceback.format_exc())
return None
@loc.post("/locate")
async def post_locate_endpoint(locations: Union[Location, List[Location]]):
if isinstance(locations, Location):
locations = [locations]
# Prepare locations
for location in locations:
if not location.datetime:
tz_name = await GEO.timezone(location.latitude, location.longitude)  # DynamicTZ no longer exists; GEO provides the lookup
location.datetime = datetime.now(ZoneInfo(tz_name)).isoformat()  # datetime.now needs a tzinfo, not a zone string
if not location.context:
location.context = {
"action": "manual",
"device_type": "Pythonista",
"device_model": "Unknown",
"device_name": "Unknown",
"device_os": "Unknown"
}
L.DEBUG(f"Location received for processing: {location}")
geocoded_locations = await GEO.code(locations)
responses = []
for location in geocoded_locations:
L.DEBUG(f"Final location submitted to database: {location}")
location_entry = await post_location(location)
if location_entry:
responses.append({"location_data": location_entry})
else:
L.WARN(f"Posting location to database appears to have failed.")
return {"message": "Locations and weather updated", "results": responses}
@loc.get("/locate", response_model=Location)
async def get_last_location_endpoint() -> JSONResponse:
this_location = await get_last_location()
if this_location:
location_dict = this_location.model_dump()
location_dict["datetime"] = this_location.datetime.isoformat()
return JSONResponse(content=location_dict)
else:
raise HTTPException(status_code=404, detail="No location found before the specified datetime")
@loc.get("/locate/{datetime_str}", response_model=List[Location])
async def get_locate(datetime_str: str, all: bool = False):
try:
date_time = await dt(datetime_str)
except ValueError as e:
L.ERR(f"Invalid datetime string provided: {datetime_str}")
return ["ERROR: INVALID DATETIME PROVIDED. USE YYYYMMDDHHmmss or YYYYMMDD format."]
locations = await fetch_locations(date_time)
if not locations:
raise HTTPException(status_code=404, detail="No nearby data found for this date and time")
return locations if all else [locations[0]]
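A hedged client-side sketch of the three endpoints above; the base URL is a placeholder, and authentication (if any) depends on your deployment:

import requests

base = 'https://your-sijapi-instance.example'

# POST /locate: accepts one Location or a list; latitude and longitude suffice,
# since the server fills in datetime, context, and geocoded fields
payload = [{'latitude': 45.5152, 'longitude': -122.6784}]
print(requests.post(f'{base}/locate', json=payload).json())

# GET /locate: the most recent stored location
print(requests.get(f'{base}/locate').json())

# GET /locate/{datetime_str}: fixes for a given day; all=true returns every match
print(requests.get(f'{base}/locate/20240628', params={'all': 'true'}).json())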

View file

@ -1,609 +0,0 @@
'''
Uses Postgres/PostGIS for location tracking (data obtained via the companion mobile Pythonista scripts) and for geocoding purposes.
'''
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import HTMLResponse, JSONResponse
import requests
import yaml
import time
import pytz
import traceback
from datetime import datetime, timezone
from typing import Union, List
import asyncio
import pytz
import aiohttp
import folium
from zoneinfo import ZoneInfo
import time as timer
from dateutil.parser import parse as dateutil_parse
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from srtm import get_data
from pathlib import Path
from pydantic import BaseModel
from typing import Optional, Any, Dict, List, Union
from datetime import datetime, timedelta, time
from sijapi import L, DB, TZ, NAMED_LOCATIONS, DynamicTZ, GEOLOCATOR
from sijapi.classes import Location, PyGeolocator
from sijapi.utilities import haversine
# from osgeo import gdal
# import elevation
locate = APIRouter()
async def reverse_geocode_local_1(lat, lon, date_time: datetime = datetime.now()) -> Optional[Location]:
date_time = await localize_datetime(date_time)
loc = GEOLOCATOR.lookup(lat, lon)
location = Location(
latitude = lat,
longitude = lon,
elevation = loc['elevation'],
datetime = date_time,
city = loc['city'],
state = loc['state'],
country = loc['country']
)
return location
async def reverse_geocode(latitude: float, longitude: float, elevation: float = None) -> Optional[Location]:
url = f"https://nominatim.openstreetmap.org/reverse?format=json&lat={latitude}&lon={longitude}"
L.DEBUG(f"Calling Nominatim API at {url}")
headers = {
'User-Agent': 'sij.law/1.0 (sij@sij.law)', # replace with your app name and email
}
try:
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as response:
response.raise_for_status()
data = await response.json()
address = data.get("address", {})
elevation = elevation or await get_elevation(latitude, longitude)
location = Location(
latitude=latitude,
longitude=longitude,
elevation=elevation,
datetime=datetime.now(timezone.utc),
zip=address.get("postcode"),
street=address.get("road"),
city=address.get("city"),
state=address.get("state"),
country=address.get("country"),
context={},
class_=data.get("class"),
type=data.get("type"),
name=data.get("name"),
display_name=data.get("display_name"),
amenity=address.get("amenity"),
house_number=address.get("house_number"),
road=address.get("road"),
quarter=address.get("quarter"),
neighbourhood=address.get("neighbourhood"),
suburb=address.get("suburb"),
county=address.get("county"),
country_code=address.get("country_code")
)
L.DEBUG(f"Created Location object: {location}")
return location
except aiohttp.ClientError as e:
L.ERR(f"Error: {e}")
return None
## NOT YET IMPLEMENTED
async def geocode(zip_code: Optional[str] = None, latitude: Optional[float] = None, longitude: Optional[float] = None, city: Optional[str] = None, state: Optional[str] = None, country_code: str = 'US') -> Location:
if (latitude is None or longitude is None) and (zip_code is None) and (city is None or state is None):
L.ERR(f"Must provide sufficient information for geocoding!")
return None
try:
# Establish the database connection
async with DB.get_connection() as conn:
# Build the SQL query based on the provided parameters
query = "SELECT id, street, city, state, country, latitude, longitude, zip, elevation, datetime, date, ST_Distance(geom, ST_SetSRID(ST_MakePoint($1, $2), 4326)) AS distance FROM Locations"
conditions = []
params = []
if latitude is not None and longitude is not None:
conditions.append("ST_DWithin(geom, ST_SetSRID(ST_MakePoint($1, $2), 4326), 50000)") # 50 km radius
params.extend([longitude, latitude])
if zip_code:
conditions.append("zip = $3 AND country = $4")
params.extend([zip_code, country_code])
if city and state:
conditions.append("city ILIKE $5 AND state ILIKE $6 AND country = $7")
params.extend([city, state, country_code])
if conditions:
query += " WHERE " + " OR ".join(conditions)
query += " ORDER BY distance LIMIT 1;"
L.DEBUG(f"Executing query: {query} with params: {params}")
# Execute the query with the provided parameters
result = await conn.fetchrow(query, *params)
if result:
location_info = Location(
latitude=result['latitude'],
longitude=result['longitude'],
datetime=result['datetime'],
zip=result['zip'],
street=result.get('street', ''),
city=result['city'],
state=result['state'],
country=result['country'],
elevation=result.get('elevation', 0),
distance=result.get('distance')
)
L.DEBUG(f"Found location: {location_info}")
return location_info
else:
L.DEBUG("No location found with provided parameters.")
return None
except Exception as e:
L.ERR(f"Error occurred: {e}")
raise Exception("An error occurred while processing your request")
async def localize_datetime(dt: Union[str, datetime], fetch_loc: bool = False) -> datetime:
try:
# Convert string to datetime if necessary
if isinstance(dt, str):
dt = dateutil_parse(dt)
L.DEBUG(f"Converted string '{dt}' to datetime object.")
if not isinstance(dt, datetime):
raise ValueError("Input must be a string or datetime object.")
# Fetch timezone string
if fetch_loc:
loc = await get_last_location()
tz_str = DynamicTZ.find(loc.latitude, loc.longitude)  # loc is a Location model, not a tuple
else:
tz_str = DynamicTZ.last_timezone
L.DEBUG(f"Retrieved timezone string: {tz_str}")
# Convert timezone string to ZoneInfo object
try:
tz = ZoneInfo(tz_str)
except Exception as e:
L.WARN(f"Invalid timezone string '{tz_str}'. Falling back to UTC. Error: {e}")
tz = ZoneInfo('UTC')
L.DEBUG(f"Using timezone: {tz}")
# Localize datetime
if dt.tzinfo is None:
dt = dt.replace(tzinfo=tz)
L.DEBUG(f"Localized naive datetime to {tz}")
elif dt.tzinfo != tz:
dt = dt.astimezone(tz)
L.DEBUG(f"Converted datetime from {dt.tzinfo} to {tz}")
return dt
except ValueError as e:
L.ERR(f"Error parsing datetime: {e}")
raise
except Exception as e:
L.ERR(f"Unexpected error in localize_datetime: {e}")
raise ValueError(f"Failed to localize datetime: {e}")
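A quick illustration of the two branches above: naive datetimes get the zone attached while aware ones get converted, assuming America/Los_Angeles is the resolved zone.

from datetime import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("America/Los_Angeles")
naive = datetime(2024, 6, 28, 22, 0)
aware_utc = datetime(2024, 6, 29, 5, 0, tzinfo=ZoneInfo("UTC"))

print(naive.replace(tzinfo=tz))   # 2024-06-28 22:00:00-07:00 (zone attached, clock unchanged)
print(aware_utc.astimezone(tz))   # 2024-06-28 22:00:00-07:00 (converted from UTC)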
async def find_override_locations(lat: float, lon: float) -> Optional[str]:
# Load the named-locations YAML file
with open(NAMED_LOCATIONS, 'r') as file:
locations = yaml.safe_load(file)
closest_location = None
closest_distance = float('inf')
# Iterate through each location entry in the file
for location in locations:
loc_name = location.get("name")
loc_lat = location.get("latitude")
loc_lon = location.get("longitude")
loc_radius = location.get("radius")
# Calculate distance using haversine
distance = haversine(lat, lon, loc_lat, loc_lon)
# Check if the distance is within the specified radius
if distance <= loc_radius:
if distance < closest_distance:
closest_distance = distance
closest_location = loc_name
return closest_location
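haversine is imported from sijapi.utilities and is not shown in this diff; a standard great-circle sketch is below. The distance unit must match the radius values in the named-locations file, assumed here to be kilometers.

from math import radians, sin, cos, asin, sqrt

def haversine(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    # Great-circle distance in kilometers between two (lat, lon) points.
    lat1, lon1, lat2, lon2 = map(radians, (lat1, lon1, lat2, lon2))
    dlat = lat2 - lat1
    dlon = lon2 - lon1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    return 2 * 6371 * asin(sqrt(a))  # 6371 km: mean Earth radius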
async def get_elevation(latitude: float, longitude: float, unit: str = "m") -> float:
loop = asyncio.get_running_loop()
# Create a thread pool executor
with ThreadPoolExecutor() as pool:
# Run get_data() in a separate thread
srtm_data = await loop.run_in_executor(pool, get_data)
# Run get_elevation() in a separate thread
elevation = await loop.run_in_executor(
pool,
partial(srtm_data.get_elevation, latitude, longitude)
)
if unit == "m":
return elevation
elif unit == "km":
return elevation / 1000
elif unit == "ft" or unit == "'":
return elevation * 3.280839895
else:
raise ValueError(f"Unsupported unit: {unit}")
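Usage from any coroutine is straightforward; note that the srtm package typically downloads the required elevation tiles on first lookup, so the initial call can be slow (an assumption about its tile caching):

import asyncio

async def main():
    elevation_m = await get_elevation(45.5152, -122.6784)
    elevation_ft = await get_elevation(45.5152, -122.6784, unit="ft")
    print(elevation_m, elevation_ft)

asyncio.run(main())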
async def fetch_locations(start: datetime, end: datetime = None) -> List[Location]:
start_datetime = await localize_datetime(start)
if end is None:
end_datetime = await localize_datetime(start_datetime.replace(hour=23, minute=59, second=59))
else:
end_datetime = await localize_datetime(end)
if start_datetime.time() == datetime.min.time() and end_datetime.time() == datetime.min.time():
end_datetime = end_datetime.replace(hour=23, minute=59, second=59)
L.DEBUG(f"Fetching locations between {start_datetime} and {end_datetime}")
async with DB.get_connection() as conn:
locations = []
# Check for records within the specified datetime range
range_locations = await conn.fetch('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street,
action, device_type, device_model, device_name, device_os
FROM locations
WHERE datetime >= $1 AND datetime <= $2
ORDER BY datetime DESC
''', start_datetime.replace(tzinfo=None), end_datetime.replace(tzinfo=None))
L.DEBUG(f"Range locations query returned: {range_locations}")
locations.extend(range_locations)
if not locations and (end is None or start_datetime.date() == end_datetime.date()):
location_data = await conn.fetchrow('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street,
action, device_type, device_model, device_name, device_os
FROM locations
WHERE datetime < $1
ORDER BY datetime DESC
LIMIT 1
''', start_datetime.replace(tzinfo=None))
L.DEBUG(f"Fallback query returned: {location_data}")
if location_data:
locations.append(location_data)
L.DEBUG(f"Locations found: {locations}")
# Sort location_data based on the datetime field in descending order
sorted_locations = sorted(locations, key=lambda x: x['datetime'], reverse=True)
# Create Location objects directly from the location data
location_objects = [
Location(
latitude=loc['latitude'],
longitude=loc['longitude'],
datetime=loc['datetime'],
elevation=loc.get('elevation'),
city=loc.get('city'),
state=loc.get('state'),
zip=loc.get('zip'),
street=loc.get('street'),
context={
'action': loc.get('action'),
'device_type': loc.get('device_type'),
'device_model': loc.get('device_model'),
'device_name': loc.get('device_name'),
'device_os': loc.get('device_os')
}
) for loc in sorted_locations if loc['latitude'] is not None and loc['longitude'] is not None
]
return location_objects if location_objects else []
# Function to fetch the last location before the specified datetime
async def fetch_last_location_before(datetime: datetime) -> Optional[Location]:
datetime = await localize_datetime(datetime)
L.DEBUG(f"Fetching last location before {datetime}")
async with DB.get_connection() as conn:
location_data = await conn.fetchrow('''
SELECT id, datetime,
ST_X(ST_AsText(location)::geometry) AS longitude,
ST_Y(ST_AsText(location)::geometry) AS latitude,
ST_Z(ST_AsText(location)::geometry) AS elevation,
city, state, zip, street, country,
action
FROM locations
WHERE datetime < $1
ORDER BY datetime DESC
LIMIT 1
''', datetime.replace(tzinfo=None))
if location_data:
L.DEBUG(f"Last location found: {location_data}")
return Location(**location_data)
else:
L.DEBUG("No location found before the specified datetime")
return None
@locate.get("/map/start_date={start_date_str}&end_date={end_date_str}", response_class=HTMLResponse)
async def generate_map_endpoint(start_date_str: str, end_date_str: str):
try:
start_date = await localize_datetime(start_date_str)
end_date = await localize_datetime(end_date_str)
except ValueError:
raise HTTPException(status_code=400, detail="Invalid date format")
html_content = await generate_map(start_date, end_date)
return HTMLResponse(content=html_content)
@locate.get("/map", response_class=HTMLResponse)
async def generate_alltime_map_endpoint():
try:
start_date = await localize_datetime(datetime.fromisoformat("2022-01-01"))
end_date = await localize_datetime(datetime.now())
except ValueError:
raise HTTPException(status_code=400, detail="Invalid date format")
html_content = await generate_map(start_date, end_date)
return HTMLResponse(content=html_content)
async def generate_map(start_date: datetime, end_date: datetime):
locations = await fetch_locations(start_date, end_date)
if not locations:
raise HTTPException(status_code=404, detail="No locations found for the given date range")
# Create a folium map centered around the first location
map_center = [locations[0].latitude, locations[0].longitude]
m = folium.Map(location=map_center, zoom_start=5)
# Add markers for each location
for loc in locations:
folium.Marker(
location=[loc.latitude, loc.longitude],
popup=f"{loc.city}, {loc.state}<br>Elevation: {loc.elevation}m<br>Date: {loc.datetime}",
tooltip=f"{loc.city}, {loc.state}"
).add_to(m)
# Save the map to an HTML file and return the HTML content
map_html = "map.html"
m.save(map_html)
with open(map_html, 'r') as file:
html_content = file.read()
return html_content
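One design note on generate_map: writing every request's output to a shared map.html invites races between concurrent requests. folium maps can render to a string directly via the branca API, so a sketch of a file-free variant of the last few lines would be:

# Render the map in memory instead of round-tripping through map.html.
html_content = m.get_root().render()
return html_content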
async def post_location(location: Location):
L.DEBUG(f"post_location called with {location.datetime}")
async with DB.get_connection() as conn:
try:
context = location.context or {}
action = context.get('action', 'manual')
device_type = context.get('device_type', 'Unknown')
device_model = context.get('device_model', 'Unknown')
device_name = context.get('device_name', 'Unknown')
device_os = context.get('device_os', 'Unknown')
# Parse and localize the datetime
localized_datetime = await localize_datetime(location.datetime)
await conn.execute('''
INSERT INTO locations (
datetime, location, city, state, zip, street, action, device_type, device_model, device_name, device_os,
class_, type, name, display_name, amenity, house_number, road, quarter, neighbourhood,
suburb, county, country_code, country
)
VALUES ($1, ST_SetSRID(ST_MakePoint($2, $3, $4), 4326), $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15,
$16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26)
''', localized_datetime, location.longitude, location.latitude, location.elevation, location.city, location.state,
location.zip, location.street, action, device_type, device_model, device_name, device_os,
location.class_, location.type, location.name, location.display_name,
location.amenity, location.house_number, location.road, location.quarter, location.neighbourhood,
location.suburb, location.county, location.country_code, location.country)
L.INFO(f"Successfully posted location: {location.latitude}, {location.longitude}, {location.elevation} on {localized_datetime}")
return {
'datetime': localized_datetime,
'latitude': location.latitude,
'longitude': location.longitude,
'elevation': location.elevation,
'city': location.city,
'state': location.state,
'zip': location.zip,
'street': location.street,
'action': action,
'device_type': device_type,
'device_model': device_model,
'device_name': device_name,
'device_os': device_os,
'class_': location.class_,
'type': location.type,
'name': location.name,
'display_name': location.display_name,
'amenity': location.amenity,
'house_number': location.house_number,
'road': location.road,
'quarter': location.quarter,
'neighbourhood': location.neighbourhood,
'suburb': location.suburb,
'county': location.county,
'country_code': location.country_code,
'country': location.country
}
except Exception as e:
L.ERR(f"Error posting location {e}")
L.ERR(traceback.format_exc())
return None
@locate.post("/locate")
async def post_locate_endpoint(locations: Union[Location, List[Location]]):
responses = []
if isinstance(locations, Location):
locations = [locations]
for location in locations:
if not location.datetime:
location.datetime = datetime.now(timezone.utc).isoformat()
if not location.elevation:
location.elevation = location.altitude if location.altitude else await get_elevation(location.latitude, location.longitude)
# Ensure context is a dictionary with default values if not provided
if not location.context:
location.context = {
"action": "manual",
"device_type": "Pythonista",
"device_model": "Unknown",
"device_name": "Unknown",
"device_os": "Unknown"
}
# L.DEBUG(f"datetime before localization: {location.datetime}")
location.datetime = await localize_datetime(location.datetime)
# L.DEBUG(f"datetime after localization: {location.datetime}")
L.DEBUG(f"Location received for processing: {location}")
# Perform reverse geocoding
geocoded_location = await reverse_geocode(location.latitude, location.longitude)
if geocoded_location:
L.DEBUG(f"Reverse geocoding result: {geocoded_location}")
for field in location.__fields__:
if getattr(location, field) is None:
setattr(location, field, getattr(geocoded_location, field))
else:
L.WARN(f"Geocoding failed!")
L.DEBUG(f"Final location submitted to database: {location}")
location_entry = await post_location(location)
if location_entry:
responses.append({"location_data": location_entry}) # Add weather data if necessary
else:
L.WARN(f"Posing location to database appears to have failed.")
return {"message": "Locations and weather updated", "results": responses}
async def get_last_location() -> Optional[Location]:
query_datetime = datetime.now(TZ)
L.DEBUG(f"Query_datetime: {query_datetime}")
this_location = await fetch_last_location_before(query_datetime)
if this_location:
L.DEBUG(f"location: {this_location}")
return this_location
return None
@locate.get("/locate", response_model=Location)
async def get_last_location_endpoint() -> JSONResponse:
this_location = await get_last_location()
if this_location:
location_dict = this_location.model_dump()
location_dict["datetime"] = this_location.datetime.isoformat()
return JSONResponse(content=location_dict)
else:
raise HTTPException(status_code=404, detail="No location found before the specified datetime")
@locate.get("/locate/{datetime_str}", response_model=List[Location])
async def get_locate(datetime_str: str, all: bool = False):
try:
date_time = await localize_datetime(datetime_str)
except ValueError as e:
L.ERR(f"Invalid datetime string provided: {datetime_str}")
return ["ERROR: INVALID DATETIME PROVIDED. USE YYYYMMDDHHmmss or YYYYMMDD format."]
locations = await fetch_locations(date_time)
if not locations:
raise HTTPException(status_code=404, detail="No nearby data found for this date and time")
return locations if all else [locations[0]]
future_elevation = """
def get_elevation_srtm(latitude, longitude, srtm_file):
try:
# Open the SRTM dataset
dataset = gdal.Open(srtm_file)
# Get the geotransform and band information
geotransform = dataset.GetGeoTransform()
band = dataset.GetRasterBand(1)
# Calculate the pixel coordinates from the latitude and longitude
x = int((longitude - geotransform[0]) / geotransform[1])
y = int((latitude - geotransform[3]) / geotransform[5])
# Read the elevation value from the SRTM dataset
elevation = band.ReadAsArray(x, y, 1, 1)[0][0]
# Close the dataset
dataset = None
return elevation
except Exception as e:
L.ERR(f"Error: {e}")
return None
"""
def get_elevation2(latitude: float, longitude: float) -> float:
url = f"https://nationalmap.gov/epqs/pqs.php?x={longitude}&y={latitude}&units=Meters&output=json"
try:
response = requests.get(url)
data = response.json()
elevation = data["USGS_Elevation_Point_Query_Service"]["Elevation_Query"]["Elevation"]
return float(elevation)
except Exception as e:
# Handle exceptions (e.g., network errors, API changes) appropriately
raise RuntimeError(f"Error getting elevation data: {str(e)}")
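get_elevation2 blocks the event loop with a synchronous requests call. A hedged async sketch against the same USGS endpoint, using aiohttp (already imported in this module); the response shape is assumed to match the synchronous version above:

async def get_elevation_usgs(latitude: float, longitude: float) -> float:
    # Non-blocking variant of get_elevation2.
    url = f"https://nationalmap.gov/epqs/pqs.php?x={longitude}&y={latitude}&units=Meters&output=json"
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            response.raise_for_status()
            # content_type=None tolerates a loose Content-Type header on the reply
            data = await response.json(content_type=None)
    return float(data["USGS_Elevation_Point_Query_Service"]["Elevation_Query"]["Elevation"])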

File diff suppressed because it is too large

View file

@@ -33,7 +33,7 @@ from sijapi import (
MAC_UN, MAC_PW, MAC_ID, TS_TAILNET, DATA_DIR, SD_IMAGE_DIR, PUBLIC_KEY, OBSIDIAN_VAULT_DIR
)
from sijapi.utilities import bool_convert, sanitize_filename, assemble_journal_path
from sijapi.routers import note, locate
from sijapi.routers import loc, note
serve = APIRouter(tags=["public"])
@@ -69,7 +69,7 @@ def is_valid_date(date_str: str) -> bool:
@serve.get("/notes/{file_path:path}")
async def get_file_endpoint(file_path: str):
try:
date_time = await locate.localize_datetime(file_path);
date_time = await loc.dt(file_path);
absolute_path, local_path = assemble_journal_path(date_time, no_timestamp = True)
except ValueError as e:
L.DEBUG(f"Unable to parse {file_path} as a date, now trying to use it as a local path")

View file

@@ -26,7 +26,7 @@ from collections import defaultdict
from dotenv import load_dotenv
from traceback import format_exc
from sijapi import L, HOME_DIR, TIMING_API_KEY, TIMING_API_URL
from sijapi.routers.locate import localize_datetime
from sijapi.routers import loc
### INITIALIZATIONS ###
time = APIRouter(tags=["private"])
@@ -102,8 +102,8 @@ def truncate_project_title(title):
async def fetch_and_prepare_timing_data(start: datetime, end: Optional[datetime] = None) -> List[Dict]:
# start_date = await localize_datetime(start)
# end_date = await localize_datetime(end) if end else None
# start_date = await loc.dt(start)
# end_date = await loc.dt(end) if end else None
# Adjust the start date to include the day before and format the end date
start_date_adjusted = (start - timedelta(days=1)).strftime("%Y-%m-%dT00:00:00")
end_date_formatted = f"{datetime.strftime(end, '%Y-%m-%d')}T23:59:59" if end else f"{datetime.strftime(start, '%Y-%m-%d')}T23:59:59"
@@ -317,8 +317,8 @@ async def get_timing_markdown3(
):
# Fetch and process timing data
start = await localize_datetime(start_date)
end = await localize_datetime(end_date) if end_date else None
start = await loc.dt(start_date)
end = await loc.dt(end_date) if end_date else None
timing_data = await fetch_and_prepare_timing_data(start, end)
# Retain these for processing Markdown data with the correct timezone
@@ -377,8 +377,8 @@ async def get_timing_markdown(
start: str = Query(..., regex=r"\d{4}-\d{2}-\d{2}"),
end: Optional[str] = Query(None, regex=r"\d{4}-\d{2}-\d{2}")
):
start_date = await localize_datetime(start)
end_date = await localize_datetime(end)
start_date = await loc.dt(start)
end_date = await loc.dt(end)
markdown_formatted_data = await process_timing_markdown(start_date, end_date)
return Response(content=markdown_formatted_data, media_type="text/markdown")
@@ -446,8 +446,8 @@ async def get_timing_json(
):
# Fetch and process timing data
start = await localize_datetime(start_date)
end = await localize_datetime(end_date)
start = await loc.dt(start_date)
end = await loc.dt(end_date)
timing_data = await fetch_and_prepare_timing_data(start, end)
# Convert processed data to the required JSON structure

View file

@@ -10,9 +10,9 @@ from typing import Dict
from datetime import datetime
from shapely.wkb import loads
from binascii import unhexlify
from sijapi import L, VISUALCROSSING_API_KEY, TZ, DB
from sijapi import L, VISUALCROSSING_API_KEY, TZ, DB, GEO
from sijapi.utilities import haversine
from sijapi.routers import locate
from sijapi.routers import loc
weather = APIRouter()
@@ -20,20 +20,20 @@ weather = APIRouter()
async def get_weather(date_time: datetime, latitude: float, longitude: float):
# request_date_str = date_time.strftime("%Y-%m-%d")
L.DEBUG(f"Called get_weather with lat: {latitude}, lon: {longitude}, date_time: {date_time}")
L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in get_weather.")
daily_weather_data = await get_weather_from_db(date_time, latitude, longitude)
fetch_new_data = True
if daily_weather_data:
try:
L.DEBUG(f"Daily weather data from db: {daily_weather_data}")
last_updated = str(daily_weather_data['DailyWeather'].get('last_updated'))
last_updated = await locate.localize_datetime(last_updated)
last_updated = await loc.dt(last_updated)
stored_loc_data = unhexlify(daily_weather_data['DailyWeather'].get('location'))
stored_loc = loads(stored_loc_data)
stored_lat = stored_loc.y
stored_lon = stored_loc.x
stored_ele = stored_loc.z
hourly_weather = daily_weather_data.get('HourlyWeather')
L.DEBUG(f"Hourly: {hourly_weather}")
@@ -53,6 +53,7 @@ async def get_weather(date_time: datetime, latitude: float, longitude: float):
if fetch_new_data:
L.DEBUG(f"We require new data!")
request_date_str = date_time.strftime("%Y-%m-%d")
L.WARN(f"Using {date_time.strftime('%Y-%m-%d')} as our datetime for fetching new data.")
url = f"https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/{latitude},{longitude}/{request_date_str}/{request_date_str}?unitGroup=us&key={VISUALCROSSING_API_KEY}"
try:
async with AsyncClient() as client:
@@ -85,6 +86,7 @@ async def get_weather(date_time: datetime, latitude: float, longitude: float):
async def store_weather_to_db(date_time: datetime, weather_data: dict):
L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in store_weather_to_db")
async with DB.get_connection() as conn:
try:
day_data = weather_data.get('days')[0]
@@ -95,16 +97,21 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
stations_array = day_data.get('stations', []) or []
date_str = date_time.strftime("%Y-%m-%d")
L.WARN(f"Using {date_str} in our query in store_weather_to_db.")
# Get location details from weather data if available
longitude = weather_data.get('longitude')
latitude = weather_data.get('latitude')
elevation = await locate.get_elevation(latitude, longitude) # 152.4 # default until we add a geocoder that can look up actual elevation; weather_data.get('elevation') # assuming 'elevation' key, replace if different
elevation = await GEO.elevation(latitude, longitude)
location_point = f"POINTZ({longitude} {latitude} {elevation})" if longitude and latitude and elevation else None
# Correct for the datetime objects
day_data['datetime'] = await locate.localize_datetime(day_data.get('datetime')) #day_data.get('datetime'))
L.WARN(f"Uncorrected datetime in store_weather_to_db: {day_data['datetime']}")
day_data['datetime'] = await loc.dt(day_data.get('datetime')) #day_data.get('datetime'))
L.WARN(f"Corrected datetime in store_weather_to_db with localized datetime: {day_data['datetime']}")
L.WARN(f"Uncorrected sunrise time in store_weather_to_db: {day_data['sunrise']}")
day_data['sunrise'] = day_data['datetime'].replace(hour=int(day_data.get('sunrise').split(':')[0]), minute=int(day_data.get('sunrise').split(':')[1]))
L.WARN(f"Corrected sunrise time in store_weather_to_db with localized datetime: {day_data['sunrise']}")
day_data['sunset'] = day_data['datetime'].replace(hour=int(day_data.get('sunset').split(':')[0]), minute=int(day_data.get('sunset').split(':')[1]))
daily_weather_params = (
@@ -160,7 +167,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
await asyncio.sleep(0.1)
# hour_data['datetime'] = parse_date(hour_data.get('datetime'))
hour_timestamp = date_str + ' ' + hour_data['datetime']
hour_data['datetime'] = await locate.localize_datetime(hour_timestamp)
hour_data['datetime'] = await loc.dt(hour_timestamp)
L.DEBUG(f"Processing hours now...")
# L.DEBUG(f"Processing {hour_data['datetime']}")
@@ -226,6 +233,7 @@ async def store_weather_to_db(date_time: datetime, weather_data: dict):
async def get_weather_from_db(date_time: datetime, latitude: float, longitude: float):
L.WARN(f"Using {date_time.strftime('%Y-%m-%d %H:%M:%S')} as our datetime in get_weather_from_db.")
async with DB.get_connection() as conn:
query_date = date_time.date()
try:
@@ -238,25 +246,38 @@ async def get_weather_from_db(date_time: datetime, latitude: float, longitude: float):
LIMIT 1
'''
daily_weather_data = await conn.fetchrow(query, query_date, longitude, latitude, longitude, latitude)
if daily_weather_data is None:
daily_weather_record = await conn.fetchrow(query, query_date, longitude, latitude, longitude, latitude)
if daily_weather_record is None:
L.DEBUG(f"No daily weather data retrieved from database.")
return None
# else:
# L.DEBUG(f"Daily_weather_data: {daily_weather_data}")
# Convert asyncpg.Record to a mutable dictionary
daily_weather_data = dict(daily_weather_record)
# Now we can modify the dictionary
daily_weather_data['datetime'] = await loc.dt(daily_weather_data.get('datetime'))
# Query to get hourly weather data
query = '''
SELECT HW.* FROM HourlyWeather HW
WHERE HW.daily_weather_id = $1
'''
hourly_weather_data = await conn.fetch(query, daily_weather_data['id'])
day: Dict = {
'DailyWeather': dict(daily_weather_data),
'HourlyWeather': [dict(row) for row in hourly_weather_data],
hourly_weather_records = await conn.fetch(query, daily_weather_data['id'])
hourly_weather_data = []
for record in hourly_weather_records:
hour_data = dict(record)
hour_data['datetime'] = await loc.dt(hour_data.get('datetime'))
hourly_weather_data.append(hour_data)
day = {
'DailyWeather': daily_weather_data,
'HourlyWeather': hourly_weather_data,
}
# L.DEBUG(f"day: {day}")
L.DEBUG(f"day: {day}")
return day
except Exception as e:
L.ERR(f"Unexpected error occurred: {e}")