Auto-update: Tue Jun 25 17:37:24 PDT 2024
This commit is contained in:
parent
72eeb0ac7f
commit
a60698fc1f
6 changed files with 368 additions and 3 deletions
195
sijapi/helpers/CourtListener/clHooks.py
Normal file
195
sijapi/helpers/CourtListener/clHooks.py
Normal file
|
@ -0,0 +1,195 @@
|
||||||
|
# Standard library
import asyncio
import io
import json
import logging
import os
from datetime import datetime
from pathlib import Path

# Third-party
import aiohttp
import httpx
from fastapi import FastAPI, Request, BackgroundTasks, HTTPException, status
from fastapi.responses import JSONResponse
from PyPDF2 import PdfReader
||||||
|
|
||||||
|
# FastAPI application that receives CourtListener webhook callbacks.
hook = FastAPI()


# /Users/sij/Library/CloudStorage/OneDrive-WELC/Documents - WELC-Docket
SYNC_FOLDER = Path(__file__).resolve().parent.parent  # sync root, two levels above this file
HOME_FOLDER = Path.home()
DOCKETS_FOLDER = HOME_FOLDER / "Dockets"            # downloaded docket PDFs land here
SEARCH_FOLDER = HOME_FOLDER / "Watched Cases"       # search-alert downloads land here
SCRIPTS_FOLDER = SYNC_FOLDER / ".scripts"           # holds caseTable.json
REQUESTS_FOLDER = HOME_FOLDER / "sync" / "requests" # raw webhook payloads are archived here
COURTLISTENER_BASE_URL = "https://www.courtlistener.com"
COURTLISTENER_DOCKETS_URL = "https://www.courtlistener.com/api/rest/v3/dockets/"
# NOTE(review): hard-coded API token committed to source — should move to an
# environment variable / secrets store and be rotated.
COURTLISTENER_API_KEY = "efb5fe00f3c6c88d65a32541260945befdf53a7e"

# Case lookup table: maps docket id -> {"code": ..., "shortname": ...}.
# Loaded once at import time; a missing file raises at startup.
with open(SCRIPTS_FOLDER / 'caseTable.json', 'r') as file:
    CASE_TABLE = json.load(file)
|
||||||
|
|
||||||
|
@hook.get("/health")
async def health():
    """Liveness probe: always reports the service as running."""
    return {"status": "ok"}
|
||||||
|
|
||||||
|
@hook.post("/cl/docket")
async def respond(request: Request, background_tasks: BackgroundTasks):
    """Handle a CourtListener docket-alert webhook.

    Archives the raw payload under REQUESTS_FOLDER, queues one background
    task per result, and acknowledges immediately with HTTP 200.
    """
    client_ip = request.client.host
    logging.info(f"Received request from IP: {client_ip}")

    body = await request.json()
    payload = body['payload']
    docket_results = payload['results']

    # Archive the payload before any processing, for auditing/debugging.
    stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    archive_path = REQUESTS_FOLDER / f"{stamp}-{client_ip}_docket.json"
    with open(archive_path, 'w') as archive:
        json.dump(payload, archive, indent=2)

    for item in docket_results:
        background_tasks.add_task(process_docket, item)

    return JSONResponse(content={"message": "Received"}, status_code=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
async def process_docket(result):
    """Open a fresh httpx client and hand one docket result to the worker."""
    async with httpx.AsyncClient() as client:
        await process_docket_result(result, client)
|
||||||
|
|
||||||
|
|
||||||
|
async def process_docket_result(result, session):
    """Download every recap document attached to one docket-alert result.

    Looks up the case code/shortname in CASE_TABLE, fetches docket metadata
    from the CourtListener API, then downloads each document into
    DOCKETS_FOLDER/<shortname>/Docket/.

    NOTE(review): the `session` argument (an httpx client passed in by
    process_docket) is shadowed below by a new aiohttp session and never
    used for any request — confirm intent and clean up.
    """
    docket = str(result.get('docket'))
    case_code, case_shortname = get_case_details(docket)
    date_filed = result.get('date_filed', 'No Date Filed')

    # Normalise the filing date to YYYYMMDD for use in file names;
    # unparseable/missing dates fall back to a literal marker.
    try:
        date_filed_formatted = datetime.strptime(date_filed, '%Y-%m-%d').strftime('%Y%m%d')
    except ValueError:
        date_filed_formatted = 'NoDateFiled'

    # Fetching court docket information from the API
    url = f"{COURTLISTENER_DOCKETS_URL}?id={docket}"
    headers = {'Authorization': f'Token {COURTLISTENER_API_KEY}'}
    async with aiohttp.ClientSession() as session:  # shadows the `session` parameter
        async with session.get(url, headers=headers) as response:
            if response.status == 200:
                logging.info(f"Fetching CourtListener docket information for {docket}...")
                data = await response.json()
                court_docket = data['results'][0]['docket_number_core']
                court_docket = f"{court_docket[:2]}-cv-{court_docket[2:]}"  # Formatting the docket number
                case_name = data['results'][0]['case_name']
                logging.info(f"Obtained from CourtListener: docket {court_docket}, case name {case_name}.")
            else:
                # Non-200 responses fall back to placeholder values; note that
                # court_docket / case_name are not used below in this version.
                logging.info("Failed to fetch data from CourtListener API.")
                court_docket = 'NoCourtDocket'
                case_name = 'NoCaseName'

    for document in result.get('recap_documents', []):
        filepath_ia = document.get('filepath_ia')
        filepath_local = document.get('filepath_local')

        # Prefer the Internet Archive copy; fall back to CourtListener's
        # locally hosted copy; skip the document if neither is present.
        if filepath_ia:
            file_url = filepath_ia
            logging.info(f"Found IA file at {file_url}.")
        elif filepath_local:
            file_url = f"{COURTLISTENER_BASE_URL}/{filepath_local}"
            logging.info(f"Found local file at {file_url}.")
        else:
            logging.info(f"No file URL found in filepath_ia or filepath_local for one of the documents.")
            continue

        document_number = document.get('document_number', 'NoDocumentNumber')
        # Sanitise the description for use in a file name.
        description = document.get('description', 'NoDescription').replace(" ", "_").replace("/", "_")
        description = description[:50]  # Truncate description
        # case_shortname = case_name # TEMPORARY OVERRIDE
        file_name = f"{case_code}_{document_number}_{date_filed_formatted}_{description}.pdf"
        target_path = Path(DOCKETS_FOLDER) / case_shortname / "Docket" / file_name
        target_path.parent.mkdir(parents=True, exist_ok=True)
        # NOTE(review): `session` here is the aiohttp session name bound above,
        # already exited; download_file opens its own session regardless.
        await download_file(file_url, target_path, session)
        logging.info(f"Downloaded {file_name} to {target_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_case_details(docket):
    """Return (case_code, shortname) for a docket id from CASE_TABLE.

    Unknown dockets fall back to ("000", "UNKNOWN").
    """
    fallback = {"code": "000", "shortname": "UNKNOWN"}
    entry = CASE_TABLE.get(str(docket), fallback)
    return entry.get("code"), entry.get("shortname")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def download_file(url: str, path: Path, session: aiohttp.ClientSession = None):
    """Download *url* to *path*, keeping only responses that are valid PDFs.

    The response is buffered fully in memory and validated with PyPDF2
    before anything is written, so a non-PDF or truncated download never
    leaves a corrupt file on disk. 403 responses and non-PDF content types
    are logged and skipped; all other errors are caught and logged.

    NOTE(review): the `session` parameter is accepted for interface
    compatibility but was never used by the original code (it was shadowed
    by a new session); a dedicated session is still opened here.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36'
    }
    # Open a private session under a distinct name instead of shadowing the
    # `session` parameter (the original bug made the parameter dead).
    async with aiohttp.ClientSession() as http:
        logging.info(f"Attempting to download {url} to {path}.")
        try:
            async with http.get(url, headers=headers, allow_redirects=True) as response:
                if response.status == 403:
                    logging.error(f"Access denied (403 Forbidden) for URL: {url}. Skipping download.")
                    return
                response.raise_for_status()

                # Compare only the media type, ignoring parameters such as
                # "; charset=utf-8" that some servers append (the original
                # strict equality rejected those valid PDF responses).
                content_type = response.headers.get('Content-Type')
                if not content_type or content_type.split(';')[0].strip() != 'application/pdf':
                    logging.error(f"Invalid content type: {content_type}. Skipping download.")
                    return

                # Buffer the whole download in memory so it can be validated
                # before touching the filesystem.
                buffer = io.BytesIO()
                async for chunk in response.content.iter_chunked(1024):
                    buffer.write(chunk)
                buffer.seek(0)

                # Validate the downloaded PDF content.
                try:
                    PdfReader(buffer)
                except Exception as e:
                    logging.error(f"Invalid PDF content: {str(e)}. Skipping download.")
                    return

                # Valid PDF: persist it to disk.
                path.parent.mkdir(parents=True, exist_ok=True)
                with path.open('wb') as file:
                    file.write(buffer.getvalue())
        except Exception as e:
            logging.error(f"Error downloading file: {str(e)}")
|
||||||
|
|
||||||
|
@hook.post("/cl/search")
async def respond_search(request: Request, background_tasks: BackgroundTasks):
    """Handle a CourtListener search-alert webhook.

    Archives the raw payload under REQUESTS_FOLDER, queues one background
    task per result, and acknowledges immediately with HTTP 200.
    """
    client_ip = request.client.host
    logging.info(f"Received request from IP: {client_ip}")

    body = await request.json()
    payload = body['payload']
    search_results = payload['results']

    # Save the payload data
    stamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    archive_path = REQUESTS_FOLDER / f"{stamp}-{client_ip}_search.json"
    with open(archive_path, 'w') as archive:
        json.dump(payload, archive, indent=2)

    for item in search_results:
        background_tasks.add_task(process_search_result, item)

    return JSONResponse(content={"message": "Received"}, status_code=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
async def process_search_result(result):
    """Download the document behind one search-alert result into SEARCH_FOLDER."""
    async with httpx.AsyncClient() as session:
        download_url = result.get('download_url')
        court_id = result.get('court_id')
        case_name_short = result.get('caseNameShort')
        case_name = result.get('caseName')
        logging.info(f"Received payload for case {case_name} ({court_id}) and download url {download_url}")

        # Layout: SEARCH_FOLDER/<court>/<case>/<file>; prefer the short
        # case name for the folder when one is provided.
        court_folder = court_id
        case_folder = case_name_short if case_name_short else case_name

        file_name = download_url.split('/')[-1]
        target_path = Path(SEARCH_FOLDER) / court_folder / case_folder / file_name
        target_path.parent.mkdir(parents=True, exist_ok=True)

        await download_file(download_url, target_path, session)
        logging.info(f"Downloaded {file_name} to {target_path}")
|
32
sijapi/helpers/CourtListener/subscribeAlerts.py
Normal file
32
sijapi/helpers/CourtListener/subscribeAlerts.py
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
"""Subscribe to a CourtListener docket alert for every docket in caseTable.json."""
import json

import requests

# Load the caseTable.json file
with open('caseTable.json', 'r') as file:
    case_table = json.load(file)

# Set the base URL and authorization token
base_url = "https://www.courtlistener.com/api/rest/v3/docket-alerts/"
# NOTE(review): hard-coded API token committed to source — move to an
# environment variable and rotate it.
auth_token = "a90d3f2de489aa4138a32133ca8bfec9d85fecfa"

# Iterate through each key (docket ID) in the case table
for docket_id in case_table.keys():
    # Set the data payload and headers for the request
    data = {'docket': docket_id}
    headers = {'Authorization': f'Token {auth_token}'}

    try:
        # Send the POST request to the CourtListener API
        response = requests.post(base_url, data=data, headers=headers)

        # Fix: REST resource creation normally returns 201 Created, so the
        # original `== 200` check reported successful creations as failures.
        if response.status_code in (200, 201):
            print(f"Successfully created docket alert for docket ID: {docket_id}")
        else:
            print(f"Failed to create docket alert for docket ID: {docket_id}")
            print(f"Status code: {response.status_code}")
            print(f"Response content: {response.content}")

    except requests.exceptions.RequestException as e:
        print(f"Error occurred while creating docket alert for docket ID: {docket_id}")
        print(f"Error message: {str(e)}")
|
17
sijapi/helpers/Obsidian/month_o_banners.sh
Executable file
17
sijapi/helpers/Obsidian/month_o_banners.sh
Executable file
|
@ -0,0 +1,17 @@
|
||||||
|
#!/bin/bash

# Request a banner note for every day of July 2024.
# (Fixed stale comment: the loop covers 01-31, not "18 to 30".)
for i in $(seq -w 01 31); do
	# Construct the date string
	DATE="2024-07-${i}"

	# Print the date being processed (optional)
	echo "Processing date: $DATE"

	# Run the curl command. curl runs in the foreground, so each request
	# finishes before the next iteration; the original trailing `wait` was
	# a no-op (no background jobs are ever launched) and has been removed.
	curl -X POST -H "Content-Type: application/json" -d '{"mood": "joyful"}' "http://localhost:4444/note/banner?dt=$DATE"
done
63
sijapi/helpers/Pythonista/GPS.py
Normal file
63
sijapi/helpers/Pythonista/GPS.py
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
import location
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
def get_current_location():
    """Take one GPS fix from the device and return it as a dict.

    Returns a dict with 'latitude', 'longitude', 'elevation' (metres,
    from the fix's 'altitude' field) and a UTC ISO-8601 'datetime' stamp.
    """
    location.start_updates()
    time.sleep(1)  # Give it a moment to get an accurate fix
    fix = location.get_location()
    location.stop_updates()

    stamp = datetime.now(timezone.utc).isoformat()
    return {
        'latitude': fix['latitude'],
        'longitude': fix['longitude'],
        'elevation': fix['altitude'],
        'datetime': stamp,
    }
|
||||||
|
|
||||||
|
def save_location_data(data, context, filename='location_log.json'):
    """Append one location record, tagged with *context*, to a JSON log file.

    The file holds a single JSON array and is created on first use. The
    *data* dict is mutated in place (gains a 'context' key). Raises
    json.JSONDecodeError if an existing file contains invalid JSON.
    """
    # EAFP fix: try the read and fall back on FileNotFoundError instead of
    # the original exists()/open() pair, which had a race window between
    # the check and the open.
    try:
        with open(filename, 'r') as f:
            existing_data = json.load(f)
    except FileNotFoundError:
        existing_data = []

    data['context'] = context
    existing_data.append(data)

    with open(filename, 'w') as f:
        json.dump(existing_data, f, indent=4)
|
||||||
|
|
||||||
|
# Build the context dict: use the CLI JSON argument when given, filling in
# any missing keys from the defaults; otherwise use the defaults as-is.
_DEFAULT_CONTEXT = {
    'action': 'manual',
    'device_type': 'Pythonista',
    'device_model': None,
    'device_name': None,
    'device_os': None,
}

if len(sys.argv) > 1:
    try:
        context = json.loads(sys.argv[1])
        for key, value in _DEFAULT_CONTEXT.items():
            context.setdefault(key, value)
    except json.JSONDecodeError:
        print("Error: The provided argument is not a valid JSON.")
        sys.exit(1)
else:
    context = dict(_DEFAULT_CONTEXT)

location_data = get_current_location()
save_location_data(location_data, context)
print(f"Location data: {location_data} with context '{context}' saved locally.")
time.sleep(5)
|
57
sijapi/helpers/Pythonista/uploadGPS.py
Normal file
57
sijapi/helpers/Pythonista/uploadGPS.py
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
import requests
|
||||||
|
import os
|
||||||
|
import json
|
||||||
|
|
||||||
|
filename = 'location_log.json'
|
||||||
|
server = 'https://api.sij.ai'
|
||||||
|
|
||||||
|
def upload_location_data(data):
    """POST the location log to the server; delete the local file on success."""
    # NOTE(review): bearer token hard-coded in source — consider moving it
    # to an environment variable.
    headers = {
        'Authorization': 'Bearer sk-NhrtQwCHNdK5sRZC',
        'Content-Type': 'application/json'
    }

    try:
        response = requests.post(f'{server}/locate', json=data, headers=headers)
    except requests.RequestException as e:
        print(f'Error posting data: {e}')
        return

    if response.status_code == 200:
        print('Location and weather updated successfully.')
        os.remove(filename)  # log uploaded; clear it so entries aren't re-sent
    else:
        print(f'Failed to post data. Status code: {response.status_code}')
        print(response.text)
|
||||||
|
|
||||||
|
# Load the local location log (if any), normalise each record, and upload it.
if not os.path.exists(filename):
    print('No data to upload.')
else:
    try:
        with open(filename, 'r') as f:
            data = json.load(f)

        # Ensure all datetime fields are correctly named and add default
        # context if missing.
        for location in data:
            if 'date' in location:
                location['datetime'] = location.pop('date')

            # Ensure context dictionary exists with all required keys.
            if 'context' not in location:
                location['context'] = {
                    'action': 'manual',
                    'device_type': 'Pythonista',
                    'device_model': None,
                    'device_name': None,
                    'device_os': None
                }
            else:
                context = location['context']
                context.setdefault('action', 'manual')
                context.setdefault('device_type', 'Pythonista')
                context.setdefault('device_model', None)
                context.setdefault('device_name', None)
                context.setdefault('device_os', None)

        upload_location_data(data)
    except FileNotFoundError:
        # Fix: these two messages were f-strings with no placeholders, so the
        # actual log file name was never printed.
        print(f'File {filename} not found.')
    except json.JSONDecodeError:
        print(f'Error decoding JSON from {filename}.')
    except Exception as e:
        print(f'Unexpected error: {e}')
|
|
@ -1,3 +1,4 @@
|
||||||
|
# IMPORTANT: This is just here as a placeholder. It will not work. Export your own widget.shell file from Secure Shellfish to enable the alert and widget functionalities.
|
||||||
# Updates watch complications for Secure ShellFish
|
# Updates watch complications for Secure ShellFish
|
||||||
#
|
#
|
||||||
# This command sends encrypted data through push notifications such
|
# This command sends encrypted data through push notifications such
|
||||||
|
@ -32,9 +33,9 @@ widget --text "50/100"
|
||||||
# return 0
|
# return 0
|
||||||
# fi
|
# fi
|
||||||
|
|
||||||
# local key=d7e810e7601cd296a05776c169b4fe97a6a5ee1fd46abe38de54f415732b3f4b
|
# local key=SECRET
|
||||||
# local user=WuqPwm1VpGijF4U5AnIKzqNMVWGioANTRjJoonPm
|
# local user=SECRET
|
||||||
# local iv=ab5bbeb426015da7eedcee8bee3dffb7
|
# local iv=SECRET
|
||||||
|
|
||||||
# local plain=$(
|
# local plain=$(
|
||||||
# echo Secure ShellFish Widget 2.0
|
# echo Secure ShellFish Widget 2.0
|
Loading…
Add table
Reference in a new issue