diff --git a/sijapi/helpers/article.py b/sijapi/helpers/article.py
deleted file mode 100755
index c76a6e1..0000000
--- a/sijapi/helpers/article.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/Users/sij/miniforge3/envs/sijapi/bin/python
-import sys
-import asyncio
-from fastapi import BackgroundTasks
-from sijapi.routers.news import process_and_save_article
-
-async def main():
-    if len(sys.argv) != 2:
-        print("Usage: python script.py <article_url>")
-        sys.exit(1)
-
-    url = sys.argv[1]
-    bg_tasks = BackgroundTasks()
-
-    try:
-        result = await process_and_save_article(bg_tasks, url)
-        print(result)
-    except Exception as e:
-        print(f"Error processing article: {str(e)}")
-        sys.exit(1)
-
-if __name__ == "__main__":
-    asyncio.run(main())
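
One behavioral note on this deleted helper: Starlette's `BackgroundTasks`, when constructed outside a request/response cycle, is just a task queue, so anything `process_and_save_article` schedules on it never actually runs unless the caller awaits the queue itself. A minimal sketch, assuming the function does queue work on the object it is handed:

```python
# Hedged sketch: flush tasks queued on a standalone BackgroundTasks.
# Outside a FastAPI response cycle nothing executes them automatically.
from fastapi import BackgroundTasks

async def run_with_tasks(url: str):
    bg_tasks = BackgroundTasks()
    result = await process_and_save_article(bg_tasks, url)
    await bg_tasks()  # BackgroundTasks is awaitable; this runs queued tasks in order
    return result
```
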
diff --git a/sijapi/helpers/db/docker-compose.yaml b/sijapi/helpers/db/docker-compose.yaml
deleted file mode 100644
index de8fa37..0000000
--- a/sijapi/helpers/db/docker-compose.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-version: '3.8'
-
-services:
-  db:
-    image: postgis/postgis:16-3.4
-    container_name: sij_postgres
-    environment:
-      POSTGRES_DB: sij
-      POSTGRES_USER: sij
-      POSTGRES_PASSWORD: Synchr0!
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
-    ports:
-      - "5432:5432"
-    networks:
-      - sij_network
-
-networks:
-  sij_network:
-    driver: bridge
-
-volumes:
-  postgres_data:
-
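
A reminder that applies to this deleted compose file: the postgres image only runs `/docker-entrypoint-initdb.d/` scripts when the data volume is empty, so edits to `init-db.sh` are silently ignored once `postgres_data` exists. Re-initializing from scratch (this destroys all data) looks like:

```bash
docker compose down -v   # drops the postgres_data volume
docker compose up -d     # fresh cluster; init-db.sh runs again
```
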
diff --git a/sijapi/helpers/db/init-db.sh b/sijapi/helpers/db/init-db.sh
deleted file mode 100755
index d354e40..0000000
--- a/sijapi/helpers/db/init-db.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
-    CREATE EXTENSION IF NOT EXISTS postgis;
-    CREATE EXTENSION IF NOT EXISTS postgis_topology;
-EOSQL
-
-# Modify pg_hba.conf to allow connections from Tailscale network
-echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf
-
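
Note that the appended `trust` rule lets any host in 100.64.64.0/24 connect with no password at all. If password auth is preferred, the same one-line append works with `scram-sha-256` (the default password scheme on recent PostgreSQL, including the 16.x image used here):

```bash
# Require password auth instead of trust for the Tailscale range
echo "host all all 100.64.64.0/24 scram-sha-256" >> /var/lib/postgresql/data/pg_hba.conf
```
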
diff --git a/sijapi/helpers/fromvm/CaPLSS.py b/sijapi/helpers/fromvm/CaPLSS.py
deleted file mode 100644
index 9d1457a..0000000
--- a/sijapi/helpers/fromvm/CaPLSS.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env python3
-
-import requests
-import json
-import time
-import os
-import subprocess
-import sys
-from requests.adapters import HTTPAdapter
-from urllib3.util.retry import Retry
-from datetime import datetime
-
-# Environment variables for database connection
-DB_NAME = os.getenv('DB_NAME', 'sij')
-DB_USER = os.getenv('DB_USER', 'sij')
-DB_PASSWORD = os.getenv('DB_PASSWORD', 'Synchr0!')
-DB_HOST = os.getenv('DB_HOST', 'localhost')
-DB_PORT = os.getenv('DB_PORT', '5432')
-
-def get_feature_count(url):
-    params = {
-        'where': '1=1',
-        'returnCountOnly': 'true',
-        'f': 'json'
-    }
-    retries = Retry(total=10, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
-    with requests.Session() as session:
-        session.mount("https://", HTTPAdapter(max_retries=retries))
-        response = session.get(url, params=params, timeout=30)
-        response.raise_for_status()
-        data = response.json()
-    return data.get('count', 0)
-
-def fetch_features(url, offset, num, max_retries=5):
-    params = {
-        'where': '1=1',
-        'outFields': '*',
-        'geometryPrecision': 6,
-        'outSR': 4326,
-        'f': 'json',
-        'resultOffset': offset,
-        'resultRecordCount': num
-    }
-    for attempt in range(max_retries):
-        try:
-            retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
-            with requests.Session() as session:
-                session.mount("https://", HTTPAdapter(max_retries=retries))
-                response = session.get(url, params=params, timeout=30)
-                response.raise_for_status()
-                return response.json()
-        except requests.exceptions.RequestException as e:
-            print(f"Error fetching features (attempt {attempt + 1}/{max_retries}): {e}")
-            if attempt == max_retries - 1:
-                raise
-            time.sleep(5 * (attempt + 1))  # Linear backoff: 5s, 10s, 15s, ... between retries
-
-def download_layer(layer_num, layer_name):
-    base_dir = os.path.expanduser('~/data')
-    file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')
-    temp_file_path = os.path.join(base_dir, f'PLSS_{layer_name}_temp.json')
-
-    url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"
-
-    total_count = get_feature_count(url)
-    print(f"Total {layer_name} features: {total_count}")
-
-    batch_size = 1000
-    offset = 0
-    all_features = []
-
-    # Check if temporary file exists and load its content
-    if os.path.exists(temp_file_path):
-        with open(temp_file_path, 'r') as f:
-            all_features = json.load(f)
-        offset = len(all_features)
-        print(f"Resuming download from offset {offset}")
-
-    try:
-        while offset < total_count:
-            print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
-            data = fetch_features(url, offset, batch_size)
-
-            new_features = data.get('features', [])
-            if not new_features:
-                break
-
-            all_features.extend(new_features)
-            offset += len(new_features)
-
-            # Progress indicator
-            progress = len(all_features) / total_count
-            bar_length = 30
-            filled_length = int(bar_length * progress)
-            bar = '=' * filled_length + '-' * (bar_length - filled_length)
-            print(f'\rProgress: [{bar}] {progress:.1%} ({len(all_features)}/{total_count} features)', end='', flush=True)
-
-            # Save progress to temporary file
-            with open(temp_file_path, 'w') as f:
-                json.dump(all_features, f)
-
-            time.sleep(1)
-
-        print(f"\nTotal {layer_name} features fetched: {len(all_features)}")
-
-        geojson_features = [
-            {
-                "type": "Feature",
-                "properties": feature['attributes'],
-                "geometry": feature['geometry']
-            } for feature in all_features
-        ]
-
-        full_geojson = {
-            "type": "FeatureCollection",
-            "features": geojson_features
-        }
-
-        os.makedirs(base_dir, exist_ok=True)
-
-        with open(file_path, 'w') as f:
-            json.dump(full_geojson, f)
-
-        print(f"GeoJSON file saved as '{file_path}'")
-
-        # Remove temporary file
-        if os.path.exists(temp_file_path):
-            os.remove(temp_file_path)
-
-        return file_path
-    except Exception as e:
-        print(f"\nError during download: {e}")
-        print(f"Partial data saved in {temp_file_path}")
-        return None
-
-
-def check_postgres_connection():
-    try:
-        subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME, '-c', 'SELECT 1;'],
-                       check=True, capture_output=True, text=True)
-        return True
-    except subprocess.CalledProcessError:
-        return False
-
-def check_postgis_extension():
-    try:
-        result = subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
-                                 '-c', "SELECT 1 FROM pg_extension WHERE extname = 'postgis';"],
-                                check=True, capture_output=True, text=True)
-        return '1' in result.stdout
-    except subprocess.CalledProcessError:
-        return False
-
-def create_postgis_extension():
-    try:
-        subprocess.run(['psql', '-h', DB_HOST, '-p', DB_PORT, '-U', DB_USER, '-d', DB_NAME,
-                        '-c', "CREATE EXTENSION IF NOT EXISTS postgis;"],
-                       check=True, capture_output=True, text=True)
-        print("PostGIS extension created successfully.")
-    except subprocess.CalledProcessError as e:
-        print(f"Error creating PostGIS extension: {e}")
-        sys.exit(1)
-
-def import_to_postgis(file_path, table_name):
-    if not check_postgres_connection():
-        print("Error: Unable to connect to PostgreSQL. Please check your connection settings.")
-        sys.exit(1)
-
-    if not check_postgis_extension():
-        print("PostGIS extension not found. Attempting to create it...")
-        create_postgis_extension()
-
-    ogr2ogr_command = [
-        'ogr2ogr',
-        '-f', 'PostgreSQL',
-        f'PG:dbname={DB_NAME} user={DB_USER} password={DB_PASSWORD} host={DB_HOST} port={DB_PORT}',
-        file_path,
-        '-nln', table_name,
-        '-overwrite'
-    ]
-
-    try:
-        subprocess.run(ogr2ogr_command, check=True, capture_output=True, text=True)
-        print(f"Data successfully imported into PostGIS table: {table_name}")
-    except subprocess.CalledProcessError as e:
-        print(f"Error importing data into PostGIS: {e}")
-        print(f"Command that failed: {e.cmd}")
-        print(f"Error output: {e.stderr}")
-
-def check_ogr2ogr():
-    try:
-        subprocess.run(['ogr2ogr', '--version'], check=True, capture_output=True, text=True)
-        return True
-    except subprocess.CalledProcessError:
-        return False
-    except FileNotFoundError:
-        return False
-
-def main():
-    if not check_ogr2ogr():
-        print("Error: ogr2ogr not found. Please install GDAL/OGR tools.")
-        print("On Debian: sudo apt-get install gdal-bin")
-        print("On macOS with Homebrew: brew install gdal")
-        sys.exit(1)
-
-    try:
-        township_file = os.path.expanduser('~/data/PLSS_Townships.geojson')
-        if not os.path.exists(township_file):
-            township_file = download_layer(1, "Townships")
-        if township_file:
-            import_to_postgis(township_file, "public.plss_townships")
-
-        section_file = os.path.expanduser('~/data/PLSS_Sections.geojson')
-        if not os.path.exists(section_file):
-            section_file = download_layer(2, "Sections")
-        if section_file:
-            import_to_postgis(section_file, "public.plss_sections")
-
-    except requests.exceptions.RequestException as e:
-        print(f"Error fetching data: {e}")
-    except Exception as e:
-        print(f"An unexpected error occurred: {e}")
-
-if __name__ == "__main__":
-    main()
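
This script pages with `resultOffset`/`resultRecordCount` against a count fetched up front. ArcGIS REST query responses also carry an `exceededTransferLimit` flag, which can drive the loop without a separate count request; a sketch reusing `fetch_features` from the deleted script:

```python
def fetch_all(url, batch_size=1000):
    """Page until the server stops reporting exceededTransferLimit."""
    features, offset = [], 0
    while True:
        data = fetch_features(url, offset, batch_size)
        page = data.get('features', [])
        features.extend(page)
        offset += len(page)
        # the flag is set while more records remain beyond this page
        if not page or not data.get('exceededTransferLimit', False):
            return features
```
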
diff --git a/sijapi/helpers/fromvm/CaPLSS_downloader_and_importer.py b/sijapi/helpers/fromvm/CaPLSS_downloader_and_importer.py
deleted file mode 100644
index 5b3ea00..0000000
--- a/sijapi/helpers/fromvm/CaPLSS_downloader_and_importer.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# CaPLSS_downloader_and_importer.py
-import requests
-import json
-import time
-import os
-import subprocess
-from requests.adapters import HTTPAdapter
-from urllib3.util.retry import Retry
-
-def get_feature_count(url):
-    params = {
-        'where': '1=1',
-        'returnCountOnly': 'true',
-        'f': 'json'
-    }
-    retries = Retry(total=10, backoff_factor=0.5)
-    adapter = HTTPAdapter(max_retries=retries)
-    session = requests.Session()
-    session.mount("https://", adapter)
-
-    response = session.get(url, params=params, timeout=15)  # 15s timeout keeps a hung request from stalling the run
-    response.raise_for_status()
-    data = response.json()
-    return data.get('count', 0)
-
-
-def fetch_features(url, offset, num):
-    params = {
-        'where': '1=1',
-        'outFields': '*',
-        'geometryPrecision': 6,
-        'outSR': 4326,
-        'f': 'json',
-        'resultOffset': offset,
-        'resultRecordCount': num
-    }
-    response = requests.get(url, params=params)
-    response.raise_for_status()
-    return response.json()
-
-def download_layer(layer_num, layer_name):
-    url = f"https://gis.blm.gov/arcgis/rest/services/Cadastral/BLM_Natl_PLSS_CadNSDI/MapServer/{layer_num}/query"
-    
-    total_count = get_feature_count(url)
-    print(f"Total {layer_name} features: {total_count}")
-
-    batch_size = 1000
-    offset = 0
-    all_features = []
-
-    while offset < total_count:
-        print(f"Fetching {layer_name} features {offset} to {offset + batch_size}...")
-        data = fetch_features(url, offset, batch_size)
-        
-        new_features = data.get('features', [])
-        if not new_features:
-            break
-
-        all_features.extend(new_features)
-        offset += len(new_features)
-
-        print(f"Progress: {len(all_features)}/{total_count} features")
-
-        time.sleep(1)  # Be nice to the server
-
-    print(f"Total {layer_name} features fetched: {len(all_features)}")
-
-    # Convert to GeoJSON
-    geojson_features = [
-        {
-            "type": "Feature",
-            "properties": feature['attributes'],
-            "geometry": feature['geometry']
-        } for feature in all_features
-    ]
-
-    full_geojson = {
-        "type": "FeatureCollection",
-        "features": geojson_features
-    }
-
-    # Define a base directory that exists on both macOS and Debian
-    base_dir = os.path.expanduser('~/data')
-    os.makedirs(base_dir, exist_ok=True)  # Create the directory if it doesn't exist
-    
-    # Use os.path.join to construct the file path
-    file_path = os.path.join(base_dir, f'PLSS_{layer_name}.geojson')
-    
-    # Save to file
-    with open(file_path, 'w') as f:
-        json.dump(full_geojson, f)
-    
-    print(f"GeoJSON file saved as '{file_path}'")
-    
-    return file_path
-
-def import_to_postgis(file_path, table_name):
-    db_name = 'sij'
-    db_user = 'sij'
-    db_password = 'Synchr0!'
-
-    ogr2ogr_command = [
-        'ogr2ogr',
-        '-f', 'PostgreSQL',
-        f'PG:dbname={db_name} user={db_user} password={db_password}',
-        file_path,
-        '-nln', table_name,
-        '-overwrite'
-    ]
-
-    subprocess.run(ogr2ogr_command, check=True)
-    print(f"Data successfully imported into PostGIS table: {table_name}")
-
-def main():
-    try:
-        # Download and import Townships (Layer 1)
-        township_file = download_layer(1, "Townships")
-        import_to_postgis(township_file, "public.plss_townships")
-
-        # Download and import Sections (Layer 2)
-        section_file = download_layer(2, "Sections")
-        import_to_postgis(section_file, "public.plss_sections")
-
-    except requests.exceptions.RequestException as e:
-        print(f"Error fetching data: {e}")
-    except subprocess.CalledProcessError as e:
-        print(f"Error importing data into PostGIS: {e}")
-    except Exception as e:
-        print(f"An unexpected error occurred: {e}")
-
-if __name__ == "__main__":
-    main()
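
This older downloader is a strict subset of `CaPLSS.py` above (no retries, resume support, or connection checks). One note that applies to both: GDAL's PostgreSQL driver typically imports large GeoJSON files much faster in COPY mode, enabled via a runtime config option (credentials elided here):

```bash
ogr2ogr -f PostgreSQL "PG:dbname=sij user=sij" PLSS_Townships.geojson \
  -nln public.plss_townships -overwrite --config PG_USE_COPY YES
```
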
diff --git a/sijapi/helpers/fromvm/CalFire_THP_scraper.py b/sijapi/helpers/fromvm/CalFire_THP_scraper.py
deleted file mode 100644
index 92909a6..0000000
--- a/sijapi/helpers/fromvm/CalFire_THP_scraper.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import requests
-import PyPDF2
-import io
-import re
-
-def scrape_data_from_pdf(url):
-    response = requests.get(url)
-    pdf_file = io.BytesIO(response.content)
-    
-    pdf_reader = PyPDF2.PdfReader(pdf_file)
-    
-    all_text = ""
-    for page in pdf_reader.pages:
-        all_text += page.extract_text() + "\n"
-    
-    return all_text
-
-def parse_data(raw_data):
-    lines = raw_data.split('\n')
-    data = []
-    current_entry = None
-    
-    for line in lines:
-        line = line.strip()
-        if re.match(r'\d+-\d+-\d+-\w+', line):
-            if current_entry:
-                data.append(current_entry)
-            current_entry = {'Harvest Document': line, 'Raw Data': []}
-        elif current_entry:
-            current_entry['Raw Data'].append(line)
-    
-    if current_entry:
-        data.append(current_entry)
-    
-    return data
-
-def filter_data(data):
-    return [entry for entry in data if any(owner.lower() in ' '.join(entry['Raw Data']).lower() for owner in ["Sierra Pacific", "SPI", "Land & Timber"])]
-
-def extract_location(raw_data):
-    location = []
-    for line in raw_data:
-        if 'MDBM:' in line or 'HBM:' in line:
-            location.append(line)
-    return ' '.join(location)
-
-def extract_plss_coordinates(text):
-    pattern = r'(\w+): T(\d+)([NSEW]) R(\d+)([NSEW]) S(\d+)'
-    return re.findall(pattern, text)
-
-# Main execution
-url = "https://caltreesplans.resources.ca.gov/Caltrees/Report/ShowReport.aspx?module=TH_Document&reportID=492&reportType=LINK_REPORT_LIST"
-raw_data = scrape_data_from_pdf(url)
-
-parsed_data = parse_data(raw_data)
-print(f"Total timber plans parsed: {len(parsed_data)}")
-
-filtered_data = filter_data(parsed_data)
-print(f"Found {len(filtered_data)} matching entries.")
-
-for plan in filtered_data:
-    print("\nHarvest Document:", plan['Harvest Document'])
-    
-    location = extract_location(plan['Raw Data'])
-    print("Location:", location)
-    
-    plss_coordinates = extract_plss_coordinates(location)
-    print("PLSS Coordinates:")
-    for coord in plss_coordinates:
-        meridian, township, township_dir, range_, range_dir, section = coord
-        print(f"  {meridian}: T{township}{township_dir} R{range_}{range_dir} S{section}")
-    
-    print("-" * 50)
diff --git a/sijapi/helpers/fromvm/article.py b/sijapi/helpers/fromvm/article.py
deleted file mode 100755
index c76a6e1..0000000
--- a/sijapi/helpers/fromvm/article.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/Users/sij/miniforge3/envs/sijapi/bin/python
-import sys
-import asyncio
-from fastapi import BackgroundTasks
-from sijapi.routers.news import process_and_save_article
-
-async def main():
-    if len(sys.argv) != 2:
-        print("Usage: python script.py <article_url>")
-        sys.exit(1)
-
-    url = sys.argv[1]
-    bg_tasks = BackgroundTasks()
-
-    try:
-        result = await process_and_save_article(bg_tasks, url)
-        print(result)
-    except Exception as e:
-        print(f"Error processing article: {str(e)}")
-        sys.exit(1)
-
-if __name__ == "__main__":
-    asyncio.run(main())
diff --git a/sijapi/helpers/fromvm/db/docker-compose.yaml b/sijapi/helpers/fromvm/db/docker-compose.yaml
deleted file mode 100644
index de8fa37..0000000
--- a/sijapi/helpers/fromvm/db/docker-compose.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-version: '3.8'
-
-services:
-  db:
-    image: postgis/postgis:16-3.4
-    container_name: sij_postgres
-    environment:
-      POSTGRES_DB: sij
-      POSTGRES_USER: sij
-      POSTGRES_PASSWORD: Synchr0!
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
-    ports:
-      - "5432:5432"
-    networks:
-      - sij_network
-
-networks:
-  sij_network:
-    driver: bridge
-
-volumes:
-  postgres_data:
-
diff --git a/sijapi/helpers/fromvm/db/init-db.sh b/sijapi/helpers/fromvm/db/init-db.sh
deleted file mode 100755
index d354e40..0000000
--- a/sijapi/helpers/fromvm/db/init-db.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
-    CREATE EXTENSION IF NOT EXISTS postgis;
-    CREATE EXTENSION IF NOT EXISTS postgis_topology;
-EOSQL
-
-# Modify pg_hba.conf to allow connections from Tailscale network
-echo "host all all 100.64.64.0/24 trust" >> /var/lib/postgresql/data/pg_hba.conf
-
diff --git a/sijapi/helpers/fromvm/elevenlabs_history_ids_20240630_131617.json b/sijapi/helpers/fromvm/elevenlabs_history_ids_20240630_131617.json
deleted file mode 100644
index d10f7da..0000000
--- a/sijapi/helpers/fromvm/elevenlabs_history_ids_20240630_131617.json
+++ /dev/null
@@ -1,104 +0,0 @@
-{
-  "history_item_ids": [
-    "ncRYNd0Xef4LiUE74VjP",
-    "13pQLDAPYGIATwW1ySL5",
-    "dhsQNAYTWpcwo1X6rixf",
-    "V7wUip1NJuWAUw26sePF",
-    "mOYMa5lcI7wRHddIQTSa",
-    "mP97iOpA4oG7pwUBthq4",
-    "WTU5nsX6qZCYxLyoT5hq",
-    "15DPGnBgjr74KT3TMbK4",
-    "aCyBS1zoaweVjUoPf2TF",
-    "J8SUMQqZPtoy3Cgdhi3J",
-    "qKHaaJHfqh2je60Wmadb",
-    "2PaugQJ8c4rY44JGlaO5",
-    "TwzxcmYjo6XNebbMabcd",
-    "xdEK7rYq9UofOlkr565b",
-    "wik4jYd97aGMLgttTjC9",
-    "7oXn2yH7gdyhi6sEoWKd",
-    "jv8aZFiVe8gPMrAOBcNT",
-    "B2BctCDkCtLDxEMMBu9z",
-    "4KFO77NHDruNQvXIykwp",
-    "d033NizZaNZPc45fvxCO",
-    "yBKxOxfzsjpZYOFzoIM7",
-    "oEihKwMLWgvvoTLGx4yF",
-    "Q3guBm4hGml0KPAWKl7t",
-    "jaojY1gSafQmqshR48oT",
-    "yqGDMfcceaoceFEEurqa",
-    "oLdnyUp7plGrUMRVQ8Cf",
-    "FZAGCGosYEGMf8GCRFaA",
-    "TrWnXRdGkiH0K9kgwFiS",
-    "th16OEbg3u0XHslT9A33",
-    "856BAsn6dnzF7HeqGPfK",
-    "KjLoAfDXVBqR9s39T25j",
-    "uHQQJMMOfOxPAhEYQXLl",
-    "HO8WCIhkkI7AxwkU5MC6",
-    "9nxdesHWTRLCOd6YgWe9",
-    "tmx5tlIQ7hdSTgJt16P2",
-    "M9JN0YcBuCF6LhnqKN66",
-    "M9xkP4ecn0LIi7mQOfU6",
-    "CNtJgh52Ykh9ZqEppZeH",
-    "lgobcoiqmtWfbXkhEwbE",
-    "nr9jxnsE4DnwmTwCaHqC",
-    "Rnzo03tcyBqGPdmHemCb",
-    "X3YVGp7yf9GLgZ7WOuSU",
-    "wL3bkqxR9xqeFTvkJpSI",
-    "wNx3XDgFLTjVbMyGrIAO",
-    "rb0jj1ywBetmdvve5qIL",
-    "WdNnqvNswXeh6JFoaRSS",
-    "WT2ViyerKpodYmHDHhCw",
-    "OvhIRehXNwx7xMJHuTd7",
-    "EQb1iZtsADxJ0GxLJzEK",
-    "WXVfBJYoYGB7S61VyETD",
-    "q0q3Di1YJKF07dOhoa7E",
-    "a2XBIUPa68UiiKlzwFnG",
-    "YBuD7KsUpz8jxc5ItZcF",
-    "KdoucRVCVQGRVQ8Di9Ih",
-    "CkmDny98GEdfGuj2kaAx",
-    "R0R2p8luRZL7wwPtDilw",
-    "awvztgQnuaquK0dTpIuH",
-    "3ZPN0nJo8UQZYhFhoIOK",
-    "RJJeTkjYIgdv1ZoXXAax",
-    "ppxUNzWHAQafsM6OvEUE",
-    "f2VBm7yE7qmnjdS9CbYz",
-    "SZIMwz2T5ZAhTxTDBFol",
-    "YjC91PRgnQbAcdPhnWqU",
-    "fDTV7n8f6QK5yCwLkBwg",
-    "KbPpWUuiLPADj9H3OlvG",
-    "DIuqVoAg7lLxpvFBip84",
-    "pEwFAKMLGWUMHqfljJSq",
-    "9wwl7UbsgeKqrk8kNZin",
-    "2uLvjJgcZDiY9dqB8JlP",
-    "U5f1qZQM08t2YzJqEmxK",
-    "gnwn7QIhrCXRAGNddZ1H",
-    "g5nGEIHirFzKstdrGI1h",
-    "CQWH5dGSeS38VC4X4yg7",
-    "C5YGjhJPrTkVOpxIOHdj",
-    "YLbtnf1pSb9Ra7wgFHiF",
-    "qNLgNSvMr4VSoisKS9qj",
-    "Bq2ALvQVsj9L2wMpUvYO",
-    "gi0yTXLZLMhUKeKcalWc",
-    "3JQN9UbCsqj9ggi5sCkq",
-    "oPflJoA9kqBzjlmWY6zL",
-    "0kUZFgtZdqgdUBXFsXs9",
-    "aFTi7XdjR8W52ThmFpgc",
-    "pgIfjcy2UvKggfqJ1aNx",
-    "r0VguLaqnxTL9jza9H4y",
-    "444ehr4RtqgU1xjhhTLo",
-    "pEuzoznVDaQRBhIA9VTy",
-    "T9hdW9eJkEqDmOsSUoeY",
-    "wJjHbGzoWiKKOIGmf82T",
-    "kij4uMmkUlsSDu2zSH1k",
-    "oWt5rns196JsKIYPyrBS",
-    "SJ1m9mSOGOLIhkMgA8kq",
-    "kAaqe0ATrYtkifmZLOE5",
-    "O2Pvz7CP5rfyNvzFSDmy",
-    "w1rb8qN5nohVUovC0XAx",
-    "njFs4I4F7rtd9I6fEn6x",
-    "miFrp9GBm3MsHO03Z4eY",
-    "5DJywiPsfeVP9hFdqRhd",
-    "mUephoXhk5QdWrOfr9Xr",
-    "tDDiW3Yp0BptZ2wBv21A",
-    "YpX06liXWHquUVYFlKYa"
-  ]
-}
\ No newline at end of file
diff --git a/sijapi/helpers/fromvm/log_prior_emails.py b/sijapi/helpers/fromvm/log_prior_emails.py
deleted file mode 100644
index 10148cf..0000000
--- a/sijapi/helpers/fromvm/log_prior_emails.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import asyncio
-from pathlib import Path
-from sijapi import L, EMAIL_CONFIG, EMAIL_LOGS
-from sijapi.classes import EmailAccount
-from sijapi.routers import email
-
-async def initialize_log_files():
-    summarized_log = EMAIL_LOGS / "summarized.txt"
-    autoresponded_log = EMAIL_LOGS / "autoresponded.txt"
-    diagnostic_log = EMAIL_LOGS / "diagnostic.txt"
-    for log_file in [summarized_log, autoresponded_log, diagnostic_log]:
-        log_file.parent.mkdir(parents=True, exist_ok=True)
-        log_file.write_text("")
-    L.DEBUG(f"Log files initialized: {summarized_log}, {autoresponded_log}, {diagnostic_log}")
-    return summarized_log, autoresponded_log, diagnostic_log
-
-async def process_all_emails(account: EmailAccount, summarized_log: Path, autoresponded_log: Path, diagnostic_log: Path):
-    try:
-        with email.get_imap_connection(account) as inbox:
-            L.DEBUG(f"Connected to {account.name}, processing all emails...")
-            all_messages = inbox.messages()
-            unread_messages = set(uid for uid, _ in inbox.messages(unread=True))
-            
-            processed_count = 0
-            for identifier, message in all_messages:
-                # Log diagnostic information
-                with open(diagnostic_log, 'a') as f:
-                    f.write(f"Account: {account.name}, Raw Identifier: {identifier}, Type: {type(identifier)}\n")
-                
-                # Attempt to get a string representation of the identifier
-                if isinstance(identifier, bytes):
-                    id_str = identifier.decode()
-                elif isinstance(identifier, (int, str)):
-                    id_str = str(identifier)
-                else:
-                    id_str = repr(identifier)
-                
-                if identifier not in unread_messages:
-                    processed_count += 1
-                    for log_file in [summarized_log, autoresponded_log]:
-                        with open(log_file, 'a') as f:
-                            f.write(f"{id_str}\n")
-            
-            L.INFO(f"Processed {processed_count} non-unread emails for account {account.name}")
-    except Exception as e:
-        L.logger.error(f"An error occurred while processing emails for account {account.name}: {e}")
-
-async def main():
-    email_accounts = email.load_email_accounts(EMAIL_CONFIG)
-    summarized_log, autoresponded_log, diagnostic_log = await initialize_log_files()
-
-    L.DEBUG(f"Processing {len(email_accounts)} email accounts")
-
-    tasks = [process_all_emails(account, summarized_log, autoresponded_log, diagnostic_log) for account in email_accounts]
-    await asyncio.gather(*tasks)
-
-    # Final verification
-    with open(summarized_log, 'r') as f:
-        final_count = len(f.readlines())
-    L.INFO(f"Final non-unread email count: {final_count}")
-
-if __name__ == "__main__":
-    asyncio.run(main())
\ No newline at end of file
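
One latent issue in this deleted script: it builds `id_str` for logging but performs the membership test with the raw `identifier`, so if the IMAP library yields, say, bytes from one call and ints from another, `identifier not in unread_messages` never matches. A hedged sketch that normalizes both sides before comparing:

```python
def norm_uid(uid):
    """Coerce IMAP UIDs (bytes, int, or str) to one canonical string form."""
    return uid.decode() if isinstance(uid, bytes) else str(uid)

unread = {norm_uid(uid) for uid, _ in inbox.messages(unread=True)}
for identifier, message in inbox.messages():
    if norm_uid(identifier) not in unread:
        ...  # safe to treat as already-read and log it
```
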
diff --git a/sijapi/helpers/fromvm/migrate_db_to_uuid.py b/sijapi/helpers/fromvm/migrate_db_to_uuid.py
deleted file mode 100644
index 60034fb..0000000
--- a/sijapi/helpers/fromvm/migrate_db_to_uuid.py
+++ /dev/null
@@ -1,191 +0,0 @@
-import psycopg2
-from psycopg2 import sql
-import sys
-
-def connect_to_db():
-	return psycopg2.connect(
-		dbname='sij',
-		user='sij',
-		password='Synchr0!',
-		host='localhost'
-	)
-
-def get_tables(cur):
-	cur.execute("""
-		SELECT table_name 
-		FROM information_schema.tables 
-		WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
-		AND table_name NOT LIKE '%_uuid' AND table_name NOT LIKE '%_orig'
-		AND table_name != 'spatial_ref_sys'
-	""")
-	return [row[0] for row in cur.fetchall()]
-
-def get_columns(cur, table_name):
-	cur.execute("""
-		SELECT column_name, udt_name, 
-			   is_nullable, column_default,
-			   character_maximum_length, numeric_precision, numeric_scale
-		FROM information_schema.columns 
-		WHERE table_name = %s
-		ORDER BY ordinal_position
-	""", (table_name,))
-	return cur.fetchall()
-
-def get_constraints(cur, table_name):
-	cur.execute("""
-		SELECT conname, contype, pg_get_constraintdef(c.oid)
-		FROM pg_constraint c
-		JOIN pg_namespace n ON n.oid = c.connamespace
-		WHERE conrelid = %s::regclass
-		AND n.nspname = 'public'
-	""", (table_name,))
-	return cur.fetchall()
-
-def drop_table_if_exists(cur, table_name):
-	cur.execute(sql.SQL("DROP TABLE IF EXISTS {} CASCADE").format(sql.Identifier(table_name)))
-
-def create_uuid_table(cur, old_table, new_table):
-	drop_table_if_exists(cur, new_table)
-	columns = get_columns(cur, old_table)
-	constraints = get_constraints(cur, old_table)
-	
-	column_defs = []
-	has_id_column = any(col[0] == 'id' for col in columns)
-	
-	for col in columns:
-		col_name, udt_name, is_nullable, default, max_length, precision, scale = col
-		if col_name == 'id' and has_id_column:
-			column_defs.append(sql.SQL("{} UUID PRIMARY KEY DEFAULT gen_random_uuid()").format(sql.Identifier(col_name)))
-		else:
-			type_sql = sql.SQL("{}").format(sql.Identifier(udt_name))
-			if max_length:
-				type_sql = sql.SQL("{}({})").format(type_sql, sql.Literal(max_length))
-			elif precision and scale:
-				type_sql = sql.SQL("{}({},{})").format(type_sql, sql.Literal(precision), sql.Literal(scale))
-			
-			column_def = sql.SQL("{} {}").format(sql.Identifier(col_name), type_sql)
-			if is_nullable == 'NO':
-				column_def = sql.SQL("{} NOT NULL").format(column_def)
-			if default and 'nextval' not in default:  # Skip auto-increment defaults
-				column_def = sql.SQL("{} DEFAULT {}").format(column_def, sql.SQL(default))
-			column_defs.append(column_def)
-
-	constraint_defs = []
-	for constraint in constraints:
-		conname, contype, condef = constraint
-		if contype != 'p' or not has_id_column:  # Keep primary key if there's no id column
-			constraint_defs.append(sql.SQL(condef))
-
-	if not has_id_column:
-		column_defs.append(sql.SQL("uuid UUID DEFAULT gen_random_uuid()"))
-
-	query = sql.SQL("CREATE TABLE {} ({})").format(
-		sql.Identifier(new_table),
-		sql.SQL(", ").join(column_defs + constraint_defs)
-	)
-	cur.execute(query)
-
-def migrate_data(cur, old_table, new_table):
-	columns = get_columns(cur, old_table)
-	column_names = [col[0] for col in columns]
-	has_id_column = 'id' in column_names
-	
-	if has_id_column:
-		column_names.remove('id')
-		old_cols = sql.SQL(", ").join(map(sql.Identifier, column_names))
-		new_cols = sql.SQL(", ").join(map(sql.Identifier, ['id'] + column_names))
-		query = sql.SQL("INSERT INTO {} ({}) SELECT gen_random_uuid(), {} FROM {}").format(
-			sql.Identifier(new_table),
-			new_cols,
-			old_cols,
-			sql.Identifier(old_table)
-		)
-	else:
-		old_cols = sql.SQL(", ").join(map(sql.Identifier, column_names))
-		new_cols = sql.SQL(", ").join(map(sql.Identifier, column_names + ['uuid']))
-		query = sql.SQL("INSERT INTO {} ({}) SELECT {}, gen_random_uuid() FROM {}").format(
-			sql.Identifier(new_table),
-			new_cols,
-			old_cols,
-			sql.Identifier(old_table)
-		)
-	cur.execute(query)
-
-def update_foreign_keys(cur, tables):
-	for table in tables:
-		constraints = get_constraints(cur, table)
-		for constraint in constraints:
-			conname, contype, condef = constraint
-			if contype == 'f':  # Foreign key constraint
-				referenced_table = condef.split('REFERENCES ')[1].split('(')[0].strip()
-				referenced_column = condef.split('(')[2].split(')')[0].strip()
-				local_column = condef.split('(')[1].split(')')[0].strip()
-				
-				cur.execute(sql.SQL("""
-					UPDATE {table_uuid}
-					SET {local_column} = subquery.new_id::text::{local_column_type}
-					FROM (
-						SELECT old.{ref_column} AS old_id, new_table.id AS new_id
-						FROM {ref_table} old
-						JOIN public.{ref_table_uuid} new_table ON new_table.{ref_column}::text = old.{ref_column}::text
-					) AS subquery
-					WHERE {local_column}::text = subquery.old_id::text
-				""").format(
-					table_uuid=sql.Identifier(f"{table}_uuid"),
-					local_column=sql.Identifier(local_column),
-					local_column_type=sql.SQL(get_column_type(cur, f"{table}_uuid", local_column)),
-					ref_column=sql.Identifier(referenced_column),
-					ref_table=sql.Identifier(referenced_table),
-					ref_table_uuid=sql.Identifier(f"{referenced_table}_uuid")
-				))
-
-def get_column_type(cur, table_name, column_name):
-	cur.execute("""
-		SELECT data_type 
-		FROM information_schema.columns 
-		WHERE table_name = %s AND column_name = %s
-	""", (table_name, column_name))
-	return cur.fetchone()[0]
-
-def rename_tables(cur, tables):
-	for table in tables:
-		drop_table_if_exists(cur, f"{table}_orig")
-		cur.execute(sql.SQL("ALTER TABLE IF EXISTS {} RENAME TO {}").format(
-			sql.Identifier(table), sql.Identifier(f"{table}_orig")
-		))
-		cur.execute(sql.SQL("ALTER TABLE IF EXISTS {} RENAME TO {}").format(
-			sql.Identifier(f"{table}_uuid"), sql.Identifier(table)
-		))
-
-def main():
-	try:
-		with connect_to_db() as conn:
-			with conn.cursor() as cur:
-				tables = get_tables(cur)
-				
-				# Create new UUID tables
-				for table in tables:
-					print(f"Creating UUID table for {table}...")
-					create_uuid_table(cur, table, f"{table}_uuid")
-				
-				# Migrate data
-				for table in tables:
-					print(f"Migrating data for {table}...")
-					migrate_data(cur, table, f"{table}_uuid")
-				
-				# Update foreign keys
-				print("Updating foreign key references...")
-				update_foreign_keys(cur, tables)
-				
-				# Rename tables
-				print("Renaming tables...")
-				rename_tables(cur, tables)
-				
-			conn.commit()
-		print("Migration completed successfully.")
-	except Exception as e:
-		print(f"An error occurred: {e}")
-		# No explicit rollback needed: psycopg2's connection context manager
-		# already rolls back on error, and conn is unbound here if connect_to_db() failed.
-
-if __name__ == "__main__":
-	main()
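
The migration leans on `gen_random_uuid()`, which is built into PostgreSQL 13 and later; on older servers it comes from the pgcrypto extension, so running this first is a cheap safeguard:

```sql
-- Only needed on PostgreSQL 12 and earlier; 13+ ships gen_random_uuid() natively
CREATE EXTENSION IF NOT EXISTS pgcrypto;
```
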
diff --git a/sijapi/helpers/fromvm/schema_info.yaml b/sijapi/helpers/fromvm/schema_info.yaml
deleted file mode 100644
index 4899924..0000000
--- a/sijapi/helpers/fromvm/schema_info.yaml
+++ /dev/null
@@ -1,1103 +0,0 @@
-click_logs:
-  clicked_at:
-    data_type: timestamp without time zone
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: uuid
-    is_nullable: 'NO'
-    max_length: null
-  ip_address:
-    data_type: text
-    is_nullable: 'YES'
-    max_length: null
-  short_code:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 3
-  user_agent:
-    data_type: text
-    is_nullable: 'YES'
-    max_length: null
-dailyweather:
-  cloudcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  conditions:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  datetimeepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  description:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  dew:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslike:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslikemax:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslikemin:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  humidity:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  icon:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  last_updated:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  location:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-  moonphase:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precip:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipprob:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  preciptype:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  pressure:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  severerisk:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snow:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snowdepth:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarenergy:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarradiation:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  source:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  stations:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  sunrise:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  sunriseepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  sunset:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  sunsetepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  temp:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  tempmax:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  tempmin:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  uvindex:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  visibility:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  winddir:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  windgust:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  windspeed:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-dailyweather_uuid:
-  cloudcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  conditions:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  datetimeepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  description:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  dew:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslike:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslikemax:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslikemin:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  humidity:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  icon:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: uuid
-    is_nullable: 'NO'
-    max_length: null
-  last_updated:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  location:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-  moonphase:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precip:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipprob:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  preciptype:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  pressure:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  severerisk:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snow:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snowdepth:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarenergy:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarradiation:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  source:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  stations:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  sunrise:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  sunriseepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  sunset:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  sunsetepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  temp:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  tempmax:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  tempmin:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  uvindex:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  visibility:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  winddir:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  windgust:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  windspeed:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-geography_columns:
-  coord_dimension:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  f_geography_column:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  f_table_catalog:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  f_table_name:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  f_table_schema:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  srid:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  type:
-    data_type: text
-    is_nullable: 'YES'
-    max_length: null
-geometry_columns:
-  coord_dimension:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  f_geometry_column:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  f_table_catalog:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 256
-  f_table_name:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  f_table_schema:
-    data_type: name
-    is_nullable: 'YES'
-    max_length: null
-  srid:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  type:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 30
-hourlyweather:
-  cloudcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  conditions:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  daily_weather_id:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  datetimeepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  dew:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslike:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  humidity:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  icon:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  precip:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipprob:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  preciptype:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  pressure:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  severerisk:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snow:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snowdepth:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarenergy:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarradiation:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  source:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  stations:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  temp:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  uvindex:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  visibility:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  winddir:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  windgust:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  windspeed:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-hourlyweather_uuid:
-  cloudcover:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  conditions:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  daily_weather_id:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  datetimeepoch:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  dew:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  feelslike:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  humidity:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  icon:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: uuid
-    is_nullable: 'NO'
-    max_length: null
-  precip:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  precipprob:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  preciptype:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  pressure:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  severerisk:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snow:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  snowdepth:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarenergy:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  solarradiation:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  source:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  stations:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  temp:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  uvindex:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  visibility:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  winddir:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  windgust:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  windspeed:
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-locations:
-  action:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  amenity:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  boundingbox:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  city:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  class:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  class_:
-    data_type: text
-    is_nullable: 'YES'
-    max_length: null
-  context:
-    data_type: jsonb
-    is_nullable: 'YES'
-    max_length: null
-  country:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  country_code:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  county:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  daily_weather_id:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  device_model:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_os:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_type:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  display_name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  house_number:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  location:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-  name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  neighbourhood:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  quarter:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  road:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  state:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  street:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  suburb:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  type:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  zip:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-locations_uuid:
-  action:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  amenity:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  boundingbox:
-    data_type: ARRAY
-    is_nullable: 'YES'
-    max_length: null
-  city:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  class:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  class_:
-    data_type: text
-    is_nullable: 'YES'
-    max_length: null
-  context:
-    data_type: jsonb
-    is_nullable: 'YES'
-    max_length: null
-  country:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  country_code:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  county:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  daily_weather_id:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  datetime:
-    data_type: timestamp with time zone
-    is_nullable: 'YES'
-    max_length: null
-  device_model:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_os:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  device_type:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  display_name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  house_number:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: uuid
-    is_nullable: 'NO'
-    max_length: null
-  location:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-  name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  neighbourhood:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  quarter:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  road:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  state:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  street:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-  suburb:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  type:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  zip:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 255
-plss_sections:
-  frstdivdup:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  frstdivid:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  frstdivlab:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  frstdivno:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  frstdivtxt:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  frstdivtyp:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  objectid:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  ogc_fid:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  plssid:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  shape.starea():
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  shape.stlength():
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  sourcedate:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  sourceref:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  survtyp:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  survtyptxt:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  wkb_geometry:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-plss_townships:
-  created_date:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  created_user:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  globalid:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  last_edited_date:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  last_edited_user:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  objectid:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  ogc_fid:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  origid:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  partcount:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  plssid:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  prinmer:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  prinmercd:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  rangedir:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  rangefrac:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  rangeno:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  secsrvname:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  shape.starea():
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  shape.stlength():
-    data_type: double precision
-    is_nullable: 'YES'
-    max_length: null
-  sourcedate:
-    data_type: bigint
-    is_nullable: 'YES'
-    max_length: null
-  sourceref:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  srvname:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  stateabbr:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  steward:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  survtyp:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  survtyptxt:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  twnshpdir:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  twnshpdpcd:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  twnshpfrac:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  twnshplab:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  twnshpno:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: null
-  wkb_geometry:
-    data_type: USER-DEFINED
-    is_nullable: 'YES'
-    max_length: null
-relations:
-  id:
-    data_type: bigint
-    is_nullable: 'NO'
-    max_length: null
-  members:
-    data_type: jsonb
-    is_nullable: 'YES'
-    max_length: null
-  tags:
-    data_type: jsonb
-    is_nullable: 'YES'
-    max_length: null
-short_urls:
-  created_at:
-    data_type: timestamp without time zone
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  long_url:
-    data_type: text
-    is_nullable: 'NO'
-    max_length: null
-  short_code:
-    data_type: character varying
-    is_nullable: 'NO'
-    max_length: 3
-short_urls_uuid:
-  created_at:
-    data_type: timestamp without time zone
-    is_nullable: 'YES'
-    max_length: null
-  id:
-    data_type: uuid
-    is_nullable: 'NO'
-    max_length: null
-  long_url:
-    data_type: text
-    is_nullable: 'NO'
-    max_length: null
-  short_code:
-    data_type: character varying
-    is_nullable: 'NO'
-    max_length: 3
-spatial_ref_sys:
-  auth_name:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 256
-  auth_srid:
-    data_type: integer
-    is_nullable: 'YES'
-    max_length: null
-  proj4text:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 2048
-  srid:
-    data_type: integer
-    is_nullable: 'NO'
-    max_length: null
-  srtext:
-    data_type: character varying
-    is_nullable: 'YES'
-    max_length: 2048
diff --git a/sijapi/helpers/fromvm/start.py b/sijapi/helpers/fromvm/start.py
deleted file mode 100644
index 031e4fb..0000000
--- a/sijapi/helpers/fromvm/start.py
+++ /dev/null
@@ -1,211 +0,0 @@
-import yaml
-import requests
-import paramiko
-import time
-from pathlib import Path
-import logging
-import subprocess
-import os
-import argparse
-import sys
-
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-
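-# Read the server pool definition (POOL) from config/sys.yaml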
-def load_config():
-    config_path = Path(__file__).parent.parent / 'config' / 'sys.yaml'
-    with open(config_path, 'r') as file:
-        return yaml.safe_load(file)
-
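-# Load KEY=VALUE pairs from config/.env into os.environ, skipping blanks and comments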
-def load_env():
-    env_path = Path(__file__).parent.parent / 'config' / '.env'
-    if env_path.exists():
-        with open(env_path, 'r') as file:
-            for line in file:
-                line = line.strip()
-                if line and not line.startswith('#'):
-                    try:
-                        key, value = line.split('=', 1)
-                        os.environ[key.strip()] = value.strip()
-                    except ValueError:
-                        logging.warning(f"Skipping invalid line in .env file: {line}")
-
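-# A pool member is healthy when GET http://<ip>:<port>/id returns 200 and echoes its ts_id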
-def check_server(ip, port, ts_id):
-    address = f"http://{ip}:{port}/id"
-    try:
-        response = requests.get(address, timeout=5)
-        response_text = response.text.strip().strip('"')
-        return response.status_code == 200 and response_text == ts_id
-    except requests.RequestException as e:
-        logging.error(f"Error checking server {ts_id}: {str(e)}")
-        return False
-
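-# Run a command over an open SSH session and return (exit_status, stdout, stderr)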
-def execute_ssh_command(ssh, command):
-    stdin, stdout, stderr = ssh.exec_command(command)
-    exit_status = stdout.channel.recv_exit_status()
-    output = stdout.read().decode().strip()
-    error = stderr.read().decode().strip()
-    return exit_status, output, error
-
-def is_local_tmux_session_running(session_name):
-    try:
-        result = subprocess.run(['tmux', 'has-session', '-t', session_name], capture_output=True, text=True)
-        return result.returncode == 0
-    except subprocess.CalledProcessError:
-        return False
-
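-# Launch sijapi in a detached tmux session locally, optionally running
-# git pull (or commit-and-force-push) in the repo first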
-def start_local_server(server, pull=False, push=False):
-    try:
-        if is_local_tmux_session_running('sijapi'):
-            logging.info("Local sijapi tmux session is already running.")
-            return
-
-        git_command = ""
-        if pull:
-            git_command = "git pull &&"
-        elif push:
-            git_command = "git add -A . && git commit -m \"auto-update\" && git push origin --force &&"
-
-        command = f"{server['tmux']} new-session -d -s sijapi 'cd {server['path']} && {git_command} {server['conda_env']}/bin/python -m sijapi'"
-        logging.info(f"Executing local command: {command}")
-        result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
-        logging.info(f"Successfully started sijapi session on local machine")
-        logging.debug(f"Command output: {result.stdout}")
-    except subprocess.CalledProcessError as e:
-        logging.error(f"Failed to start sijapi session on local machine. Error: {e}")
-        logging.error(f"Error output: {e.stderr}")
-
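-# Same as start_local_server, but over SSH for a remote pool member.
-# Note: AutoAddPolicy accepts unknown host keys on first connect.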
-def start_remote_server(server, pull=False, push=False):
-    ssh = paramiko.SSHClient()
-    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-    try:
-        ssh.connect(
-            server['ts_ip'],
-            port=server['ssh_port'],
-            username=server['ssh_user'],
-            password=server['ssh_pass'],
-            timeout=10
-        )
-
-        status, output, error = execute_ssh_command(ssh, f"{server['tmux']} has-session -t sijapi 2>/dev/null && echo 'exists' || echo 'not exists'")
-        if output == 'exists':
-            logging.info(f"sijapi session already exists on {server['ts_id']}")
-            return
-
-        git_command = ""
-        if pull:
-            git_command = "git pull &&"
-        elif push:
-            git_command = "git add -A . && git commit -m \"auto-update\" && git push origin --force &&"
-
-        command = f"{server['tmux']} new-session -d -s sijapi 'cd {server['path']} && {git_command} {server['conda_env']}/bin/python -m sijapi'"
-        status, output, error = execute_ssh_command(ssh, command)
-
-        if status == 0:
-            logging.info(f"Successfully started sijapi session on {server['ts_id']}")
-        else:
-            logging.error(f"Failed to start sijapi session on {server['ts_id']}. Error: {error}")
-
-    except paramiko.SSHException as e:
-        logging.error(f"Failed to connect to {server['ts_id']}: {str(e)}")
-    finally:
-        ssh.close()
-
-def kill_local_server():
-    try:
-        if is_local_tmux_session_running('sijapi'):
-            subprocess.run(['tmux', 'kill-session', '-t', 'sijapi'], check=True)
-            logging.info("Killed local sijapi tmux session.")
-        else:
-            logging.info("No local sijapi tmux session to kill.")
-    except subprocess.CalledProcessError as e:
-        logging.error(f"Failed to kill local sijapi tmux session. Error: {e}")
-
-def kill_remote_server(server):
-    ssh = paramiko.SSHClient()
-    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-    try:
-        ssh.connect(
-            server['ts_ip'],
-            port=server['ssh_port'],
-            username=server['ssh_user'],
-            password=server['ssh_pass'],
-            timeout=10
-        )
-
-        command = f"{server['tmux']} kill-session -t sijapi"
-        status, output, error = execute_ssh_command(ssh, command)
-
-        if status == 0:
-            logging.info(f"Successfully killed sijapi session on {server['ts_id']}")
-        else:
-            logging.error(f"Failed to kill sijapi session on {server['ts_id']}. Error: {error}")
-
-    except paramiko.SSHException as e:
-        logging.error(f"Failed to connect to {server['ts_id']}: {str(e)}")
-    finally:
-        ssh.close()
-
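-# Typical invocations (derived from the flags below):
-#   start.py                  -> health-check the pool; restart anything unresponsive
-#   start.py --kill [--all]   -> kill the tmux session locally (or pool-wide)
-#   start.py --restart --all  -> restart every server, local and remote
-#   start.py --pull | --push  -> git-sync the repo before restarting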
-def main():
-    load_env()
-    config = load_config()
-    pool = config['POOL']
-    local_ts_id = os.environ.get('TS_ID')
-
-    parser = argparse.ArgumentParser(description='Manage sijapi servers')
-    parser.add_argument('--kill', action='store_true', help='Kill the sijapi tmux session (local by default; all servers with --all)')
-    parser.add_argument('--restart', action='store_true', help='Restart the sijapi tmux session (local by default; all servers with --all)')
-    parser.add_argument('--all', action='store_true', help='Apply the action to all servers')
-    parser.add_argument('--pull', action='store_true', help='Pull latest changes before starting the server')
-    parser.add_argument('--push', action='store_true', help='Push changes before starting the server')
-
-    args = parser.parse_args()
-
-    if args.kill:
-        if args.all:
-            for server in pool:
-                if server['ts_id'] == local_ts_id:
-                    kill_local_server()
-                else:
-                    kill_remote_server(server)
-        else:
-            kill_local_server()
-        sys.exit(0)
-
-    if args.restart or args.pull or args.push:
-        if args.all:
-            for server in pool:
-                if server['ts_id'] == local_ts_id:
-                    kill_local_server()
-                    start_local_server(server, pull=args.pull, push=args.push)
-                else:
-                    kill_remote_server(server)
-                    start_remote_server(server, pull=args.pull, push=args.push)
-        else:
-            kill_local_server()
-            local_server = next((server for server in pool if server['ts_id'] == local_ts_id), None)
-            if local_server:
-                start_local_server(local_server, pull=args.pull, push=args.push)
-            else:
-                logging.error(f"TS_ID {local_ts_id} not found in POOL; cannot start the local server.")
-        sys.exit(0)
-
-    # If no specific arguments, run the default behavior
-    local_server = next((server for server in pool if server['ts_id'] == local_ts_id), None)
-    if local_server is None:
-        logging.error(f"TS_ID {local_ts_id} not found in POOL; exiting.")
-        sys.exit(1)
-    if not check_server(local_server['ts_ip'], local_server['app_port'], local_server['ts_id']):
-        logging.info(f"Local server {local_server['ts_id']} is not responding correctly. Attempting to start...")
-        kill_local_server()
-        start_local_server(local_server, push=True)
-
-    for server in pool:
-        if server['ts_id'] != local_ts_id:
-            if not check_server(server['ts_ip'], server['app_port'], server['ts_id']):
-                logging.info(f"{server['ts_id']} is not responding correctly. Attempting to start...")
-                kill_remote_server(server)
-                start_remote_server(server, pull=True)
-            else:
-                logging.info(f"{server['ts_id']} is running and responding correctly.")
-
-        time.sleep(1)
-
-if __name__ == "__main__":
-    main()
-
diff --git a/sijapi/helpers/fromvm/upscaler.py b/sijapi/helpers/fromvm/upscaler.py
deleted file mode 100644
index 4e7ea5d..0000000
--- a/sijapi/helpers/fromvm/upscaler.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from aura_sr import AuraSR
-from PIL import Image
-import torch
-import os
-
-# MPS tuning: disable the allocator's high-watermark cap and fall back to CPU for unsupported ops
-os.environ['PYTORCH_MPS_HIGH_WATERMARK_RATIO'] = '0.0'
-os.environ['PYTORCH_ENABLE_MPS_FALLBACK'] = '1'
-
-# Default to CPU; switch to MPS below when it is usable
-device = torch.device('cpu')
-
-if torch.backends.mps.is_available():
-    device = torch.device('mps:0')
-elif torch.backends.mps.is_built():
-    print("MPS is built but not available; check macOS version and hardware support.")
-else:
-    print("MPS not available because the current PyTorch install was not built with MPS enabled.")
-
-# Make the selected device the default for newly created tensors (requires PyTorch >= 2.0)
-torch.set_default_device(device)
-
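-# Load AuraSR; the "fal-ai/AuraSR" weights are typically fetched from the Hugging Face hub on first use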
-aura_sr = AuraSR.from_pretrained("fal-ai/AuraSR").to(device)
-
-def load_image_from_path(file_path):
-    return Image.open(file_path)
-
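-# Caution: the 4x-upscaled result overwrites the original file in place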
-def upscale_and_save(original_path):
-    original_image = load_image_from_path(original_path)
-    upscaled_image = aura_sr.upscale_4x(original_image)
-    upscaled_image.save(original_path)
-
-# Insert your image path
-upscale_and_save("/Users/sij/workshop/sijapi/sijapi/testbed/API__00482_ 2.png")
diff --git a/sijapi/helpers/start.py b/sijapi/helpers/start.py
deleted file mode 100644
index 031e4fb..0000000
--- a/sijapi/helpers/start.py
+++ /dev/null
@@ -1,211 +0,0 @@
-import yaml
-import requests
-import paramiko
-import time
-from pathlib import Path
-import logging
-import subprocess
-import os
-import argparse
-import sys
-
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-
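-# Read the server pool definition (POOL) from config/sys.yaml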
-def load_config():
-    config_path = Path(__file__).parent.parent / 'config' / 'sys.yaml'
-    with open(config_path, 'r') as file:
-        return yaml.safe_load(file)
-
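-# Load KEY=VALUE pairs from config/.env into os.environ, skipping blanks and comments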
-def load_env():
-    env_path = Path(__file__).parent.parent / 'config' / '.env'
-    if env_path.exists():
-        with open(env_path, 'r') as file:
-            for line in file:
-                line = line.strip()
-                if line and not line.startswith('#'):
-                    try:
-                        key, value = line.split('=', 1)
-                        os.environ[key.strip()] = value.strip()
-                    except ValueError:
-                        logging.warning(f"Skipping invalid line in .env file: {line}")
-
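-# A pool member is healthy when GET http://<ip>:<port>/id returns 200 and echoes its ts_id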
-def check_server(ip, port, ts_id):
-    address = f"http://{ip}:{port}/id"
-    try:
-        response = requests.get(address, timeout=5)
-        response_text = response.text.strip().strip('"')
-        return response.status_code == 200 and response_text == ts_id
-    except requests.RequestException as e:
-        logging.error(f"Error checking server {ts_id}: {str(e)}")
-        return False
-
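-# Run a command over an open SSH session and return (exit_status, stdout, stderr)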
-def execute_ssh_command(ssh, command):
-    stdin, stdout, stderr = ssh.exec_command(command)
-    exit_status = stdout.channel.recv_exit_status()
-    output = stdout.read().decode().strip()
-    error = stderr.read().decode().strip()
-    return exit_status, output, error
-
-def is_local_tmux_session_running(session_name):
-    try:
-        result = subprocess.run(['tmux', 'has-session', '-t', session_name], capture_output=True, text=True)
-        return result.returncode == 0
-    except subprocess.CalledProcessError:
-        return False
-
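-# Launch sijapi in a detached tmux session locally, optionally running
-# git pull (or commit-and-force-push) in the repo first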
-def start_local_server(server, pull=False, push=False):
-    try:
-        if is_local_tmux_session_running('sijapi'):
-            logging.info("Local sijapi tmux session is already running.")
-            return
-
-        git_command = ""
-        if pull:
-            git_command = "git pull &&"
-        elif push:
-            git_command = "git add -A . && git commit -m \"auto-update\" && git push origin --force &&"
-
-        command = f"{server['tmux']} new-session -d -s sijapi 'cd {server['path']} && {git_command} {server['conda_env']}/bin/python -m sijapi'"
-        logging.info(f"Executing local command: {command}")
-        result = subprocess.run(command, shell=True, check=True, capture_output=True, text=True)
-        logging.info(f"Successfully started sijapi session on local machine")
-        logging.debug(f"Command output: {result.stdout}")
-    except subprocess.CalledProcessError as e:
-        logging.error(f"Failed to start sijapi session on local machine. Error: {e}")
-        logging.error(f"Error output: {e.stderr}")
-
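-# Same as start_local_server, but over SSH for a remote pool member.
-# Note: AutoAddPolicy accepts unknown host keys on first connect.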
-def start_remote_server(server, pull=False, push=False):
-    ssh = paramiko.SSHClient()
-    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-    try:
-        ssh.connect(
-            server['ts_ip'],
-            port=server['ssh_port'],
-            username=server['ssh_user'],
-            password=server['ssh_pass'],
-            timeout=10
-        )
-
-        status, output, error = execute_ssh_command(ssh, f"{server['tmux']} has-session -t sijapi 2>/dev/null && echo 'exists' || echo 'not exists'")
-        if output == 'exists':
-            logging.info(f"sijapi session already exists on {server['ts_id']}")
-            return
-
-        git_command = ""
-        if pull:
-            git_command = "git pull &&"
-        elif push:
-            git_command = "git add -A . && git commit -m \"auto-update\" && git push origin --force &&"
-
-        command = f"{server['tmux']} new-session -d -s sijapi 'cd {server['path']} && {git_command} {server['conda_env']}/bin/python -m sijapi'"
-        status, output, error = execute_ssh_command(ssh, command)
-
-        if status == 0:
-            logging.info(f"Successfully started sijapi session on {server['ts_id']}")
-        else:
-            logging.error(f"Failed to start sijapi session on {server['ts_id']}. Error: {error}")
-
-    except paramiko.SSHException as e:
-        logging.error(f"Failed to connect to {server['ts_id']}: {str(e)}")
-    finally:
-        ssh.close()
-
-def kill_local_server():
-    try:
-        if is_local_tmux_session_running('sijapi'):
-            subprocess.run(['tmux', 'kill-session', '-t', 'sijapi'], check=True)
-            logging.info("Killed local sijapi tmux session.")
-        else:
-            logging.info("No local sijapi tmux session to kill.")
-    except subprocess.CalledProcessError as e:
-        logging.error(f"Failed to kill local sijapi tmux session. Error: {e}")
-
-def kill_remote_server(server):
-    ssh = paramiko.SSHClient()
-    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
-    try:
-        ssh.connect(
-            server['ts_ip'],
-            port=server['ssh_port'],
-            username=server['ssh_user'],
-            password=server['ssh_pass'],
-            timeout=10
-        )
-
-        command = f"{server['tmux']} kill-session -t sijapi"
-        status, output, error = execute_ssh_command(ssh, command)
-
-        if status == 0:
-            logging.info(f"Successfully killed sijapi session on {server['ts_id']}")
-        else:
-            logging.error(f"Failed to kill sijapi session on {server['ts_id']}. Error: {error}")
-
-    except paramiko.SSHException as e:
-        logging.error(f"Failed to connect to {server['ts_id']}: {str(e)}")
-    finally:
-        ssh.close()
-
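-# Typical invocations (derived from the flags below):
-#   start.py                  -> health-check the pool; restart anything unresponsive
-#   start.py --kill [--all]   -> kill the tmux session locally (or pool-wide)
-#   start.py --restart --all  -> restart every server, local and remote
-#   start.py --pull | --push  -> git-sync the repo before restarting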
-def main():
-    load_env()
-    config = load_config()
-    pool = config['POOL']
-    local_ts_id = os.environ.get('TS_ID')
-
-    parser = argparse.ArgumentParser(description='Manage sijapi servers')
-    parser.add_argument('--kill', action='store_true', help='Kill the sijapi tmux session (local by default; all servers with --all)')
-    parser.add_argument('--restart', action='store_true', help='Restart the sijapi tmux session (local by default; all servers with --all)')
-    parser.add_argument('--all', action='store_true', help='Apply the action to all servers')
-    parser.add_argument('--pull', action='store_true', help='Pull latest changes before starting the server')
-    parser.add_argument('--push', action='store_true', help='Push changes before starting the server')
-
-    args = parser.parse_args()
-
-    if args.kill:
-        if args.all:
-            for server in pool:
-                if server['ts_id'] == local_ts_id:
-                    kill_local_server()
-                else:
-                    kill_remote_server(server)
-        else:
-            kill_local_server()
-        sys.exit(0)
-
-    if args.restart or args.pull or args.push:
-        if args.all:
-            for server in pool:
-                if server['ts_id'] == local_ts_id:
-                    kill_local_server()
-                    start_local_server(server, pull=args.pull, push=args.push)
-                else:
-                    kill_remote_server(server)
-                    start_remote_server(server, pull=args.pull, push=args.push)
-        else:
-            kill_local_server()
-            local_server = next((server for server in pool if server['ts_id'] == local_ts_id), None)
-            if local_server:
-                start_local_server(local_server, pull=args.pull, push=args.push)
-            else:
-                logging.error(f"TS_ID {local_ts_id} not found in POOL; cannot start the local server.")
-        sys.exit(0)
-
-    # If no specific arguments, run the default behavior
-    local_server = next((server for server in pool if server['ts_id'] == local_ts_id), None)
-    if local_server is None:
-        logging.error(f"TS_ID {local_ts_id} not found in POOL; exiting.")
-        sys.exit(1)
-    if not check_server(local_server['ts_ip'], local_server['app_port'], local_server['ts_id']):
-        logging.info(f"Local server {local_server['ts_id']} is not responding correctly. Attempting to start...")
-        kill_local_server()
-        start_local_server(local_server, push=True)
-
-    for server in pool:
-        if server['ts_id'] != local_ts_id:
-            if not check_server(server['ts_ip'], server['app_port'], server['ts_id']):
-                logging.info(f"{server['ts_id']} is not responding correctly. Attempting to start...")
-                kill_remote_server(server)
-                start_remote_server(server, pull=True)
-            else:
-                logging.info(f"{server['ts_id']} is running and responding correctly.")
-
-        time.sleep(1)
-
-if __name__ == "__main__":
-    main()
-