Remove files not on the main branch

This commit is contained in:
sanj 2024-07-30 19:53:05 -07:00
parent 9b4f85f1e1
commit c281d3ac38
35 changed files with 0 additions and 1623 deletions

BIN
.DS_Store vendored

Binary file not shown.

2
2fa
View file

@ -1,2 +0,0 @@
# 2fa — print a one-time 2FA code by delegating to the user's ~/.2fa.py.
# NOTE(review): no shebang — behavior depends on the invoking shell, and
# sourcing a zsh rc file assumes that shell is zsh-compatible; confirm.
source ~/.zshrc
# `python` resolves to whatever interpreter ~/.zshrc puts first on PATH.
python ~/.2fa.py

29
all
View file

@ -1,29 +0,0 @@
#!/bin/bash
# Launch a detached tmux session "servers" whose single window is split
# into four panes, each running a self-reconnecting SSH loop to one host.
# Create a new tmux session
tmux new-session -d -s servers -n 'ssh'
# Split the first window horizontally
tmux split-window -h
# Split the first and second panes vertically
tmux select-pane -t 0
tmux split-window -v
tmux select-pane -t 2
tmux split-window -v
# Function to connect to servers with retry
# $1 = pane index, $2 = ssh target. The loop is intentionally infinite:
# when a connection drops it waits 30s and re-dials, so panes never die.
connect_to_server_with_retry() {
tmux send-keys -t "$1" "while true; do ssh $2; sleep 30; done" C-m
}
# Connect to servers with retry
# NOTE(review): 100.64.64.x is CGNAT address space — presumably Tailscale
# peers; confirm pane indices 0-3 match the split layout created above.
connect_to_server_with_retry 0 "100.64.64.11"
connect_to_server_with_retry 1 "100.64.64.30"
connect_to_server_with_retry 2 "100.64.64.15"
connect_to_server_with_retry 3 "root@10.13.37.10"
# Attach to the tmux session
tmux attach -t servers

113
cf
View file

@ -1,113 +0,0 @@
#!/bin/bash
# cf — expose a local service behind Cloudflare + Caddy in one shot:
#   1. refresh dynamic-DNS records via the `ddns` helper,
#   2. create a proxied Cloudflare A record for <full-domain> pointing at
#      the shared IP stored in /home/sij/.services/ip.txt,
#   3. append a reverse-proxy block to /etc/caddy/Caddyfile, restart caddy.
# Usage: cf <full-domain> [--ip <ip>] --port <port>   (must run as root)
if [ "$EUID" -ne 0 ]; then
echo "This script must be run as root. Try using 'sudo'."
exit 1
fi
# NOTE(review): sourcing a user's zshrc/globals while running bash as root
# is fragile (zsh-only syntax would break) — confirm these files are
# bash-compatible.
source /home/sij/.zshrc
source /home/sij/.GLOBAL_VARS
# Refresh DDNS entries first so ip.txt is current before we read it below.
ddns
# Initialize variables
full_domain=$1
shift # Shift the arguments to left so we can get remaining arguments as before
caddyIP="" # Optional IP for Caddyfile
port=""
# Fixed IP for Cloudflare from ip.txt
cloudflareIP=$(cat /home/sij/.services/ip.txt)
api_key=$CF_API_KEY
cf_domains_file=/home/sij/.services/cf_domains.json
# Usage message
usage() {
echo "Usage: $0 <full-domain> [--ip <ip address>] --port <port>"
echo "Note: <full-domain> is required and can be a subdomain or a full domain."
exit 1
}
# Parse command-line arguments (the domain was already consumed above, so
# only flags remain; anything unrecognized prints usage and exits).
while [[ $# -gt 0 ]]; do
case $1 in
--ip|-i)
caddyIP="$2"
shift 2
;;
--port|-p)
port="$2"
shift 2
;;
*)
usage
;;
esac
done
# Check required parameter
if [[ -z "$full_domain" ]] || [[ -z "$port" ]]; then
usage
fi
# Extract subdomain and domain; NF counts the dot-separated labels.
subdomain=$(echo "$full_domain" | awk -F"." '{print $1}')
remaining_parts=$(echo "$full_domain" | awk -F"." '{print NF}')
if [ "$remaining_parts" -eq 2 ]; then
# Handle root domain (e.g., env.esq)
domain=$full_domain
subdomain="@" # Use "@" for root domain
else
# Handle subdomain (e.g., sub.env.esq)
domain=$(echo "$full_domain" | sed "s/^$subdomain\.//")
fi
# Default to localhost for Caddyfile if IP is not provided via --ip
if [[ -z "$caddyIP" ]]; then
caddyIP="localhost"
fi
# Extract zone_id from JSON file
zone_id=$(jq -r ".\"$domain\".zone_id" "$cf_domains_file")
# Check if zone_id was successfully retrieved
if [ "$zone_id" == "null" ] || [ -z "$zone_id" ]; then
echo "Error: Zone ID for $domain could not be found."
exit 1
fi
# API call setup for Cloudflare A record using the fixed IP from ip.txt
endpoint="https://api.cloudflare.com/client/v4/zones/$zone_id/dns_records"
data="{\"type\":\"A\",\"name\":\"$subdomain\",\"content\":\"$cloudflareIP\",\"ttl\":120,\"proxied\":true}"
# Make API call
response=$(curl -s -X POST "$endpoint" -H "Authorization: Bearer $api_key" -H "Content-Type: application/json" --data "$data")
# Parse response
record_id=$(echo "$response" | jq -r '.result.id')
success=$(echo "$response" | jq -r '.success')
error_message=$(echo "$response" | jq -r '.errors[0].message')
error_code=$(echo "$response" | jq -r '.errors[0].code')
# Function to update Caddyfile with correct indentation
update_caddyfile() {
echo "$full_domain {
reverse_proxy $caddyIP:$port
tls {
dns cloudflare {env.CLOUDFLARE_API_TOKEN}
}
}" >> /etc/caddy/Caddyfile
echo "Configuration appended to /etc/caddy/Caddyfile with correct formatting."
}
# Check for success or specific error to update Caddyfile
if [ "$success" == "true" ]; then
# Persist the new record id under this domain's subdomain map.
# NOTE(review): temp.json is written to the current working directory —
# concurrent runs or an unwritable cwd will corrupt/abort this update.
jq ".\"$domain\".subdomains[\"$full_domain\"] = \"$record_id\"" "$cf_domains_file" > temp.json && mv temp.json "$cf_domains_file"
echo "A record created and cf_domains.json updated successfully."
update_caddyfile
elif [ "$error_message" == "Record already exists." ]; then
echo "Record already exists. Updating Caddyfile anyway."
update_caddyfile
else
echo "Failed to create A record. Error: $error_message (Code: $error_code)"
fi
echo "Restarting caddy!"
sudo systemctl restart caddy

View file

@ -1,55 +0,0 @@
#!/bin/bash
# Read-only audit for leftovers of Node.js, npm, and nvm: PATH entries,
# Homebrew installs, caches, logs, and assorted dotfiles.
# Prints one line per finding; removes nothing.
echo "Checking for remnants of Node.js, npm, and nvm..."

# Check PATH ("$PATH" quoted so the value is not word-split or globbed)
echo "Checking PATH..."
echo "$PATH" | grep -q 'node\|npm' && echo "Found Node.js or npm in PATH"

# Check Homebrew
echo "Checking Homebrew..."
brew list | grep -q 'node\|npm' && echo "Found Node.js or npm installed with Homebrew"

# Check Yarn
echo "Checking Yarn..."
command -v yarn >/dev/null 2>&1 && echo "Found Yarn"

# Check Node.js and npm directories — `test -e` instead of parsing `ls`
echo "Checking Node.js and npm directories..."
[ -e ~/.npm ] && echo "Found ~/.npm directory"
[ -e ~/.node-gyp ] && echo "Found ~/.node-gyp directory"

# Check open files and sockets (a full lsof listing can be slow)
echo "Checking open files and sockets..."
lsof | grep -q 'node' && echo "Found open files or sockets related to Node.js"

# Check other version managers
echo "Checking other version managers..."
command -v n >/dev/null 2>&1 && echo "Found 'n' version manager"

# Check temporary directories
echo "Checking temporary directories..."
ls /tmp | grep -q 'node\|npm' && echo "Found Node.js or npm related files in /tmp"

# Check Browserify and Webpack cache
echo "Checking Browserify and Webpack cache..."
[ -e ~/.config/browserify ] && echo "Found Browserify cache"
[ -e ~/.config/webpack ] && echo "Found Webpack cache"

# Check Electron cache
echo "Checking Electron cache..."
[ -e ~/.electron ] && echo "Found Electron cache"

# Check logs — compgen -G tests a glob without spawning ls
echo "Checking logs..."
[ -e ~/.npm/_logs ] && echo "Found npm logs"
compgen -G "$HOME/.node-gyp/*.log" >/dev/null && echo "Found Node.js logs"

# Check miscellaneous directories
echo "Checking miscellaneous directories..."
[ -e ~/.node_repl_history ] && echo "Found ~/.node_repl_history"
compgen -G "$HOME/.v8flags*" >/dev/null && echo "Found ~/.v8flags*"
[ -e ~/.npm-global ] && echo "Found ~/.npm-global"
[ -e ~/.nvm-global ] && echo "Found ~/.nvm-global"

echo "Check completed."

26
comfy
View file

@ -1,26 +0,0 @@
#!/bin/bash
# Bring up a two-pane tmux session "comfy": left pane runs ComfyUI in its
# mamba env inside a crash-notify-restart loop; right pane prepares the
# instabot env for ~/workshop/igbot.
# Create a new tmux session named "comfy" detached (-d) and start the first command in the left pane
tmux new-session -d -s comfy -n comfypane
# Split the window into two panes. By default, this creates a vertical split.
tmux split-window -h -t comfy
# Select the first pane to setup comfy environment
tmux select-pane -t 0
# Resolve the comfy env's prefix from `mamba env list`.
# NOTE(review): grep "^comfy" also matches env names that merely start
# with "comfy" (e.g. comfy2) — confirm the env name is unique.
COMFY_MAMBA=$(mamba env list | grep "^comfy" | awk '{print $2}')
tmux send-keys -t 0 "cd ~/workshop/sd/ComfyUI" C-m
# $COMFY_MAMBA expands here in the outer shell; \$PATH expands in the pane.
tmux send-keys -t 0 "export PATH=\"$COMFY_MAMBA/bin:\$PATH\"" C-m
tmux send-keys -t 0 "source ~/.zshrc" C-m
# Restart loop: if ComfyUI exits non-zero, show a macOS notification via
# osascript, then relaunch after 1s.
tmux send-keys -t 0 "mamba activate comfy; sleep 1; while true; do PYTORCH_MPS_HIGH_WATERMARK_RATIO=0.0 PYTORCH_ENABLE_MPS_FALLBACK=1 python main.py --preview-method auto --force-fp16 --enable-cors-header; exit_status=\$?; if [ \$exit_status -ne 0 ]; then osascript -e 'display notification \"ComfyUI script exited unexpectedly\" with title \"Error in ComfyUI\"'; fi; sleep 1; done" C-m
# Select the second pane to setup extracomfy environment
tmux select-pane -t 1
IG_MAMBA=$(mamba env list | grep "^insta" | awk '{print $2}')
tmux send-keys -t 1 "export PATH=\"$IG_MAMBA/bin:\$PATH\"" C-m
tmux send-keys -t 1 "source ~/.zshrc" C-m
tmux send-keys -t 1 "mamba activate instabot; cd workshop/igbot" C-m
# Attach to the tmux session
# tmux attach -t comfy

55
ddns
View file

@ -1,55 +0,0 @@
#!/bin/bash
# ddns — dynamic-DNS refresher for Cloudflare.
# Compares the current public IP (reported by Mullvad) to the last value
# stored in ~/.services/ip.txt; on change (or with -f/--force) rewrites
# every A record listed in ~/.services/cf_domains.json.
source /home/sij/.GLOBAL_VARS

service="https://am.i.mullvad.net/ip"
# Obtain the current public IP address
#current_ip=$(ssh -n sij@10.13.37.10 curl -s $service)
current_ip=$(curl -s "$service")
last_ip=$(cat /home/sij/.services/ip.txt)
api_token=$CF_API_KEY

# Path to the JSON file with zone IDs, subdomains, and DNS IDs mappings
json_file="/home/sij/.services/cf_domains.json"
force_update=false

# Parse command line arguments for the -f/--force flag
while [[ "$#" -gt 0 ]]; do
  case $1 in
    -f|--force) force_update=true ;;
    *) echo "Unknown parameter passed: $1"; exit 1 ;;
  esac
  shift
done

# Update a single Cloudflare A record and append the raw API response to
# the shared results file ($temp_file, set below).
update_dns_record() {
  local zone_id=$1 subdomain=$2 dns_id=$3
  local update_result
  update_result=$(curl -s -X PUT "https://api.cloudflare.com/client/v4/zones/$zone_id/dns_records/$dns_id" \
    -H "Authorization: Bearer $api_token" \
    -H "Content-Type: application/json" \
    --data "{\"type\":\"A\",\"name\":\"$subdomain\",\"content\":\"$current_ip\",\"ttl\":120,\"proxied\":true}")
  echo "$update_result" >> "$temp_file"
}

# Check if IP has changed or --force flag is used
if [ "$current_ip" != "$last_ip" ] || [ "$force_update" = true ]; then
  # Fix: create the temp file only when an update actually happens, and
  # guarantee cleanup on every exit path (the original allocated it
  # unconditionally and leaked it when the IP was unchanged).
  temp_file=$(mktemp)
  trap 'rm -f "$temp_file"' EXIT

  echo "$current_ip" > /home/sij/.services/ip.txt
  # Iterate through each (zone_id, subdomain, dns_id) triple in the JSON
  /home/sij/miniforge3/bin/jq -r '.[] | .zone_id as $zone_id | .subdomains | to_entries[] | [$zone_id, .key, .value] | @tsv' "$json_file" |
  while IFS=$'\t' read -r zone_id subdomain dns_id; do
    update_dns_record "$zone_id" "$subdomain" "$dns_id"
  done
  # Combine all update results into a single JSON array for display
  /home/sij/miniforge3/bin/jq -s '.' "$temp_file"
else
  echo "IP address has not changed from ${last_ip}. No action taken."
fi

View file

@ -1,27 +0,0 @@
#!/bin/bash
# Recursively delete __pycache__ directories.
#   no argument : cleans ~/sync and ~/workshop
#   "."         : cleans the current directory
#   <path>      : cleans that path (a leading ~ is expanded)

# Default directories to search ($HOME used directly — no expansion needed)
directories=("$HOME/sync" "$HOME/workshop")

# Check if a command line argument is provided
if [ $# -gt 0 ]; then
  if [ "$1" == "." ]; then
    # Use the current directory
    directories=(".")
  else
    # Use the provided directory
    directories=("$1")
  fi
fi

# Iterate through each directory
for dir in "${directories[@]}"; do
  # Expand a leading tilde with parameter expansion. The original used
  # `eval echo "$dir"`, which would execute any command substitution or
  # metacharacters embedded in the argument — an injection hazard.
  expanded_dir=${dir/#\~/$HOME}
  # Find and delete __pycache__ directories
  find "$expanded_dir" -type d -name "__pycache__" -exec rm -rf {} +
done
echo "Deletion of __pycache__ folders completed."

View file

@ -1,14 +0,0 @@
#!/usr/bin/env python3
"""Print the emoji flag for a two-letter ISO country code argument."""
import sys

def flag_emoji(country_code):
    """Return the regional-indicator flag for country_code (e.g. 'us' -> 🇺🇸).

    Each ASCII letter is shifted by 127397, the distance from 'A' (U+0041)
    to REGIONAL INDICATOR SYMBOL LETTER A (U+1F1E6).
    """
    shift = 127397
    letters = (chr(ord(letter) + shift) for letter in country_code.upper())
    return ''.join(letters)

if __name__ == "__main__":
    if len(sys.argv) > 1:
        print(flag_emoji(sys.argv[1]))
    else:
        print("No country code provided")

51
get
View file

@ -1,51 +0,0 @@
#!/bin/bash
# get — clone a git repository and, if it looks like a Python project
# (setup.py or requirements.txt), create a matching mamba environment and
# install its dependencies into it.
# Usage: get <repo-url>
# Check if a URL is provided
if [ $# -eq 0 ]; then
echo "Please provide a git repository URL."
exit 1
fi
# Extract the repository URL and name
repo_url=$1
repo_name=$(basename "$repo_url" .git)
# Clone the repository
git clone "$repo_url"
# Check if the clone was successful
if [ $? -ne 0 ]; then
echo "Failed to clone the repository."
exit 1
fi
# Change to the newly created directory
cd "$repo_name" || exit
# Check for setup.py or requirements.txt
if [ -f "setup.py" ] || [ -f "requirements.txt" ]; then
# Create a new Mamba environment named after the repository
mamba create -n "$repo_name" python -y
# Activate the new environment (hook needed in a non-interactive shell)
eval "$(conda shell.bash hook)"
mamba activate "$repo_name"
# Install dependencies
if [ -f "setup.py" ]; then
echo "Installing from setup.py..."
# NOTE(review): `python setup.py install` is deprecated by setuptools;
# `pip install .` is the modern equivalent — confirm before changing.
python setup.py install
fi
if [ -f "requirements.txt" ]; then
echo "Installing from requirements.txt..."
pip install -r requirements.txt
fi
echo "Environment setup complete."
else
echo "No setup.py or requirements.txt found. Skipping environment setup."
fi
echo "Repository cloned and set up successfully."

37
getreq
View file

@ -1,37 +0,0 @@
#!/bin/bash
# getreq — scan one Python file for import statements and make sure each
# imported top-level module is installed (pip, then conda, else kip).

# Check for input argument
if [ "$#" -ne 1 ]; then
  echo "Usage: $0 <path-to-python-file>"
  exit 1
fi

PYTHON_FILE="$1"

# Extract top-level module names from both `import x` and `from x import y`
# (awk field 2 is the module in either form; cut keeps text before '.').
IMPORTS=$(grep -E "^import |^from " "$PYTHON_FILE" | \
  awk '{print $2}' | cut -d. -f1 | sort | uniq)

# Check whether a package is already installed and install it otherwise.
check_and_install() {
  local PACKAGE=$1
  # Check if the package is installed via pip
  if pip list | grep -q "^$PACKAGE "; then
    echo "$PACKAGE is already installed via pip."
  # Check if the package is installed via conda
  elif conda list | grep -q "^$PACKAGE "; then
    echo "$PACKAGE is already installed via conda."
  else
    # Install the package using kip
    echo "Installing $PACKAGE using kip..."
    kip install "$PACKAGE"
  fi
}

# Iterate over imports. $IMPORTS is deliberately unquoted (it is a
# whitespace-separated list), but each name is quoted when passed on —
# the original passed it unquoted.
for pkg in $IMPORTS; do
  check_and_install "$pkg"
done

46
i
View file

@ -1,46 +0,0 @@
#!/Users/sij/miniforge3/envs/minicpm/bin/python
import os
import torch
from transformers import AutoModel, AutoTokenizer
from PIL import Image
import argparse
# Set environment variable to enable CPU fallback
os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"
# Parse command line arguments
parser = argparse.ArgumentParser(description='Process image for model.')
parser.add_argument('image_path', type=str, help='Path to the image file')
args = parser.parse_args()
# Check if MPS is available
if torch.backends.mps.is_available():
device = torch.device("mps")
else:
device = torch.device("cpu")
# Load the model with appropriate torch dtype
model = AutoModel.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5', trust_remote_code=True, torch_dtype=torch.float32 if device == torch.device("cpu") else torch.float16)
model = model.to(device)
tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5', trust_remote_code=True)
model.eval()
# Load and prepare the image
image_path = "/Users/sij/adorablenakedskinny.jpeg" # args.image_path
image = Image.open(image_path).convert('RGB')
question = 'Using modern English and adult slang words like cock, cum, pussy, fuck, etc., describe the scene pictured in the image.'
msgs = [{'role': 'user', 'content': question}]
# Perform inference
with torch.no_grad():
res = model.chat(
image=image,
msgs=msgs,
tokenizer=tokenizer,
sampling=True,
temperature=0.7
)
print(res)

View file

@ -1,144 +0,0 @@
#!/usr/bin/env python3
import os
import re
import requests
import time
import pkg_resources
# List of Python built-in modules
# NOTE(review): hard-coded snapshot of stdlib module names; it is not
# version-aware (sys.stdlib_module_names exists on 3.10+) — treat as
# best-effort, not exhaustive.
BUILTIN_MODULES = {
    'abc', 'aifc', 'argparse', 'array', 'ast', 'asynchat', 'asyncio', 'asyncore', 'atexit',
    'audioop', 'base64', 'bdb', 'binascii', 'binhex', 'bisect', 'builtins', 'bz2', 'calendar',
    'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', 'colorsys',
    'compileall', 'concurrent', 'configparser', 'contextlib', 'copy', 'copyreg', 'crypt', 'csv',
    'ctypes', 'curses', 'dataclasses', 'datetime', 'dbm', 'decimal', 'difflib', 'dis', 'distutils',
    'doctest', 'dummy_threading', 'email', 'encodings', 'ensurepip', 'enum', 'errno', 'faulthandler',
    'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fractions', 'ftplib', 'functools',
    'gc', 'getopt', 'getpass', 'gettext', 'glob', 'gzip', 'hashlib', 'heapq', 'hmac', 'html', 'http',
    'imaplib', 'imghdr', 'imp', 'importlib', 'inspect', 'io', 'ipaddress', 'itertools', 'json',
    'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'lzma', 'mailbox', 'mailcap', 'marshal',
    'math', 'mimetypes', 'modulefinder', 'multiprocessing', 'netrc', 'nntplib', 'numbers', 'operator',
    'optparse', 'os', 'ossaudiodev', 'parser', 'pathlib', 'pdb', 'pickle', 'pickletools', 'pipes',
    'pkgutil', 'platform', 'plistlib', 'poplib', 'posix', 'pprint', 'profile', 'pstats', 'pty',
    'pwd', 'py_compile', 'pyclbr', 'pydoc', 'queue', 'quopri', 'random', 're', 'readline',
    'reprlib', 'resource', 'rlcompleter', 'runpy', 'sched', 'secrets', 'select', 'selectors', 'shelve',
    'shlex', 'shutil', 'signal', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'socketserver',
    'spwd', 'sqlite3', 'ssl', 'stat', 'statistics', 'string', 'stringprep', 'struct', 'subprocess',
    'sunau', 'symtable', 'sys', 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile',
    'termios', 'test', 'textwrap', 'threading', 'time', 'timeit', 'token', 'tokenize', 'trace',
    'traceback', 'tracemalloc', 'tty', 'turtle', 'types', 'typing', 'unicodedata', 'unittest',
    'urllib', 'uu', 'uuid', 'venv', 'warnings', 'wave', 'weakref', 'webbrowser', 'xdrlib', 'xml',
    'xmlrpc', 'zipapp', 'zipfile', 'zipimport', 'zlib'
}
# Known corrections for PyPI package names
# Maps import names to the differing PyPI distribution names used by
# check_pypi_availability (e.g. `import yaml` is provided by "pyyaml").
KNOWN_CORRECTIONS = {
    'dateutil': 'python-dateutil',
    'dotenv': 'python-dotenv',
    'docx': 'python-docx',
    'tesseract': 'pytesseract',
    'magic': 'python-magic',
    'multipart': 'python-multipart',
    'newspaper': 'newspaper3k',
    'srtm': 'elevation',
    'yaml': 'pyyaml',
    'zoneinfo': 'backports.zoneinfo'
}
# Generic module names that are almost always local packages, not PyPI ones.
EXCLUDED_NAMES = {'models', 'data', 'convert', 'example', 'tests'}


def find_imports(root_dir):
    """Walk root_dir and map each .py file path to its raw import lines."""
    imports_by_file = {}
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            filepath = os.path.join(dirpath, filename)
            with open(filepath, 'r') as handle:
                collected = [
                    stripped
                    for stripped in (raw.strip() for raw in handle)
                    if stripped.startswith(('import ', 'from '))
                    and not stripped.startswith('#')
                ]
            imports_by_file[filepath] = collected
    return imports_by_file


def process_import_lines(import_lines):
    """Reduce raw import statements to a de-duplicated set of top-level
    module names, skipping ALL-CAPS names and EXCLUDED_NAMES entries."""
    modules = set()
    for statement in import_lines:
        if statement.startswith('import '):
            # 'import a.b, c as d' -> 'a', 'c'
            for clause in statement.replace('import ', '').split(','):
                name = re.sub(r'\s+as\s+\w+', '', clause).split('.')[0].strip()
                if name and not name.isupper() and name not in EXCLUDED_NAMES:
                    modules.add(name)
        elif statement.startswith('from '):
            # 'from a.b import c' -> 'a'
            name = statement.split(' ')[1].split('.')[0].strip()
            if name and not name.isupper() and name not in EXCLUDED_NAMES:
                modules.add(name)
    return modules
def check_pypi_availability(libraries):
    """Split `libraries` into (available, unavailable) sets of PyPI names.

    Built-in modules are skipped entirely; import names whose PyPI
    distribution differs (e.g. yaml -> pyyaml) are translated via
    KNOWN_CORRECTIONS first. A network failure that survives the retries
    in check_library_on_pypi counts the package as unavailable.
    """
    available = set()
    unavailable = set()
    for lib in libraries:
        if lib in BUILTIN_MODULES:  # Skip built-in modules
            continue
        corrected_lib = KNOWN_CORRECTIONS.get(lib, lib)
        try:
            if check_library_on_pypi(corrected_lib):
                available.add(corrected_lib)
            else:
                unavailable.add(corrected_lib)
        except requests.exceptions.RequestException:
            print(f"Warning: Unable to check {corrected_lib} on PyPI due to network error.")
            unavailable.add(corrected_lib)
    return available, unavailable
def check_library_on_pypi(library):
    """Return True if `library` has a PyPI JSON page (HTTP 200).

    Retries up to 3 times with a 1s pause on network errors, re-raising
    the final RequestException so the caller can classify the package.
    """
    max_retries = 3
    for attempt in range(max_retries):
        try:
            response = requests.get(f"https://pypi.org/pypi/{library}/json", timeout=5)
            return response.status_code == 200
        except requests.exceptions.RequestException:
            if attempt < max_retries - 1:
                time.sleep(1)  # Wait for 1 second before retrying
            else:
                raise
def save_to_requirements_file(available, output_file='requirements.txt'):
    """Append newly found installable packages to requirements.txt,
    skipping any name the file already lists."""
    already_listed = set()
    if os.path.isfile(output_file):
        with open(output_file, 'r') as handle:
            already_listed = {entry.strip() for entry in handle}
    new_packages = sorted(available - already_listed)
    with open(output_file, 'a') as handle:
        for package in new_packages:
            print(f"Adding to requirements.txt: {package}")
            handle.write(package + '\n')
def save_to_missing_file(unavailable, output_file='missing-packages.txt'):
    """Append packages that could not be found on PyPI to
    missing-packages.txt, skipping names the file already lists."""
    already_listed = set()
    if os.path.isfile(output_file):
        with open(output_file, 'r') as handle:
            already_listed = {entry.strip() for entry in handle}
    new_packages = sorted(unavailable - already_listed)
    with open(output_file, 'a') as handle:
        for package in new_packages:
            print(f"Adding to missing-packages.txt: {package}")
            handle.write(package + '\n')
if __name__ == "__main__":
    # Scan every .py under the cwd, resolve each file's imports to PyPI
    # names, and append results to requirements.txt / missing-packages.txt.
    # NOTE(review): indentation reconstructed from a whitespace-stripped
    # rendering — the per-file processing is assumed to sit inside the loop.
    root_dir = os.getcwd()  # Get the current working directory
    imports_by_file = find_imports(root_dir)
    for filepath, import_lines in imports_by_file.items():
        print(f"# Processing {filepath}")
        processed_lines = process_import_lines(import_lines)
        available, unavailable = check_pypi_availability(processed_lines)
        save_to_requirements_file(available)
        save_to_missing_file(unavailable)
    print(f"Processed import statements have been saved to requirements.txt and missing-packages.txt")

29
ison
View file

@ -1,29 +0,0 @@
#!/Users/sij/miniforge3/envs/sijapi/bin/python
# ison — poll the /health endpoint of the local service and several
# 100.64.64.x peers (CGNAT range — presumably Tailscale hosts; confirm)
# and print one status line per host.
import requests


def check_health(url):
    """Return a human-readable up/status-code/down string for `url`.

    NOTE(review): requests.get has no timeout here, so one hung host can
    block the whole sweep — consider requests.get(url, timeout=...).
    """
    try:
        response = requests.get(url)
        if response.status_code == 200:
            return f"{url} is up"
        else:
            return f"{url} returned status code {response.status_code}"
    except requests.exceptions.RequestException:
        return f"{url} is down"


def main():
    # Hosts checked sequentially, in this order.
    addresses = [
        "http://localhost:4444/health",
        "http://100.64.64.20:4444/health",
        "http://100.64.64.30:4444/health",
        "http://100.64.64.11:4444/health",
        "http://100.64.64.15:4444/health"
    ]
    for address in addresses:
        print(check_health(address))


if __name__ == "__main__":
    main()

52
kip
View file

@ -1,52 +0,0 @@
#!/bin/bash
# kip — for each argument, install the package with mamba (conda-forge),
# falling back to pip; a *.py argument is instead scanned for imports and
# each imported top-level module is installed.

# Install one package unless mamba or pip already provides it.
install_package() {
  local package=$1
  if mamba list | grep -q "^$package\s"; then
    echo "Package '$package' already installed by mamba."
  elif pip list | grep -q "^$package\s"; then
    echo "Package '$package' already installed by pip."
  else
    echo "Installing package '$package'."
    mamba install -y -c conda-forge "$package" || pip install "$package"
  fi
}

# Scan a Python file for import statements and install each module found.
process_python_file() {
  local file_path=$1
  local IMPORTS
  # Handle both 'import x' and 'from x import y'; keep only the top-level
  # module name (text before the first dot).
  IMPORTS=$(grep -E "^import |^from " "$file_path" | \
    awk '{if($1=="from") print $2; else print $2}' | \
    cut -d. -f1 | sort | uniq)
  # $IMPORTS deliberately unquoted: it is a whitespace-separated list.
  for package in $IMPORTS; do
    echo "Processing $package from $file_path"
    install_package "$package"   # fix: argument was previously unquoted
  done
}

# Check if any arguments were passed
if [ "$#" -lt 1 ]; then
  echo "Usage: kip <package1> [<package2> ...] or kip <script.py>"
  exit 1
fi

MAMBA_BASE=$(mamba info --base)
# NOTE(review): ".../lib/python" has no version suffix, so this PYTHONPATH
# entry rarely points at a real site-packages dir — confirm it is needed.
export PYTHONPATH="${PYTHONPATH}:${MAMBA_BASE}/${CONDA_DEFAULT_ENV}/lib/python"

for arg in "$@"; do
  # A *.py argument means "install this script's imports"
  if [[ "$arg" == *.py ]]; then
    if [ -f "$arg" ]; then
      process_python_file "$arg"
    else
      echo "File $arg not found."
    fi
  else
    install_package "$arg"
  fi
done

19
lsd
View file

@ -1,19 +0,0 @@
#!/bin/bash
# Wrapper around lsd with opinionated defaults; an optional first argument
# that does not start with '-' is treated as the directory to list.

# Default options kept in an array so each flag stays a separate word
# without relying on unquoted word-splitting of a string.
default_options=(--color=always -F --long --size=short --permission=octal --group-dirs=first -X)

# Check if the first argument is a directory or an option
if [[ $# -gt 0 && ! $1 =~ ^- ]]; then
  # First argument is a directory: store it and drop it from the list
  directory=$1
  shift
else
  # No directory specified, default to the current directory
  directory="."
fi

# Execute lsd with the defaults, the directory, and any remaining arguments
/opt/homebrew/bin/lsd "${default_options[@]}" "$directory" "$@"

View file

@ -1,15 +0,0 @@
#!/bin/bash
# Export every named conda/mamba environment (except base) to <name>.yml
# in the current directory.

# First column of `mamba env list`, minus comment lines and base.
# Fix: -vx matches the whole line exactly, so environments whose names
# merely *contain* "base" (e.g. "database") are no longer skipped.
envs=$(mamba env list | awk '{print $1}' | grep -v '^#' | grep -vx 'base')

# Loop through each environment name
for env in $envs; do
  # conda/mamba can export directly by name; no activation required.
  echo "Exporting $env..."
  mamba env export --name "$env" > "${env}.yml"
done
echo "All environments have been exported."

View file

@ -1,26 +0,0 @@
#!/bin/bash
# Recreate conda environments from exported .yml files.
# Usage: script [env.yml] — with no argument, processes every *.yml in cwd.

# Function to process a single .yml file
process_file() {
  local file="$1"
  if [[ -f "$file" ]]; then
    echo "Creating environment from $file..."
    # conda reads the env name from the YAML itself. The original computed
    # an env_name from the filename (via sed with an unescaped dot) but
    # never used it, so that dead code was removed.
    conda env create -f "$file" || echo "Failed to create environment from $file"
  else
    echo "File $file does not exist."
  fi
}

# Check if a .yml file was provided as an argument
if [[ $# -eq 1 && $1 == *.yml ]]; then
  # Process the provided .yml file
  process_file "$1"
else
  # No argument: process all .yml files in the current directory.
  # With no matches the literal "*.yml" is passed through and reported
  # as missing by process_file (same as the original behavior).
  for file in *.yml; do
    process_file "$file"
  done
  echo "Environment creation process completed."
fi

24
murder
View file

@ -1,24 +0,0 @@
#!/bin/bash
# Check if an argument is given
if [ $# -eq 0 ]; then
echo "Usage: murder [process name or port]"
exit 1
fi
# Get the input parameter
ARGUMENT=$1
# Check if the argument is numeric
if [[ $ARGUMENT =~ ^[0-9]+$ ]]; then
echo "Killing processes listening on port $ARGUMENT"
lsof -t -i:$ARGUMENT | xargs kill
else
# Process name was given instead of a port number
echo "Killing processes with name $ARGUMENT"
for PID in $(ps aux | grep $ARGUMENT | grep -v grep | awk '{print $2}'); do
echo "Killing process $PID"
sudo kill -9 $PID
done
fi

116
noon
View file

@ -1,116 +0,0 @@
#!/Users/sij/miniforge3/bin/python
import os
import sys
import argparse
def ask_for_confirmation(message):
    """Prompt repeatedly until the user answers y or n; True means yes."""
    while True:
        answer = input(message + " (y/n): ").strip().lower()
        if answer == 'y':
            return True
        if answer == 'n':
            return False
        print("Invalid input. Please enter 'y' or 'n'.")
def rename_files_orig(root_dir, manual):
    """Strip the '(orig)' marker from filenames under root_dir.

    If a plain-named file already occupies the target name, it is first
    moved aside to '<name>(new)<ext>'. With manual=True each rename asks
    for confirmation; a declined or colliding rename skips that file.
    NOTE(review): indentation reconstructed from a whitespace-stripped diff
    rendering — verify the trailing rename runs in both branches, as the
    overall strip-the-marker purpose implies.
    """
    for dirpath, _, filenames in os.walk(root_dir, followlinks=False):
        for filename in filenames:
            if '(orig)' in filename:
                orig_filepath = os.path.join(dirpath, filename)
                base_filename, ext = os.path.splitext(filename)
                new_filename = base_filename.replace('(orig)', '')
                new_filepath = os.path.join(dirpath, new_filename + ext)
                if os.path.exists(new_filepath):
                    # Plain name is taken: move that file aside to '(new)'.
                    new_file_new_name = new_filename + '(new)' + ext
                    new_file_new_path = os.path.join(dirpath, new_file_new_name)
                    if manual:
                        if not ask_for_confirmation(f"Do you want to rename {new_filepath} to {new_file_new_path}?"):
                            continue
                    if os.path.exists(new_file_new_path):
                        print(f"Error: Cannot rename {new_filepath} to {new_file_new_path} because the target file already exists.")
                        continue
                    os.rename(new_filepath, new_file_new_path)
                    print(f'Renamed: {new_filepath} -> {new_file_new_name}')
                else:
                    print(f"No associated file found for: {orig_filepath}")
                # Finally rename the '(orig)' file to the plain name.
                orig_file_new_name = new_filename + ext
                orig_file_new_path = os.path.join(dirpath, orig_file_new_name)
                if manual:
                    if not ask_for_confirmation(f"Do you want to rename {orig_filepath} to {orig_file_new_path}?"):
                        continue
                if os.path.exists(orig_file_new_path):
                    print(f"Error: Cannot rename {orig_filepath} to {orig_file_new_path} because the target file already exists.")
                    continue
                os.rename(orig_filepath, orig_file_new_path)
                print(f'Renamed: {orig_filepath} -> {orig_file_new_name}')
def rename_files_new(root_dir, manual):
    """Strip the '(new)' marker from filenames under root_dir.

    Mirror image of rename_files_orig: if a plain-named file occupies the
    target name, it is first moved aside to '<name>(orig)<ext>'. With
    manual=True each rename asks for confirmation; a declined or colliding
    rename skips that file.
    NOTE(review): indentation reconstructed from a whitespace-stripped diff
    rendering — verify the trailing rename runs in both branches.
    """
    for dirpath, _, filenames in os.walk(root_dir, followlinks=False):
        for filename in filenames:
            if '(new)' in filename:
                new_filepath = os.path.join(dirpath, filename)
                base_filename, ext = os.path.splitext(filename)
                orig_filename = base_filename.replace('(new)', '')
                orig_filepath = os.path.join(dirpath, orig_filename + ext)
                if os.path.exists(orig_filepath):
                    # Plain name is taken: move that file aside to '(orig)'.
                    orig_file_orig_name = orig_filename + '(orig)' + ext
                    orig_file_orig_path = os.path.join(dirpath, orig_file_orig_name)
                    if manual:
                        if not ask_for_confirmation(f"Do you want to rename {orig_filepath} to {orig_file_orig_path}?"):
                            continue
                    if os.path.exists(orig_file_orig_path):
                        print(f"Error: Cannot rename {orig_filepath} to {orig_file_orig_path} because the target file already exists.")
                        continue
                    os.rename(orig_filepath, orig_file_orig_path)
                    print(f'Renamed: {orig_filepath} -> {orig_file_orig_name}')
                else:
                    print(f"No associated file found for: {new_filepath}")
                # Finally rename the '(new)' file to the plain name.
                new_file_new_name = orig_filename + ext
                new_file_new_path = os.path.join(dirpath, new_file_new_name)
                if manual:
                    if not ask_for_confirmation(f"Do you want to rename {new_filepath} to {new_file_new_path}?"):
                        continue
                if os.path.exists(new_file_new_path):
                    print(f"Error: Cannot rename {new_filepath} to {new_file_new_path} because the target file already exists.")
                    continue
                os.rename(new_filepath, new_file_new_path)
                print(f'Renamed: {new_filepath} -> {new_file_new_name}')
if __name__ == "__main__":
    # CLI: exactly one of -o (strip '(orig)') or -n (strip '(new)') must be
    # chosen; -m asks before each rename; optional positional directory.
    parser = argparse.ArgumentParser(description='Rename files based on given criteria.')
    parser.add_argument('-o', '--orig', action='store_true', help='Rename files ending with (orig)')
    parser.add_argument('-n', '--new', action='store_true', help='Rename files ending with (new)')
    parser.add_argument('-m', '--manual', action='store_true', help='Manual mode: ask for confirmation before each renaming')
    parser.add_argument('directory', nargs='?', default=os.getcwd(), help='Directory to start the search (default: current directory)')
    args = parser.parse_args()
    # The two modes are mutually exclusive
    if args.orig and args.new:
        print("Error: Please specify either -o or -n, not both.")
        sys.exit(1)
    if args.orig:
        print("Running in ORIG mode")
        rename_files_orig(args.directory, args.manual)
    elif args.new:
        print("Running in NEW mode")
        rename_files_new(args.directory, args.manual)
    else:
        print("Error: Please specify either -o or -n.")
        sys.exit(1)

40
nv
View file

@ -1,40 +0,0 @@
#!/bin/bash
# nv — create-or-attach workflow: ensure a mamba environment and a tmux
# session, both named $1, exist; then attach with the env activated.
# Usage: nv <name> [python-version]   (accepts "3.11", "py3.11", "python3.11")
SESSION_NAME="$1"
PYTHON_VERSION_INPUT="${2:-3.10}" # Default to 3.10 if not specified (old comment wrongly said 3.8)

# Normalize the Python version input
if [[ "$PYTHON_VERSION_INPUT" =~ ^python[0-9.]+$ ]]; then
  PYTHON_VERSION="${PYTHON_VERSION_INPUT//python/}"
elif [[ "$PYTHON_VERSION_INPUT" =~ ^py[0-9.]+$ ]]; then
  PYTHON_VERSION="${PYTHON_VERSION_INPUT//py/}"
else
  PYTHON_VERSION="$PYTHON_VERSION_INPUT"
fi

# Format for Conda
MAMBA_PYTHON_VERSION="python=$PYTHON_VERSION"

# Create the environment if it does not exist yet
if ! mamba env list | grep -q "^$SESSION_NAME\s"; then
  echo "Creating new Mamba environment: $SESSION_NAME with $MAMBA_PYTHON_VERSION"
  mamba create --name "$SESSION_NAME" "$MAMBA_PYTHON_VERSION" --yes
fi

# Find the env's prefix directory (second column of `mamba env list`)
CONDA_ENV_DIR=$(mamba env list | grep "^$SESSION_NAME" | awk '{print $2}')

# Create the tmux session if it does not exist yet
if ! tmux has-session -t "$SESSION_NAME" 2>/dev/null; then
  echo "Creating new tmux session: $SESSION_NAME"
  tmux new-session -d -s "$SESSION_NAME"
  sleep 2
fi

# Prepend the env's bin dir to PATH inside the session before activating.
# Fix: the original interpolated the undefined $MAMBA_ENV_DIR here instead
# of $CONDA_ENV_DIR, making the PATH prepend a silent no-op.
sleep 1
tmux send-keys -t "$SESSION_NAME" "export PATH=\"$CONDA_ENV_DIR/bin:\$PATH\"" C-m
tmux send-keys -t "$SESSION_NAME" "source ~/.zshrc" C-m
tmux send-keys -t "$SESSION_NAME" "mamba activate $SESSION_NAME" C-m
tmux attach -t "$SESSION_NAME"

8
o
View file

@ -1,8 +0,0 @@
#! /bin/bash
# o — make sure `ollama serve` is running in the background, then hand all
# arguments to the osh.py helper.
if [[ -z $(pidof ollama) ]]; then
  # Start the server detached, silencing its output, and disown it so it
  # survives this script exiting.
  ollama serve &>/dev/null &
  disown
fi
# Fix: "$@" preserves each argument as one word (the original's unquoted
# $@ split arguments containing spaces).
/usr/bin/env python3 /Users/sij/AI/osh/osh.py "$@"

75
pf
View file

@ -1,75 +0,0 @@
#!/usr/bin/python3
import socket
import threading
import select
def forward(source, destination):
    """Copy bytes from `source` socket to `destination` until EOF or error.

    Uses a 1-second select timeout so the loop can re-check the socket
    periodically; on exit half-closes both ends (read side of source,
    write side of destination) so the opposite-direction forwarder drains
    cleanly instead of erroring.
    """
    try:
        while True:
            ready, _, _ = select.select([source], [], [], 1)
            if ready:
                data = source.recv(4096)
                if not data:
                    # Empty recv means the peer closed its write side.
                    break
                destination.sendall(data)
    except (OSError, socket.error) as e:
        print(f"Connection error: {e}")
    finally:
        # Each shutdown may race with the other forwarder thread having
        # already closed the socket, hence the individual OSError guards.
        try:
            source.shutdown(socket.SHUT_RD)
        except OSError:
            pass
        try:
            destination.shutdown(socket.SHUT_WR)
        except OSError:
            pass
def handle(client_socket, remote_host, remote_port):
    """Bridge one accepted client connection to remote_host:remote_port.

    Spawns two forwarder threads (one per direction), waits for both to
    finish, then closes both sockets.
    """
    # Fix: initialize before the try block — the original referenced
    # remote_socket in `finally` even when socket creation failed, which
    # would raise NameError instead of just reporting the error.
    remote_socket = None
    try:
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        remote_socket.connect((remote_host, remote_port))
        thread1 = threading.Thread(target=forward, args=(client_socket, remote_socket))
        thread2 = threading.Thread(target=forward, args=(remote_socket, client_socket))
        thread1.start()
        thread2.start()
        thread1.join()
        thread2.join()
    except Exception as e:
        print(f"Error in handle: {e}")
    finally:
        client_socket.close()
        if remote_socket is not None:
            remote_socket.close()
def create_forwarder(local_host, local_port, remote_host, remote_port):
    """Listen on local_host:local_port and spawn a handler thread per client.

    Runs forever; intended to be hosted in its own thread (see main).
    """
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # SO_REUSEADDR lets the listener rebind immediately after a restart.
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_socket.bind((local_host, local_port))
    server_socket.listen(5)
    print(f"Forwarding {local_host}:{local_port} to {remote_host}:{remote_port}")
    while True:
        try:
            client_socket, address = server_socket.accept()
            print(f"Received connection from {address}")
            # One handler thread per client; handle() closes the sockets.
            threading.Thread(target=handle, args=(client_socket, remote_host, remote_port)).start()
        except Exception as e:
            print(f"Error accepting connection: {e}")
def main():
    """Run two forwarders: 0.0.0.0:1143 -> 127.0.0.1:1142 and
    0.0.0.0:1025 -> 127.0.0.1:1024.

    NOTE(review): the ports suggest exposing a loopback-only IMAP/SMTP
    bridge to the LAN — confirm the upstream service before changing.
    """
    listen_ip = '0.0.0.0'
    imap_thread = threading.Thread(target=create_forwarder, args=(listen_ip, 1143, '127.0.0.1', 1142))
    imap_thread.start()
    smtp_thread = threading.Thread(target=create_forwarder, args=(listen_ip, 1025, '127.0.0.1', 1024))
    smtp_thread.start()
    # The forwarders loop forever, so these joins block indefinitely.
    imap_thread.join()
    smtp_thread.join()


if __name__ == "__main__":
    main()

17
purge
View file

@ -1,17 +0,0 @@
#!/bin/bash
# purge — remove from the current branch any tracked files that do not
# exist on main, then commit and push the removal.
#
# FIX: the original piped `git ls-tree -r main --name-only` — the list of
# files that ARE on main — straight into `git rm`, which deleted every
# tracked file (exactly what the resulting commit shows). We instead take
# the set difference: files tracked on the current branch minus files
# tracked on main.

set -euo pipefail

# Fetch the latest changes from the remote repository
git fetch origin

current_branch=$(git rev-parse --abbrev-ref HEAD)

# Files on this branch but not on origin/main (comm -23 = lines unique to
# the first input). Filenames with embedded newlines are not handled.
comm -23 \
  <(git ls-tree -r HEAD --name-only | sort) \
  <(git ls-tree -r origin/main --name-only | sort) |
while IFS= read -r file; do
  git rm -r -- "$file"
done

# Commit and push only if anything was actually staged for removal
if ! git diff --cached --quiet; then
  git commit -m "Remove files not on the main branch"
  git push origin "$current_branch"
else
  echo "Nothing to remove: all files here are also on main."
fi

46
serv
View file

@ -1,46 +0,0 @@
#!/bin/bash
# serv: wrap an executable from $PATH in a launchd user agent so it runs at
# login and is kept alive by launchd.
#
# Usage: serv <executable>

# Check if an executable is provided as an argument
if [ -z "$1" ]; then
  echo "Usage: $0 <executable>"
  exit 1
fi

# Resolve the executable path. 'command -v' is the portable, built-in
# replacement for the external 'which'.
EXEC_PATH=$(command -v "$1")

# Check if the executable exists
if [ -z "$EXEC_PATH" ]; then
  echo "Error: Executable '$1' not found."
  exit 1
fi

# Get the executable name
EXEC_NAME=$(basename "$EXEC_PATH")

# Write the launchd plist directly with a heredoc — far more readable than
# the original single escaped string. The unquoted delimiter lets
# $EXEC_NAME / $EXEC_PATH expand.
PLIST_FILE="$HOME/Library/LaunchAgents/$EXEC_NAME.plist"
cat > "$PLIST_FILE" <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>$EXEC_NAME</string>
    <key>ProgramArguments</key>
    <array>
        <string>$EXEC_PATH</string>
    </array>
    <key>KeepAlive</key>
    <true/>
    <key>RunAtLoad</key>
    <true/>
</dict>
</plist>
EOF

# Load the launchd service
launchctl load "$PLIST_FILE"

echo "Service '$EXEC_NAME' has been created and loaded."

View file

@ -1,39 +0,0 @@
#!/bin/bash
# speaksummary: upload a file to the api.sij.ai summarizer and save the
# spoken summary as response.wav.

# Check if a filename has been provided
if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <filename>"
    exit 1
fi

filename="$1"

# Check if the file exists
if [ ! -f "$filename" ]; then
    echo "Error: File does not exist."
    exit 1
fi

# Assuming GLOBAL_API_KEY is exported in your environment
if [ -z "$GLOBAL_API_KEY" ]; then
    echo "Error: GLOBAL_API_KEY is not set."
    exit 1
fi

# Endpoint
endpoint="https://api.sij.ai/speaksummary"

# Make the request. '--fail' makes curl exit non-zero on HTTP errors instead
# of saving an HTML error page as response.wav, so we check curl's exit
# status rather than mere existence of the output file (curl -o creates the
# file even on failure).
if curl --fail -X POST "$endpoint" \
     -H "Authorization: Bearer $GLOBAL_API_KEY" \
     -H "Content-Type: multipart/form-data" \
     -F "file=@$filename" \
     -o "response.wav"; then
    echo "The summary has been processed and saved as 'response.wav'"
else
    echo "Failed to save the summary."
fi

View file

@ -1,41 +0,0 @@
#!/bin/bash
# merge-sessions: move every pane from every tmux session into the first
# session's first window, kill the emptied sessions, retile, and attach.
# NOTE(review): each `| while read` loop body runs in a subshell, which is
# fine here since no variables need to survive the loops; but sessions may
# already auto-destroy when their last pane is joined away — verify
# kill-session does not race with that.

# Get the first session as the target for all panes
target_session=$(tmux list-sessions -F '#{session_name}' | head -n 1)
target_window="${target_session}:0" # assuming the first window is index 0
target_pane="${target_window}.0" # assuming the first pane is index 0

# Loop through each session
tmux list-sessions -F '#{session_name}' | while read session; do
    # Skip the target session
    if [[ "$session" == "$target_session" ]]; then
        continue
    fi
    # Loop through each window in the session
    tmux list-windows -t "$session" -F '#{window_index}' | while read window; do
        # Loop through each pane in the window
        tmux list-panes -t "${session}:${window}" -F '#{pane_index}' | while read pane; do
            # source/target addresses use tmux's session:window.pane notation
            source="${session}:${window}.${pane}"
            # Check if the source is not the same as the target
            if [[ "$source" != "$target_pane" ]]; then
                # Join the pane to the target pane
                tmux join-pane -s "$source" -t "$target_pane"
            fi
        done
    done
    # After moving all panes from a session, kill the now-empty session
    # Check if the session to be killed is not the target session
    if [[ "$session" != "$target_session" ]]; then
        tmux kill-session -t "$session"
    fi
done

# After moving all panes, you may want to manually adjust the layout.
# For a simple automatic layout adjustment, you can use:
tmux select-layout -t "$target_window" tiled

# Attach to the master session after everything is merged
tmux attach-session -t "$target_session"

85
tts
View file

@ -1,85 +0,0 @@
#!/Users/sij/miniforge3/bin/python
# tts: CLI text-to-speech. Synthesizes the given text (or text file) with a
# cloned voice via Coqui XTTS v2, plays the result, then cleans up.
import sys
import os
import tempfile
from pathlib import Path
import uuid
import hashlib
from pydub import AudioSegment
import torch
from TTS.api import TTS # Adjust with actual import
from playsound import playsound
from TTS.api import TTS  # NOTE(review): duplicate of the import above — harmless but removable
device = torch.device('cpu') # keep trying 'mps' it will eventually be implemented
# XTTS v2 multilingual voice-cloning model; loaded once, at import time (slow).
model_name = "tts_models/multilingual/multi-dataset/xtts_v2"
tts = TTS(model_name=model_name).to(device)
# Speaker sample used when the requested voice file is missing.
DEFAULT_VOICE = "kiel"
def select_voice(voice_name: str) -> str:
    """Return the path of the .wav speaker sample for voice_name, falling
    back to DEFAULT_VOICE when no matching sample exists."""
    voices = Path('/Users/sij/AI/banana-phone/voices')
    candidate = voices / f"{voice_name}.wav"
    if not candidate.is_file():
        print(f"Voice file not found for {voice_name}, using default")
        candidate = voices / f"{DEFAULT_VOICE}.wav"
    return str(candidate)
def generate_speech(text, speed, voice_file):
    """Synthesize `text` with the module-level XTTS model, cloning the
    speaker in `voice_file`. Returns the Path of the generated .wav, written
    to the system temp directory with a short random suffix."""
    out_dir = Path(tempfile.gettempdir())
    out_dir.mkdir(exist_ok=True)
    tag = str(uuid.uuid4())[:8]
    out_path = out_dir / f"{Path(voice_file).stem}-{tag}.wav"
    tts.tts_to_file(
        text=text,
        speed=speed,
        file_path=out_path,
        speaker_wav=[voice_file],
        language="en",
    )
    return out_path
def main():
    """CLI entry point: tts <text|file> [voice] [speed]."""
    if len(sys.argv) < 2:
        print("Usage: python script.py <text/file> [voice] [speed]")
        sys.exit(1)

    raw = sys.argv[1]
    # A short argument naming an existing file is read from disk; anything
    # else is treated as the literal text to speak.
    if len(raw) < 255 and os.path.isfile(raw):
        with open(raw, 'r') as fh:
            text = fh.read()
    else:
        text = raw

    voice = sys.argv[2] if len(sys.argv) > 2 else DEFAULT_VOICE
    speed = float(sys.argv[3]) if len(sys.argv) > 3 else 1.1

    voice_file_path = select_voice(voice)
    print(f"Using voice file at {voice_file_path}")

    generated = generate_speech(text, speed, voice_file_path)
    combined_audio = AudioSegment.silent(duration=0) + AudioSegment.from_wav(str(generated))

    # Exporting combined audio
    final_output_path = Path(tempfile.gettempdir()) / "output.wav"
    combined_audio.export(str(final_output_path), format="wav")

    # Now playing the generated speech file
    print(f"Playing generated speech from {final_output_path}")
    playsound(str(final_output_path))

    # Cleanup
    os.remove(generated)
    os.remove(final_output_path)

if __name__ == "__main__":
    main()

View file

@ -1,37 +0,0 @@
#!/usr/bin/env python3
import sys
def merge_files(file_paths):
    """Merge the lines of every file in file_paths into the FIRST file.

    The result is the union of all lines across all inputs, deduplicated and
    sorted case-insensitively, written back over file_paths[0].

    Args:
        file_paths: list of text-file paths; file_paths[0] is overwritten.
    """
    if not file_paths:
        print("At least one file path is required.")
        return
    # Union of lines across all inputs (the destination file included).
    # Explicit UTF-8: the original relied on the locale encoding, which can
    # mis-decode or crash on non-UTF-8 locales.
    all_lines = set()
    for file_path in file_paths:
        with open(file_path, 'r', encoding='utf-8') as f:
            all_lines.update(f.read().splitlines())
    # Case-insensitive sort keeps 'Apple' and 'apple' adjacent.
    sorted_lines = sorted(all_lines, key=str.lower)
    # Overwrite the first file with the merged result, one line per line.
    with open(file_paths[0], 'w', encoding='utf-8') as f:
        f.writelines(line + '\n' for line in sorted_lines)
    print(f"Merged {len(file_paths)} files into {file_paths[0]}")

if __name__ == "__main__":
    # Get file paths from command line arguments
    file_paths = sys.argv[1:]
    if not file_paths:
        print("Usage: txt-line-merge-abc file1.txt file2.txt file3.txt ...")
    else:
        merge_files(file_paths)

16
txtsort
View file

@ -1,16 +0,0 @@
#!/bin/bash
# txtsort: print the lines of a file in sorted order.

# Checking if the user provided a file name
if [ $# -ne 1 ]; then
  echo "Usage: $0 filename"
  exit 1
fi

# Checking if the given file is readable
if ! [ -r "$1" ]; then
  echo "The file '$1' is not readable or does not exist."
  exit 1
fi

# Quote the argument and use '--' so filenames containing spaces, globs, or
# a leading dash are handled safely (the original unquoted `sort $1` broke
# on all three).
sort -- "$1"

View file

@ -1,75 +0,0 @@
#!/bin/bash
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Uninstall App
# @raycast.mode fullOutput
# Optional parameters:
# @raycast.icon 🗑️
# @raycast.argument1 { "type": "text", "placeholder": "App name" }
# Documentation:
# @raycast.description Move an application and its related files to the Trash

app_name="$1"

# Send a single path to the macOS Trash via Finder (recoverable, unlike rm).
move_to_trash() {
    local file_path="$1"
    osascript -e "tell application \"Finder\" to delete POSIX file \"$file_path\"" > /dev/null 2>&1
}

# Locate the app via Spotlight, confirm interactively, Trash the app bundle,
# then Trash related support/cache/preference files.
uninstall_app() {
    local app_name="$1"
    # Normalize: ensure the name ends in .app
    if [[ ! "$app_name" =~ \.app$ ]]; then
        app_name="${app_name}.app"
    fi
    # Spotlight query for installed applications with this display name
    local app_paths=$(mdfind "kMDItemKind == 'Application' && kMDItemDisplayName == '${app_name%.*}'")
    if [ -z "$app_paths" ]; then
        echo "Application not found. Please check the name and try again."
        return 1
    fi
    # Multiple matches: let the user pick one interactively
    if [ $(echo "$app_paths" | wc -l) -gt 1 ]; then
        echo "Multiple applications found:"
        select app_path in $app_paths; do
            if [ -n "$app_path" ]; then
                break
            fi
        done
    else
        app_path=$app_paths
    fi
    # Explicit confirmation before doing anything destructive
    echo "Are you sure you want to move $app_path to the Trash?"
    echo "Type 'yes' to confirm:"
    read confirmation
    if [ "$confirmation" != "yes" ]; then
        echo "Uninstallation cancelled."
        return 1
    fi
    echo "Moving $app_path to Trash"
    move_to_trash "$app_path"
    echo "Moving related files to Trash..."
    # Prefer matching support files by bundle identifier; fall back to the
    # app's name if no identifier can be read.
    local app_identifier=$(mdls -name kMDItemCFBundleIdentifier -r "$app_path")
    if [ -n "$app_identifier" ]; then
        # NUL-delimited find/read pairing is safe for paths with spaces
        find /Library/Application\ Support /Library/Caches /Library/Preferences ~/Library/Application\ Support ~/Library/Caches ~/Library/Preferences -name "*$app_identifier*" -maxdepth 1 -print0 | while IFS= read -r -d '' file; do
            move_to_trash "$file"
        done
    else
        echo "Couldn't find bundle identifier. Attempting to move files based on app name..."
        find /Library/Application\ Support /Library/Caches /Library/Preferences ~/Library/Application\ Support ~/Library/Caches ~/Library/Preferences -name "*${app_name%.*}*" -maxdepth 1 -print0 | while IFS= read -r -d '' file; do
            move_to_trash "$file"
        done
    fi
    echo "Uninstallation complete. Files have been moved to the Trash."
}

uninstall_app "$app_name"

69
up
View file

@ -1,69 +0,0 @@
#!/bin/bash
# up: walk every git repository listed in ~/workshop/repos.txt and
# pull, stage, auto-commit, and push any local changes.

# Path to the file containing the list of repositories
# (one path per line; '~' is allowed and expanded below)
REPOS_FILE=~/workshop/repos.txt

# Check if the repos file exists
if [ ! -f "$REPOS_FILE" ]; then
    echo "Error: $REPOS_FILE does not exist."
    exit 1
fi

# Read the repos file and process each directory.
# The '|| [[ -n "$repo_path" ]]' also processes a final line that lacks a
# trailing newline.
while IFS= read -r repo_path || [[ -n "$repo_path" ]]; do
    # Trim whitespace
    repo_path=$(echo "$repo_path" | xargs)
    # Skip empty lines
    [ -z "$repo_path" ] && continue
    # Expand tilde to home directory
    repo_path="${repo_path/#\~/$HOME}"
    # Check if the directory exists
    if [ ! -d "$repo_path" ]; then
        echo "Warning: Directory $repo_path does not exist. Skipping."
        continue
    fi
    echo "Processing repository: $repo_path"
    # Navigate to the project directory (absolute path every iteration, so a
    # previous iteration's cd cannot leak into this one)
    cd "$repo_path" || { echo "Error: Unable to change to directory $repo_path"; continue; }
    # Check if it's a git repository
    # NOTE(review): '[ -d .git ]' misses worktrees/submodules where .git is a
    # file — 'git rev-parse --git-dir' would be more robust; confirm intent.
    if [ ! -d .git ]; then
        echo "Warning: $repo_path is not a git repository. Skipping."
        continue
    fi
    # Get the current branch
    current_branch=$(git rev-parse --abbrev-ref HEAD)
    # Pull the latest changes from the repository
    echo "Pulling from $current_branch branch..."
    git pull origin "$current_branch"
    # Add changes to the Git index (staging area)
    echo "Adding all changes..."
    git add .
    # Check if there are changes to commit (diff-index compares the index to
    # HEAD, so files staged by 'git add' above are detected)
    if git diff-index --quiet HEAD --; then
        echo "No changes to commit."
    else
        # Commit changes
        echo "Committing changes..."
        git commit -m "Auto-update: $(date)"
        # Push changes to the remote repository
        echo "Pushing all changes..."
        git push origin "$current_branch"
    fi
    echo "Update complete for $repo_path!"
    echo "----------------------------------------"
done < "$REPOS_FILE"
echo "All repositories processed."

77
vitals
View file

@ -1,77 +0,0 @@
#!/bin/bash
# vitals: emit a JSON summary of network state — local/WAN IPs, Mullvad exit
# status, Tailscale exit node, and NextDNS status. JSON goes to stdout;
# diagnostics go to stderr so the JSON stays machine-parseable.

tailscale='/Applications/Tailscale.app/Contents/MacOS/Tailscale'

# Get local IP. 'hostname -I' is Linux-only; this script clearly targets
# macOS (see the Tailscale.app path above), so fall back to 'ipconfig'.
local_ip=$(hostname -I 2>/dev/null | awk '{print $1}')
if [ -z "$local_ip" ]; then
    local_ip=$(ipconfig getifaddr en0 2>/dev/null)
fi

wan_info=$(curl -s --max-time 10 https://am.i.mullvad.net/json)

# Default every bare (unquoted-in-JSON) field so the emitted document stays
# valid JSON even when a fetch fails. Previously these stayed unset and the
# output contained '"mullvad_exit_ip": ,' — invalid JSON.
wan_connected=false
mullvad_exit_ip=false
blacklisted=false
if [ ! -z "$wan_info" ]; then
    wan_connected=true
    wan_ip=$(echo "$wan_info" | jq -r '.ip')
    mullvad_exit_ip=$(echo "$wan_info" | jq '.mullvad_exit_ip')
    blacklisted=$(echo "$wan_info" | jq '.blacklisted.blacklisted')
else
    wan_ip="Unavailable"
fi

# Check if Tailscale is installed and get IP
if command -v $tailscale &> /dev/null; then
    has_tailscale=true
    tailscale_ip=$($tailscale ip -4)
    # Get Tailscale exit-node information
    ts_exitnode_output=$($tailscale exit-node list)
    # Parse exit node hostname
    if echo "$ts_exitnode_output" | grep -q 'selected'; then
        mullvad_exitnode=true
        # Extract the hostname of the selected exit node
        mullvad_hostname=$(echo "$ts_exitnode_output" | grep 'selected' | awk '{print $2}')
    else
        mullvad_exitnode=false
        mullvad_hostname=""
    fi
else
    has_tailscale=false
    tailscale_ip="Not installed"
    mullvad_exitnode=false
    mullvad_hostname=""
fi

nextdns_info=$(curl -sL --max-time 10 https://test.nextdns.io)
if [ -z "$nextdns_info" ]; then
    echo "Failed to fetch NextDNS status or no internet connection." >&2
    # Defaults so the JSON below remains valid when the fetch fails.
    nextdns_connected=false
    nextdns_protocol=""
    nextdns_client=""
else
    nextdns_status=$(echo "$nextdns_info" | jq -r '.status')
    if [ "$nextdns_status" = "unconfigured" ]; then
        echo "You are not using NextDNS." >&2
        nextdns_connected=false
        nextdns_protocol=""
        nextdns_client=""
    else
        nextdns_connected=true
        nextdns_protocol=$(echo "$nextdns_info" | jq -r '.protocol')
        nextdns_client=$(echo "$nextdns_info" | jq -r '.clientName')
        echo "Connected to NextDNS via $nextdns_protocol. Client: $nextdns_client." >&2
    fi
fi

# read uptime_seconds _ < /proc/uptime
# uptime_days=$(echo "$uptime_seconds / 86400" | bc -l)
# uptime_rounded=$(printf "%.2f" $uptime_days)
# NOTE(review): the commented uptime code reads /proc/uptime, which does not
# exist on macOS — use 'sysctl -n kern.boottime' if uptime is wanted here.

cat <<EOF
{
    "local_ip": "$local_ip",
    "wan_connected": $wan_connected,
    "wan_ip": "$wan_ip",
    "has_tailscale": $has_tailscale,
    "tailscale_ip": "$tailscale_ip",
    "mullvad_exitnode": $mullvad_exitnode,
    "mullvad_hostname": "$mullvad_hostname",
    "mullvad_exit_ip": $mullvad_exit_ip,
    "blacklisted": $blacklisted,
    "nextdns_connected": $nextdns_connected,
    "nextdns_protocol": "$nextdns_protocol",
    "nextdns_client": "$nextdns_client",
    "uptime": ""
}
EOF

115
vpn
View file

@ -1,115 +0,0 @@
#!/usr/bin/env python3
# vpn: thin CLI around Tailscale exit nodes (start/stop/new/shh) with
# verification against Mullvad's connection-check API.
import subprocess
import requests
import argparse
import json
import random

# Countries considered acceptable for the 'shh' (random privacy-friendly
# node) action; matched against the country column of `tailscale exit-node list`.
PRIVACY_FRIENDLY_COUNTRIES = ['Sweden', 'Switzerland', 'Germany', 'Finland', 'Netherlands', 'Norway']
def get_current_exit_node():
    """Return the name of the currently-selected Tailscale exit node, or a
    falsy value when none is selected.

    Raises:
        Exception: if the `tailscale status` command exits non-zero.

    NOTE(review): the JSON path Peer -> Tailnet -> ExitNode -> Name does not
    obviously match `tailscale status --json` output (where 'Peer' maps peer
    keys to peer objects) — confirm this ever yields a non-None value.
    """
    result = subprocess.run(['tailscale', 'status', '--json'], capture_output=True, text=True)
    if result.returncode != 0:
        raise Exception("Failed to get Tailscale status")
    status = json.loads(result.stdout)
    current_exit_node = status.get('Peer', {}).get('Tailnet', {}).get('ExitNode', {}).get('Name')
    return current_exit_node
def set_exit_node():
    """Apply the exit node suggested by `tailscale exit-node suggest`, then
    verify via am.i.mullvad.net that the change took effect.

    Raises:
        Exception: if no suggestion line could be parsed. (Previously an
            empty suggestion fell through to `tailscale set --exit-node=`,
            which silently UNSETS the exit node instead of setting one.)
        subprocess.CalledProcessError: if the `tailscale set` command fails.
    """
    # Get the suggested exit node
    result = subprocess.run(['tailscale', 'exit-node', 'suggest'], capture_output=True, text=True)
    exit_node = ''
    for line in result.stdout.splitlines():
        # Expected shape: "Suggested exit node: <hostname>". split(':', 1)
        # cannot IndexError the way the original split(': ')[1] could.
        if 'Suggested exit node' in line and ':' in line:
            exit_node = line.split(':', 1)[1].strip()
            break
    if not exit_node:
        raise Exception("No suggested exit node found")
    print(f"Suggested exit node: {exit_node}")

    # Set the exit node
    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)

    # Verify the exit node against Mullvad's connection-check API
    response = requests.get('https://am.i.mullvad.net/json')
    exit_node_info = response.json()
    exit_node_hostname = exit_node_info.get('mullvad_exit_ip_hostname')
    print(f"Current exit node hostname: {exit_node_hostname}")

    # Compare only the short hostname (the part before the first '.')
    exit_node_short = exit_node.split('.')[0]
    if exit_node_short == exit_node_hostname:
        print("Exit node set successfully!")
    else:
        print("Failed to set exit node!")
def unset_exit_node():
    """Clear any active Tailscale exit node (route traffic directly again).

    Raises:
        subprocess.CalledProcessError: if the `tailscale set` command fails.
    """
    # An empty --exit-node= value tells tailscale to stop using an exit node.
    subprocess.run(['tailscale', 'set', '--exit-node='], check=True)
    print("Exit node unset successfully!")
def start_exit_node():
    """Enable a suggested exit node unless one is already active."""
    active = get_current_exit_node()
    if not active:
        set_exit_node()
    else:
        print(f"Already connected to exit node: {active}")
def get_random_privacy_friendly_exit_node():
    """Pick a random Tailscale exit node located in a privacy-friendly country.

    Returns:
        The node name (second whitespace-separated column of
        `tailscale exit-node list` output).

    Raises:
        Exception: if the tailscale command fails or no candidate exists.

    NOTE(review): assumes column 3 of the list output is the country and that
    country names are single tokens — verify against the installed tailscale
    version's output format.
    """
    result = subprocess.run(['tailscale', 'exit-node', 'list'], capture_output=True, text=True)
    if result.returncode != 0:
        raise Exception("Failed to list Tailscale exit nodes")
    exit_nodes = []
    for line in result.stdout.splitlines():
        parts = line.split()
        if len(parts) > 3 and parts[2] in PRIVACY_FRIENDLY_COUNTRIES:
            exit_nodes.append(parts[1])
    if not exit_nodes:
        raise Exception("No privacy-friendly exit nodes available")
    return random.choice(exit_nodes)
def set_random_privacy_friendly_exit_node():
    """Switch to a randomly chosen privacy-friendly exit node and verify the
    change against Mullvad's connection-check API."""
    exit_node = get_random_privacy_friendly_exit_node()
    print(f"Selected random privacy-friendly exit node: {exit_node}")

    # Set the exit node
    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)

    # Verify the exit node
    response = requests.get('https://am.i.mullvad.net/json')
    node_info = response.json()
    reported_hostname = node_info.get('mullvad_exit_ip_hostname')
    print(f"Current exit node hostname: {reported_hostname}")

    # Compare the short hostname (text before the first '.') with the report.
    if exit_node.split('.')[0] == reported_hostname:
        print("Exit node set successfully!")
    else:
        print("Failed to set exit node!")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Manage VPN exit nodes.')
    parser.add_argument('action', choices=['start', 'stop', 'new', 'shh'],
                        help='Action to perform: start, stop, new, or shh')
    args = parser.parse_args()

    # Dispatch table instead of an if/elif chain; argparse's `choices`
    # guarantees the key exists.
    actions = {
        'start': start_exit_node,
        'stop': unset_exit_node,
        'new': set_exit_node,
        'shh': set_random_privacy_friendly_exit_node,
    }
    actions[args.action]()

3
z
View file

@ -1,3 +0,0 @@
# Reload the zsh environment files in the same order as before:
# profile, rc, then env.
for rc_file in ~/.zprofile ~/.zshrc ~/.zshenv; do
  source "$rc_file"
done