From fb4ee4fc692c39cc03381759b56607d6baa46bc4 Mon Sep 17 00:00:00 2001
From: sanj <67624670+iodrift@users.noreply.github.com>
Date: Tue, 30 Jul 2024 19:30:24 -0700
Subject: [PATCH] Auto-update: Tue Jul 30 19:30:24 PDT 2024

---
 2fa            |   2 +
 all            |  29 ++++++++++
 checknode      |  55 +++++++++++++++++++
 comfy          |  26 +++++++++
 emoji-flag     |  14 +++++
 getreq         |  37 +++++++++++++
 import-finder  | 144 +++++++++++++++++++++++++++++++++++++++++++++++++
 ison           |  29 ++++++++++
 lsd            |  19 +++++++
 mamba-exporter |  15 ++++++
 mamba-importer |  26 +++++++++
 murder         |  24 +++++++++
 o              |   8 +++
 summarize      |  39 ++++++++++++++
 txtsort        |  16 ++++++
 vpn            | 126 ++++++++++++++++++++++++++++++++++++-------
 z              |   3 ++
 17 files changed, 592 insertions(+), 20 deletions(-)
 create mode 100755 2fa
 create mode 100755 all
 create mode 100755 checknode
 create mode 100755 comfy
 create mode 100755 emoji-flag
 create mode 100755 getreq
 create mode 100755 import-finder
 create mode 100755 ison
 create mode 100755 lsd
 create mode 100755 mamba-exporter
 create mode 100755 mamba-importer
 create mode 100755 murder
 create mode 100755 o
 create mode 100755 summarize
 create mode 100755 txtsort
 create mode 100755 z

diff --git a/2fa b/2fa
new file mode 100755
index 0000000..22db023
--- /dev/null
+++ b/2fa
@@ -0,0 +1,2 @@
+source ~/.zshrc
+python ~/.2fa.py
diff --git a/all b/all
new file mode 100755
index 0000000..0a84c7d
--- /dev/null
+++ b/all
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Create a new tmux session
+tmux new-session -d -s servers -n 'ssh'
+
+# Split the first window horizontally
+tmux split-window -h
+
+# Split the first and second panes vertically
+tmux select-pane -t 0
+tmux split-window -v
+
+tmux select-pane -t 2
+tmux split-window -v
+
+# Function to connect to servers with retry
+connect_to_server_with_retry() {
+    tmux send-keys -t "$1" "while true; do ssh $2; sleep 30; done" C-m
+}
+
+# Connect to servers with retry
+connect_to_server_with_retry 0 "100.64.64.11"
+connect_to_server_with_retry 1 "100.64.64.30"
+connect_to_server_with_retry 2 "100.64.64.15"
+connect_to_server_with_retry 3 "root@10.13.37.10"
+
+# Attach to the tmux session
+tmux attach -t servers
+
diff --git a/checknode b/checknode
new file mode 100755
index 0000000..327a6e6
--- /dev/null
+++ b/checknode
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+echo "Checking for remnants of Node.js, npm, and nvm..."
+
+# Check PATH
+echo "Checking PATH..."
+echo $PATH | grep -q 'node\|npm' && echo "Found Node.js or npm in PATH"
+
+# Check Homebrew
+echo "Checking Homebrew..."
+brew list | grep -q 'node\|npm' && echo "Found Node.js or npm installed with Homebrew"
+
+# Check Yarn
+echo "Checking Yarn..."
+command -v yarn >/dev/null 2>&1 && echo "Found Yarn"
+
+# Check Node.js and npm directories
+echo "Checking Node.js and npm directories..."
+ls ~/.npm >/dev/null 2>&1 && echo "Found ~/.npm directory"
+ls ~/.node-gyp >/dev/null 2>&1 && echo "Found ~/.node-gyp directory"
+
+# Check open files and sockets
+echo "Checking open files and sockets..."
+lsof | grep -q 'node' && echo "Found open files or sockets related to Node.js"
+
+# Check other version managers
+echo "Checking other version managers..."
+command -v n >/dev/null 2>&1 && echo "Found 'n' version manager"
+
+# Check temporary directories
+echo "Checking temporary directories..."
+ls /tmp | grep -q 'node\|npm' && echo "Found Node.js or npm related files in /tmp"
+
+# Check Browserify and Webpack cache
+echo "Checking Browserify and Webpack cache..."
+ls ~/.config/browserify >/dev/null 2>&1 && echo "Found Browserify cache"
+ls ~/.config/webpack >/dev/null 2>&1 && echo "Found Webpack cache"
+
+# Check Electron cache
+echo "Checking Electron cache..."
+ls ~/.electron >/dev/null 2>&1 && echo "Found Electron cache"
+
+# Check logs
+echo "Checking logs..."
+ls ~/.npm/_logs >/dev/null 2>&1 && echo "Found npm logs"
+ls ~/.node-gyp/*.log >/dev/null 2>&1 && echo "Found Node.js logs"
+
+# Check miscellaneous directories
+echo "Checking miscellaneous directories..."
+ls ~/.node_repl_history >/dev/null 2>&1 && echo "Found ~/.node_repl_history"
+ls ~/.v8flags* >/dev/null 2>&1 && echo "Found ~/.v8flags*"
+ls ~/.npm-global >/dev/null 2>&1 && echo "Found ~/.npm-global"
+ls ~/.nvm-global >/dev/null 2>&1 && echo "Found ~/.nvm-global"
+
+echo "Check completed."
diff --git a/comfy b/comfy
new file mode 100755
index 0000000..3093b61
--- /dev/null
+++ b/comfy
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+# Create a new tmux session named "comfy" detached (-d) and start the first command in the left pane
+tmux new-session -d -s comfy -n comfypane
+
+# Split the window into two panes. By default, this creates a vertical split.
+tmux split-window -h -t comfy
+
+# Select the first pane to setup comfy environment
+tmux select-pane -t 0
+COMFY_MAMBA=$(mamba env list | grep "^comfy" | awk '{print $2}')
+tmux send-keys -t 0 "cd ~/workshop/sd/ComfyUI" C-m
+tmux send-keys -t 0 "export PATH=\"$COMFY_MAMBA/bin:\$PATH\"" C-m
+tmux send-keys -t 0 "source ~/.zshrc" C-m
+tmux send-keys -t 0 "mamba activate comfy; sleep 1; while true; do PYTORCH_MPS_HIGH_WATERMARK_RATIO=0.0 PYTORCH_ENABLE_MPS_FALLBACK=1 python main.py --preview-method auto --force-fp16 --enable-cors-header; exit_status=\$?; if [ \$exit_status -ne 0 ]; then osascript -e 'display notification \"ComfyUI script exited unexpectedly\" with title \"Error in ComfyUI\"'; fi; sleep 1; done" C-m
+
+# Select the second pane to setup extracomfy environment
+tmux select-pane -t 1
+IG_MAMBA=$(mamba env list | grep "^insta" | awk '{print $2}')
+tmux send-keys -t 1 "export PATH=\"$IG_MAMBA/bin:\$PATH\"" C-m
+tmux send-keys -t 1 "source ~/.zshrc" C-m
+tmux send-keys -t 1 "mamba activate instabot; cd workshop/igbot" C-m
+
+# Attach to the tmux session
+# tmux attach -t comfy
+
diff --git a/emoji-flag b/emoji-flag
new file mode 100755
index 0000000..eba028f
--- /dev/null
+++ b/emoji-flag
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+import sys
+
+def flag_emoji(country_code):
+    offset = 127397
+    flag = ''.join(chr(ord(char) + offset) for char in country_code.upper())
+    return flag
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        country_code = sys.argv[1]
+        print(flag_emoji(country_code))
+    else:
+        print("No country code provided")
\ No newline at end of file
diff --git a/getreq b/getreq
new file mode 100755
index 0000000..3ee156d
--- /dev/null
+++ b/getreq
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Script to find Python imports and install them if necessary
+
+# Check for input argument
+if [ "$#" -ne 1 ]; then
+    echo "Usage: $0 <python_file>"
+    exit 1
+fi
+
+PYTHON_FILE="$1"
+
+# Extract import statements
+IMPORTS=$(grep -E "^import |^from " "$PYTHON_FILE" | \
+    awk '{print $2}' | cut -d. -f1 | sort | uniq)
+
+# Function to check and install packages
+check_and_install() {
+    PACKAGE=$1
+    # Check if the package is installed via pip
+    if pip list | grep -q "^$PACKAGE "; then
+        echo "$PACKAGE is already installed via pip."
+    # Check if the package is installed via conda
+    elif conda list | grep -q "^$PACKAGE "; then
+        echo "$PACKAGE is already installed via conda."
+    else
+        # Install the package using kip
+        echo "Installing $PACKAGE using kip..."
+        kip install "$PACKAGE"
+    fi
+}
+
+# Iterate over imports and check/install them
+for pkg in $IMPORTS; do
+    check_and_install $pkg
+done
+
diff --git a/import-finder b/import-finder
new file mode 100755
index 0000000..41cbf6b
--- /dev/null
+++ b/import-finder
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+
+import os
+import re
+import requests
+import time
+import pkg_resources
+
+# List of Python built-in modules
+BUILTIN_MODULES = {
+    'abc', 'aifc', 'argparse', 'array', 'ast', 'asynchat', 'asyncio', 'asyncore', 'atexit',
+    'audioop', 'base64', 'bdb', 'binascii', 'binhex', 'bisect', 'builtins', 'bz2', 'calendar',
+    'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', 'colorsys',
+    'compileall', 'concurrent', 'configparser', 'contextlib', 'copy', 'copyreg', 'crypt', 'csv',
+    'ctypes', 'curses', 'dataclasses', 'datetime', 'dbm', 'decimal', 'difflib', 'dis', 'distutils',
+    'doctest', 'dummy_threading', 'email', 'encodings', 'ensurepip', 'enum', 'errno', 'faulthandler',
+    'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fractions', 'ftplib', 'functools',
+    'gc', 'getopt', 'getpass', 'gettext', 'glob', 'gzip', 'hashlib', 'heapq', 'hmac', 'html', 'http',
+    'imaplib', 'imghdr', 'imp', 'importlib', 'inspect', 'io', 'ipaddress', 'itertools', 'json',
+    'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'lzma', 'mailbox', 'mailcap', 'marshal',
+    'math', 'mimetypes', 'modulefinder', 'multiprocessing', 'netrc', 'nntplib', 'numbers', 'operator',
+    'optparse', 'os', 'ossaudiodev', 'parser', 'pathlib', 'pdb', 'pickle', 'pickletools', 'pipes',
+    'pkgutil', 'platform', 'plistlib', 'poplib', 'posix', 'pprint', 'profile', 'pstats', 'pty',
+    'pwd', 'py_compile', 'pyclbr', 'pydoc', 'queue', 'quopri', 'random', 're', 'readline',
+    'reprlib', 'resource', 'rlcompleter', 'runpy', 'sched', 'secrets', 'select', 'selectors', 'shelve',
+    'shlex', 'shutil', 'signal', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'socketserver',
+    'spwd', 'sqlite3', 'ssl', 'stat', 'statistics', 'string', 'stringprep', 'struct', 'subprocess',
+    'sunau', 'symtable', 'sys', 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile',
+    'termios', 'test', 'textwrap', 'threading', 'time', 'timeit', 'token', 'tokenize', 'trace',
+    'traceback', 'tracemalloc', 'tty', 'turtle', 'types', 'typing', 'unicodedata', 'unittest',
+    'urllib', 'uu', 'uuid', 'venv', 'warnings', 'wave', 'weakref', 'webbrowser', 'xdrlib', 'xml',
+    'xmlrpc', 'zipapp', 'zipfile', 'zipimport', 'zlib'
+}
+
+# Known corrections for PyPI package names
+KNOWN_CORRECTIONS = {
+    'dateutil': 'python-dateutil',
+    'dotenv': 'python-dotenv',
+    'docx': 'python-docx',
+    'tesseract': 'pytesseract',
+    'magic': 'python-magic',
+    'multipart': 'python-multipart',
+    'newspaper': 'newspaper3k',
+    'srtm': 'elevation',
+    'yaml': 'pyyaml',
+    'zoneinfo': 'backports.zoneinfo'
+}
+
+# List of generic names to exclude
+EXCLUDED_NAMES = {'models', 'data', 'convert', 'example', 'tests'}
+
+def find_imports(root_dir):
+    imports_by_file = {}
+    for dirpath, _, filenames in os.walk(root_dir):
+        for filename in filenames:
+            if filename.endswith('.py'):
+                filepath = os.path.join(dirpath, filename)
+                with open(filepath, 'r') as file:
+                    import_lines = []
+                    for line in file:
+                        line = line.strip()
+                        if line.startswith(('import ', 'from ')) and not line.startswith('#'):
+                            import_lines.append(line)
+                    imports_by_file[filepath] = import_lines
+    return imports_by_file
+
+def process_import_lines(import_lines):
+    processed_lines = set() # Use a set to remove duplicates
+    for line in import_lines:
+        # Handle 'import xyz' and 'import abc, def, geh'
+        if line.startswith('import '):
+            modules = line.replace('import ', '').split(',')
+            for mod in modules:
+                mod = re.sub(r'\s+as\s+\w+', '', mod).split('.')[0].strip()
+                if mod and not mod.isupper() and mod not in EXCLUDED_NAMES:
+                    processed_lines.add(mod)
+        # Handle 'from abc import def, geh'
+        elif line.startswith('from '):
+            mod = line.split(' ')[1].split('.')[0].strip()
+            if mod and not mod.isupper() and mod not in EXCLUDED_NAMES:
+                processed_lines.add(mod)
+    return processed_lines
+
+def check_pypi_availability(libraries):
+    available = set()
+    unavailable = set()
+    for lib in libraries:
+        if lib in BUILTIN_MODULES: # Skip built-in modules
+            continue
+        corrected_lib = KNOWN_CORRECTIONS.get(lib, lib)
+        try:
+            if check_library_on_pypi(corrected_lib):
+                available.add(corrected_lib)
+            else:
+                unavailable.add(corrected_lib)
+        except requests.exceptions.RequestException:
+            print(f"Warning: Unable to check {corrected_lib} on PyPI due to network error.")
+            unavailable.add(corrected_lib)
+    return available, unavailable
+
+def check_library_on_pypi(library):
+    max_retries = 3
+    for attempt in range(max_retries):
+        try:
+            response = requests.get(f"https://pypi.org/pypi/{library}/json", timeout=5)
+            return response.status_code == 200
+        except requests.exceptions.RequestException:
+            if attempt < max_retries - 1:
+                time.sleep(1) # Wait for 1 second before retrying
+            else:
+                raise
+
+def save_to_requirements_file(available, output_file='requirements.txt'):
+    existing_requirements = set()
+    if os.path.isfile(output_file):
+        with open(output_file, 'r') as file:
+            existing_requirements = set(line.strip() for line in file)
+    with open(output_file, 'a') as file:
+        for pkg in sorted(available - existing_requirements):
+            print(f"Adding to requirements.txt: {pkg}")
+            file.write(pkg + '\n')
+
+def save_to_missing_file(unavailable, output_file='missing-packages.txt'):
+    existing_missing = set()
+    if os.path.isfile(output_file):
+        with open(output_file, 'r') as file:
+            existing_missing = set(line.strip() for line in file)
+    with open(output_file, 'a') as file:
+        for pkg in sorted(unavailable - existing_missing):
+            print(f"Adding to missing-packages.txt: {pkg}")
+            file.write(pkg + '\n')
+
+if __name__ == "__main__":
+    root_dir = os.getcwd() # Get the current working directory
+
+    imports_by_file = find_imports(root_dir)
+    for filepath, import_lines in imports_by_file.items():
+        print(f"# Processing {filepath}")
+        processed_lines = process_import_lines(import_lines)
+        available, unavailable = check_pypi_availability(processed_lines)
+        save_to_requirements_file(available)
+        save_to_missing_file(unavailable)
+
+    print(f"Processed import statements have been saved to requirements.txt and missing-packages.txt")
diff --git a/ison b/ison
new file mode 100755
index 0000000..323a7f9
--- /dev/null
+++ b/ison
@@ -0,0 +1,29 @@
+#!/Users/sij/miniforge3/envs/sijapi/bin/python
+
+import requests
+
+def check_health(url):
+    try:
+        response = requests.get(url)
+        if response.status_code == 200:
+            return f"{url} is up"
+        else:
+            return f"{url} returned status code {response.status_code}"
+    except requests.exceptions.RequestException:
+        return f"{url} is down"
+
+def main():
+    addresses = [
+        "http://localhost:4444/health",
"http://100.64.64.20:4444/health", + "http://100.64.64.30:4444/health", + "http://100.64.64.11:4444/health", + "http://100.64.64.15:4444/health" + ] + + for address in addresses: + print(check_health(address)) + +if __name__ == "__main__": + main() + diff --git a/lsd b/lsd new file mode 100755 index 0000000..963f539 --- /dev/null +++ b/lsd @@ -0,0 +1,19 @@ +#!/bin/bash + +# Default options for lsd +default_options="--color=always -F --long --size=short --permission=octal --group-dirs=first -X" + +# Check if the first argument is a directory or an option +if [[ $# -gt 0 && ! $1 =~ ^- ]]; then + # First argument is a directory, store it and remove from arguments list + directory=$1 + shift +else + # No directory specified, default to the current directory + directory="." +fi + +# Execute lsd with the default options, directory, and any additional arguments provided +/opt/homebrew/bin/lsd $default_options "$directory" "$@" + + diff --git a/mamba-exporter b/mamba-exporter new file mode 100755 index 0000000..176e713 --- /dev/null +++ b/mamba-exporter @@ -0,0 +1,15 @@ +#!/bin/bash + +# List all conda environments and cut the output to get just the names +envs=$(mamba env list | awk '{print $1}' | grep -v '^#' | grep -v 'base') + +# Loop through each environment name +for env in $envs; do + # Use conda (or mamba, but conda is preferred for compatibility reasons) to export the environment to a YAML file + # No need to activate the environment; conda can export directly by specifying the name + echo "Exporting $env..." + mamba env export --name $env > "${env}.yml" +done + +echo "All environments have been exported." + diff --git a/mamba-importer b/mamba-importer new file mode 100755 index 0000000..79886e1 --- /dev/null +++ b/mamba-importer @@ -0,0 +1,26 @@ +#!/bin/bash + +# Function to process a single .yml file +process_file() { + file="$1" + if [[ -f "$file" ]]; then + env_name=$(echo "$file" | sed 's/.yml$//') + echo "Creating environment from $file..." + conda env create -f "$file" || echo "Failed to create environment from $file" + else + echo "File $file does not exist." + fi +} + +# Check if a .yml file was provided as an argument +if [[ $# -eq 1 && $1 == *.yml ]]; then + # Process the provided .yml file + process_file "$1" +else + # No argument provided, process all .yml files in the current directory + for file in *.yml; do + process_file "$file" + done + echo "Environment creation process completed." +fi + diff --git a/murder b/murder new file mode 100755 index 0000000..3c76f1b --- /dev/null +++ b/murder @@ -0,0 +1,24 @@ +#!/bin/bash + +# Check if an argument is given +if [ $# -eq 0 ]; then + echo "Usage: murder [process name or port]" + exit 1 +fi + +# Get the input parameter +ARGUMENT=$1 + +# Check if the argument is numeric +if [[ $ARGUMENT =~ ^[0-9]+$ ]]; then + echo "Killing processes listening on port $ARGUMENT" + lsof -t -i:$ARGUMENT | xargs kill +else + # Process name was given instead of a port number + echo "Killing processes with name $ARGUMENT" + for PID in $(ps aux | grep $ARGUMENT | grep -v grep | awk '{print $2}'); do + echo "Killing process $PID" + sudo kill -9 $PID + done +fi + diff --git a/o b/o new file mode 100755 index 0000000..d763e15 --- /dev/null +++ b/o @@ -0,0 +1,8 @@ +#! 
+#! /bin/bash
+
+if [[ -z $(pidof ollama) ]]; then
+    ollama serve &>/dev/null &
+    disown
+fi
+
+/usr/bin/env python3 /Users/sij/AI/osh/osh.py $@
diff --git a/summarize b/summarize
new file mode 100755
index 0000000..483de30
--- /dev/null
+++ b/summarize
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+# Check if a filename has been provided
+if [ "$#" -ne 1 ]; then
+    echo "Usage: $0 <filename>"
+    exit 1
+fi
+
+filename="$1"
+
+# Check if the file exists
+if [ ! -f "$filename" ]; then
+    echo "Error: File does not exist."
+    exit 1
+fi
+
+# Assuming GLOBAL_API_KEY is exported in your environment
+if [ -z "$GLOBAL_API_KEY" ]; then
+    echo "Error: GLOBAL_API_KEY is not set."
+    exit 1
+fi
+
+# Endpoint
+endpoint="https://api.sij.ai/speaksummary"
+
+# Make the request
+curl -X POST "$endpoint" \
+    -H "Authorization: Bearer $GLOBAL_API_KEY" \
+    -H "Content-Type: multipart/form-data" \
+    -F "file=@$filename" \
+    -o "response.wav"
+
+# Check if the output was saved successfully
+if [ -f "response.wav" ]; then
+    echo "The summary has been processed and saved as 'response.wav'"
+else
+    echo "Failed to save the summary."
+fi
+
diff --git a/txtsort b/txtsort
new file mode 100755
index 0000000..a2235aa
--- /dev/null
+++ b/txtsort
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+# Checking if the user provided a file name
+if [ $# -ne 1 ]; then
+    echo "Usage: $0 filename"
+    exit 1
+fi
+
+# Checking if the given file is readable
+if ! [ -r "$1" ]; then
+    echo "The file '$1' is not readable or does not exist."
+    exit 1
+fi
+
+sort $1
+
diff --git a/vpn b/vpn
index a31721e..8748795 100755
--- a/vpn
+++ b/vpn
@@ -1,29 +1,115 @@
-#!/bin/bash
+#!/usr/bin/env python3
 
-# Get the suggested exit node
-exit_node=$(tailscale exit-node suggest | awk -F': ' '/Suggested exit node/ {print substr($2, 1, length($2)-1)}')
+import subprocess
+import requests
+import argparse
+import json
+import random
 
-# Print the exit node
-echo "Suggested exit node: $exit_node"
+PRIVACY_FRIENDLY_COUNTRIES = ['Sweden', 'Switzerland', 'Germany', 'Finland', 'Netherlands', 'Norway']
 
-# Set the exit node
-tailscale set --exit-node=$exit_node
+def get_current_exit_node():
+    result = subprocess.run(['tailscale', 'status', '--json'], capture_output=True, text=True)
+    if result.returncode != 0:
+        raise Exception("Failed to get Tailscale status")
+
+    status = json.loads(result.stdout)
+    current_exit_node = status.get('Peer', {}).get('Tailnet', {}).get('ExitNode', {}).get('Name')
+    return current_exit_node
 
-# Verify the exit node
-exit_node_info=$(curl -s https://am.i.mullvad.net/json)
+def set_exit_node():
+    # Get the suggested exit node
+    result = subprocess.run(['tailscale', 'exit-node', 'suggest'], capture_output=True, text=True)
+    exit_node = ''
+    for line in result.stdout.splitlines():
+        if 'Suggested exit node' in line:
+            exit_node = line.split(': ')[1].strip()
+            break
 
-# Parse the JSON response to get the hostname
-exit_node_hostname=$(echo $exit_node_info | jq -r '.mullvad_exit_ip_hostname')
+    print(f"Suggested exit node: {exit_node}")
 
-echo "Current exit node hostname: $exit_node_hostname"
+    # Set the exit node
+    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)
 
-# Get the part before the first '.' in the exit_node
-exit_node_short=$(echo $exit_node | cut -d'.' -f1)
+    # Verify the exit node
+    response = requests.get('https://am.i.mullvad.net/json')
+    exit_node_info = response.json()
+    exit_node_hostname = exit_node_info.get('mullvad_exit_ip_hostname')
 
-# Verify that the exit_node_short and exit_node_hostname are equal
-if [ "$exit_node_short" == "$exit_node_hostname" ]; then
-    echo "Exit node set successfully!"
-else
-    echo "Failed to set exit node!"
-fi
+    print(f"Current exit node hostname: {exit_node_hostname}")
+
+    # Get the part before the first '.' in the exit_node
+    exit_node_short = exit_node.split('.')[0]
+
+    # Verify that the exit_node_short and exit_node_hostname are equal
+    if exit_node_short == exit_node_hostname:
+        print("Exit node set successfully!")
+    else:
+        print("Failed to set exit node!")
+
+def unset_exit_node():
+    # Unset the exit node
+    subprocess.run(['tailscale', 'set', '--exit-node='], check=True)
+    print("Exit node unset successfully!")
+
+def start_exit_node():
+    current_exit_node = get_current_exit_node()
+    if current_exit_node:
+        print(f"Already connected to exit node: {current_exit_node}")
+    else:
+        set_exit_node()
+
+def get_random_privacy_friendly_exit_node():
+    result = subprocess.run(['tailscale', 'exit-node', 'list'], capture_output=True, text=True)
+    if result.returncode != 0:
+        raise Exception("Failed to list Tailscale exit nodes")
+
+    exit_nodes = []
+    for line in result.stdout.splitlines():
+        parts = line.split()
+        if len(parts) > 3 and parts[2] in PRIVACY_FRIENDLY_COUNTRIES:
+            exit_nodes.append(parts[1])
+
+    if not exit_nodes:
+        raise Exception("No privacy-friendly exit nodes available")
+
+    return random.choice(exit_nodes)
+
+def set_random_privacy_friendly_exit_node():
+    exit_node = get_random_privacy_friendly_exit_node()
+    print(f"Selected random privacy-friendly exit node: {exit_node}")
+
+    # Set the exit node
+    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)
+
+    # Verify the exit node
+    response = requests.get('https://am.i.mullvad.net/json')
+    exit_node_info = response.json()
+    exit_node_hostname = exit_node_info.get('mullvad_exit_ip_hostname')
+
+    print(f"Current exit node hostname: {exit_node_hostname}")
+
+    # Get the part before the first '.' in the exit_node
+    exit_node_short = exit_node.split('.')[0]
+
+    # Verify that the exit_node_short and exit_node_hostname are equal
+    if exit_node_short == exit_node_hostname:
+        print("Exit node set successfully!")
+    else:
+        print("Failed to set exit node!")
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Manage VPN exit nodes.')
+    parser.add_argument('action', choices=['start', 'stop', 'new', 'shh'], help='Action to perform: start, stop, new, or shh')
+
+    args = parser.parse_args()
+
+    if args.action == 'start':
+        start_exit_node()
+    elif args.action == 'stop':
+        unset_exit_node()
+    elif args.action == 'new':
+        set_exit_node()
+    elif args.action == 'shh':
+        set_random_privacy_friendly_exit_node()
 
diff --git a/z b/z
new file mode 100755
index 0000000..83030b7
--- /dev/null
+++ b/z
@@ -0,0 +1,3 @@
+source ~/.zprofile
+source ~/.zshrc
+source ~/.zshenv