Compare commits
10 commits
ee4f74b78c
...
fbbb92fe7a
Author | SHA1 | Date | |
---|---|---|---|
|
fbbb92fe7a | ||
|
d6477298fa | ||
|
cbf3310f13 | ||
|
e7fd105d47 | ||
|
07ccb97361 | ||
|
5f4134e6a1 | ||
|
1a6c288d55 | ||
|
dba2def7ad | ||
|
f017305517 | ||
|
1f426e0956 |
22 changed files with 689 additions and 0 deletions
BIN
.DS_Store
vendored
BIN
.DS_Store
vendored
Binary file not shown.
2
2fa
Executable file
2
2fa
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/bin/zsh

# Load the user's shell config (PATH, aliases, env vars) and run the
# 2FA code generator. The shebang was missing: an executable script
# that uses `source` on zsh rc files must run under zsh, not /bin/sh.
source ~/.zshrc
python ~/.2fa.py
|
29
all
Executable file
29
all
Executable file
|
@ -0,0 +1,29 @@
|
|||
#!/bin/bash

# Build a 2x2 tmux pane grid in a session named "servers" and keep an
# ssh connection to each of four hosts alive in its own pane.

# Create a new tmux session
tmux new-session -d -s servers -n 'ssh'

# Split the first window horizontally
tmux split-window -h

# Split the first and second panes vertically
tmux select-pane -t 0
tmux split-window -v

tmux select-pane -t 2
tmux split-window -v

# Function to connect to servers with retry
# $1 = pane index, $2 = ssh target; the sent loop re-dials 30s after
# every disconnect, so dropped links come back on their own.
connect_to_server_with_retry() {
    tmux send-keys -t "$1" "while true; do ssh $2; sleep 30; done" C-m
}

# Connect to servers with retry
connect_to_server_with_retry 0 "100.64.64.11"
connect_to_server_with_retry 1 "100.64.64.30"
connect_to_server_with_retry 2 "100.64.64.15"
connect_to_server_with_retry 3 "root@10.13.37.10"

# Attach to the tmux session
tmux attach -t servers
|
||||
|
0
cf
Normal file → Executable file
0
cf
Normal file → Executable file
55
checknode
Executable file
55
checknode
Executable file
|
@ -0,0 +1,55 @@
|
|||
#!/bin/bash

# Scan common locations for leftovers of Node.js, npm, and nvm installs.
# Each check prints a "Found ..." line only when something turns up.

echo "Checking for remnants of Node.js, npm, and nvm..."

# Check PATH ("$PATH" is quoted so entries containing spaces reach grep intact)
echo "Checking PATH..."
echo "$PATH" | grep -q 'node\|npm' && echo "Found Node.js or npm in PATH"

# Check Homebrew
echo "Checking Homebrew..."
brew list | grep -q 'node\|npm' && echo "Found Node.js or npm installed with Homebrew"

# Check Yarn
echo "Checking Yarn..."
command -v yarn >/dev/null 2>&1 && echo "Found Yarn"

# Check Node.js and npm directories
echo "Checking Node.js and npm directories..."
ls ~/.npm >/dev/null 2>&1 && echo "Found ~/.npm directory"
ls ~/.node-gyp >/dev/null 2>&1 && echo "Found ~/.node-gyp directory"

# Check open files and sockets (lsof over everything — slowest check here)
echo "Checking open files and sockets..."
lsof | grep -q 'node' && echo "Found open files or sockets related to Node.js"

# Check other version managers
echo "Checking other version managers..."
command -v n >/dev/null 2>&1 && echo "Found 'n' version manager"

# Check temporary directories
echo "Checking temporary directories..."
ls /tmp | grep -q 'node\|npm' && echo "Found Node.js or npm related files in /tmp"

# Check Browserify and Webpack cache
echo "Checking Browserify and Webpack cache..."
ls ~/.config/browserify >/dev/null 2>&1 && echo "Found Browserify cache"
ls ~/.config/webpack >/dev/null 2>&1 && echo "Found Webpack cache"

# Check Electron cache
echo "Checking Electron cache..."
ls ~/.electron >/dev/null 2>&1 && echo "Found Electron cache"

# Check logs
echo "Checking logs..."
ls ~/.npm/_logs >/dev/null 2>&1 && echo "Found npm logs"
ls ~/.node-gyp/*.log >/dev/null 2>&1 && echo "Found Node.js logs"

# Check miscellaneous directories
echo "Checking miscellaneous directories..."
ls ~/.node_repl_history >/dev/null 2>&1 && echo "Found ~/.node_repl_history"
ls ~/.v8flags* >/dev/null 2>&1 && echo "Found ~/.v8flags*"
ls ~/.npm-global >/dev/null 2>&1 && echo "Found ~/.npm-global"
ls ~/.nvm-global >/dev/null 2>&1 && echo "Found ~/.nvm-global"

echo "Check completed."
|
26
comfy
Executable file
26
comfy
Executable file
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash

# Launch ComfyUI in a two-pane tmux session: the left pane auto-restarts
# the ComfyUI server on crash, the right pane drops into the instabot env.

# Create a new tmux session named "comfy" detached (-d) and start the first command in the left pane
tmux new-session -d -s comfy -n comfypane

# Split the window into two panes. By default, this creates a vertical split.
tmux split-window -h -t comfy

# Select the first pane to setup comfy environment
tmux select-pane -t 0
# Path of the "comfy" conda env (second column of `mamba env list`).
# NOTE(review): for the *active* env that column is '*', which would break
# this lookup — confirm "comfy" is never active when this script runs.
COMFY_MAMBA=$(mamba env list | grep "^comfy" | awk '{print $2}')
tmux send-keys -t 0 "cd ~/workshop/sd/ComfyUI" C-m
tmux send-keys -t 0 "export PATH=\"$COMFY_MAMBA/bin:\$PATH\"" C-m
tmux send-keys -t 0 "source ~/.zshrc" C-m
# Restart loop: rerun ComfyUI forever; pop a macOS notification whenever it
# exits non-zero, then relaunch after a 1s pause.
tmux send-keys -t 0 "mamba activate comfy; sleep 1; while true; do PYTORCH_MPS_HIGH_WATERMARK_RATIO=0.0 PYTORCH_ENABLE_MPS_FALLBACK=1 python main.py --preview-method auto --force-fp16 --enable-cors-header; exit_status=\$?; if [ \$exit_status -ne 0 ]; then osascript -e 'display notification \"ComfyUI script exited unexpectedly\" with title \"Error in ComfyUI\"'; fi; sleep 1; done" C-m

# Select the second pane to setup extracomfy environment
tmux select-pane -t 1
# Path of the env whose name starts with "insta" (same caveat as above).
IG_MAMBA=$(mamba env list | grep "^insta" | awk '{print $2}')
tmux send-keys -t 1 "export PATH=\"$IG_MAMBA/bin:\$PATH\"" C-m
tmux send-keys -t 1 "source ~/.zshrc" C-m
tmux send-keys -t 1 "mamba activate instabot; cd workshop/igbot" C-m

# Attach to the tmux session
# tmux attach -t comfy
|
||||
|
0
ddns
Normal file → Executable file
0
ddns
Normal file → Executable file
14
emoji-flag
Executable file
14
emoji-flag
Executable file
|
@ -0,0 +1,14 @@
|
|||
#!/usr/bin/env python3
import sys

# Regional-indicator symbols sit 127397 code points above ASCII 'A'..'Z'.
_REGIONAL_OFFSET = 127397


def flag_emoji(country_code):
    """Translate a two-letter country code into its emoji flag string."""
    return ''.join(chr(ord(letter) + _REGIONAL_OFFSET)
                   for letter in country_code.upper())


if __name__ == "__main__":
    if len(sys.argv) > 1:
        print(flag_emoji(sys.argv[1]))
    else:
        print("No country code provided")
|
51
get
Executable file
51
get
Executable file
|
@ -0,0 +1,51 @@
|
|||
#!/bin/bash

# Clone a git repository and, when it looks like a Python project,
# create a matching Mamba environment and install its dependencies.

# Check if a URL is provided
if [ $# -eq 0 ]; then
    echo "Please provide a git repository URL."
    exit 1
fi

# Extract the repository URL and name
repo_url=$1
repo_name=$(basename "$repo_url" .git)

# Clone the repository; bail out immediately on failure.
if ! git clone "$repo_url"; then
    echo "Failed to clone the repository."
    exit 1
fi

# Change to the newly created directory.
# (was `|| exit`, which exited with status 0 and masked the failure)
cd "$repo_name" || exit 1

# Check for setup.py or requirements.txt
if [ -f "setup.py" ] || [ -f "requirements.txt" ]; then
    # Create a new Mamba environment named after the repo
    mamba create -n "$repo_name" python -y

    # Activate the new environment (the conda hook makes 'activate' work in scripts)
    eval "$(conda shell.bash hook)"
    mamba activate "$repo_name"

    # Install dependencies
    if [ -f "setup.py" ]; then
        echo "Installing from setup.py..."
        # NOTE(review): `setup.py install` is deprecated; `pip install .`
        # is the modern equivalent — kept for behavior compatibility.
        python setup.py install
    fi

    if [ -f "requirements.txt" ]; then
        echo "Installing from requirements.txt..."
        pip install -r requirements.txt
    fi

    echo "Environment setup complete."
else
    echo "No setup.py or requirements.txt found. Skipping environment setup."
fi

echo "Repository cloned and set up successfully."
|
||||
|
37
getreq
Executable file
37
getreq
Executable file
|
@ -0,0 +1,37 @@
|
|||
#!/bin/bash

# Script to find Python imports and install them if necessary

# Check for input argument
if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <path-to-python-file>"
    exit 1
fi

PYTHON_FILE="$1"

# Extract the top-level module of each import statement.
# `tr -d ','` fixes "import a, b": awk's $2 is "a," and the comma previously
# leaked into the package name. (awk still only sees the first module of a
# multi-import line — later ones on the same line are not captured.)
IMPORTS=$(grep -E "^import |^from " "$PYTHON_FILE" | \
    awk '{print $2}' | tr -d ',' | cut -d. -f1 | sort -u)

# Function to check and install packages
check_and_install() {
    PACKAGE=$1
    # Check if the package is installed via pip
    if pip list | grep -q "^$PACKAGE "; then
        echo "$PACKAGE is already installed via pip."
    # Check if the package is installed via conda
    elif conda list | grep -q "^$PACKAGE "; then
        echo "$PACKAGE is already installed via conda."
    else
        # Install the package using kip
        echo "Installing $PACKAGE using kip..."
        kip install "$PACKAGE"
    fi
}

# Iterate over imports and check/install them ("$pkg" quoted defensively)
for pkg in $IMPORTS; do
    check_and_install "$pkg"
done
|
||||
|
144
import-finder
Executable file
144
import-finder
Executable file
|
@ -0,0 +1,144 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import time
|
||||
import pkg_resources
|
||||
|
||||
# List of Python built-in modules
# (standard-library import names — these never need a PyPI install, so the
# availability checker skips them outright)
BUILTIN_MODULES = {
    'abc', 'aifc', 'argparse', 'array', 'ast', 'asynchat', 'asyncio', 'asyncore', 'atexit',
    'audioop', 'base64', 'bdb', 'binascii', 'binhex', 'bisect', 'builtins', 'bz2', 'calendar',
    'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', 'colorsys',
    'compileall', 'concurrent', 'configparser', 'contextlib', 'copy', 'copyreg', 'crypt', 'csv',
    'ctypes', 'curses', 'dataclasses', 'datetime', 'dbm', 'decimal', 'difflib', 'dis', 'distutils',
    'doctest', 'dummy_threading', 'email', 'encodings', 'ensurepip', 'enum', 'errno', 'faulthandler',
    'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fractions', 'ftplib', 'functools',
    'gc', 'getopt', 'getpass', 'gettext', 'glob', 'gzip', 'hashlib', 'heapq', 'hmac', 'html', 'http',
    'imaplib', 'imghdr', 'imp', 'importlib', 'inspect', 'io', 'ipaddress', 'itertools', 'json',
    'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'lzma', 'mailbox', 'mailcap', 'marshal',
    'math', 'mimetypes', 'modulefinder', 'multiprocessing', 'netrc', 'nntplib', 'numbers', 'operator',
    'optparse', 'os', 'ossaudiodev', 'parser', 'pathlib', 'pdb', 'pickle', 'pickletools', 'pipes',
    'pkgutil', 'platform', 'plistlib', 'poplib', 'posix', 'pprint', 'profile', 'pstats', 'pty',
    'pwd', 'py_compile', 'pyclbr', 'pydoc', 'queue', 'quopri', 'random', 're', 'readline',
    'reprlib', 'resource', 'rlcompleter', 'runpy', 'sched', 'secrets', 'select', 'selectors', 'shelve',
    'shlex', 'shutil', 'signal', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'socketserver',
    'spwd', 'sqlite3', 'ssl', 'stat', 'statistics', 'string', 'stringprep', 'struct', 'subprocess',
    'sunau', 'symtable', 'sys', 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile',
    'termios', 'test', 'textwrap', 'threading', 'time', 'timeit', 'token', 'tokenize', 'trace',
    'traceback', 'tracemalloc', 'tty', 'turtle', 'types', 'typing', 'unicodedata', 'unittest',
    'urllib', 'uu', 'uuid', 'venv', 'warnings', 'wave', 'weakref', 'webbrowser', 'xdrlib', 'xml',
    'xmlrpc', 'zipapp', 'zipfile', 'zipimport', 'zlib'
}

# Known corrections for PyPI package names
# (import name -> actual PyPI distribution name, for cases where they differ)
KNOWN_CORRECTIONS = {
    'dateutil': 'python-dateutil',
    'dotenv': 'python-dotenv',
    'docx': 'python-docx',
    'tesseract': 'pytesseract',
    'magic': 'python-magic',
    'multipart': 'python-multipart',
    'newspaper': 'newspaper3k',
    'srtm': 'elevation',
    'yaml': 'pyyaml',
    'zoneinfo': 'backports.zoneinfo'
}

# List of generic names to exclude
# (common local-package/directory names that would produce false PyPI hits)
EXCLUDED_NAMES = {'models', 'data', 'convert', 'example', 'tests'}
|
||||
|
||||
def find_imports(root_dir):
    """Collect import statements from every .py file under root_dir.

    Returns a dict mapping each file path to the list of its stripped
    'import ...' / 'from ...' lines, in file order.
    """
    imports_by_file = {}
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            if filename.endswith('.py'):
                filepath = os.path.join(dirpath, filename)
                # errors='replace' so a single non-UTF-8 source file cannot
                # abort the whole scan (the original used the locale default
                # encoding and crashed on undecodable bytes).
                with open(filepath, 'r', encoding='utf-8', errors='replace') as file:
                    import_lines = [
                        line.strip()
                        for line in file
                        if line.strip().startswith(('import ', 'from '))
                    ]
                imports_by_file[filepath] = import_lines
    return imports_by_file
|
||||
|
||||
def process_import_lines(import_lines):
    """Reduce raw import statements to a de-duplicated set of top-level module names."""
    modules = set()

    def _consider(name):
        # Skip empties, ALL-CAPS pseudo-imports, and known generic local names.
        if name and not name.isupper() and name not in EXCLUDED_NAMES:
            modules.add(name)

    for statement in import_lines:
        if statement.startswith('import '):
            # 'import xyz' / 'import abc, def, geh' — one module per comma chunk,
            # with any trailing 'as alias' stripped and dotted paths truncated.
            for chunk in statement.replace('import ', '').split(','):
                _consider(re.sub(r'\s+as\s+\w+', '', chunk).split('.')[0].strip())
        elif statement.startswith('from '):
            # 'from abc import def' — only the source module matters.
            _consider(statement.split(' ')[1].split('.')[0].strip())
    return modules
|
||||
|
||||
def check_pypi_availability(libraries):
    """Partition module names into (available, unavailable) on PyPI.

    Built-in modules are skipped entirely; import names whose PyPI
    distribution name differs are corrected before the lookup.
    """
    available = set()
    unavailable = set()
    for lib in libraries:
        if lib in BUILTIN_MODULES:  # stdlib — nothing to install
            continue
        candidate = KNOWN_CORRECTIONS.get(lib, lib)
        try:
            bucket = available if check_library_on_pypi(candidate) else unavailable
            bucket.add(candidate)
        except requests.exceptions.RequestException:
            # Network trouble: report and assume missing rather than abort the scan.
            print(f"Warning: Unable to check {candidate} on PyPI due to network error.")
            unavailable.add(candidate)
    return available, unavailable
|
||||
|
||||
def check_library_on_pypi(library):
    """Return True iff PyPI has a project page for `library`.

    Retries transient network errors up to 3 attempts total, sleeping 1s
    between attempts; re-raises after the final failure.
    """
    attempts_left = 3
    while True:
        try:
            response = requests.get(f"https://pypi.org/pypi/{library}/json", timeout=5)
            return response.status_code == 200
        except requests.exceptions.RequestException:
            attempts_left -= 1
            if attempts_left == 0:
                raise  # out of retries — let the caller decide
            time.sleep(1)  # brief pause before retrying
|
||||
|
||||
def save_to_requirements_file(available, output_file='requirements.txt'):
    """Append newly discovered packages to requirements.txt, skipping ones already listed."""
    already_listed = set()
    if os.path.isfile(output_file):
        with open(output_file, 'r') as fh:
            already_listed = {entry.strip() for entry in fh}
    # Only genuinely new entries, in sorted order, appended (never rewritten).
    with open(output_file, 'a') as fh:
        for pkg in sorted(available - already_listed):
            print(f"Adding to requirements.txt: {pkg}")
            fh.write(pkg + '\n')
|
||||
|
||||
def save_to_missing_file(unavailable, output_file='missing-packages.txt'):
    """Append packages not found on PyPI to missing-packages.txt, skipping duplicates."""
    already_listed = set()
    if os.path.isfile(output_file):
        with open(output_file, 'r') as fh:
            already_listed = {entry.strip() for entry in fh}
    # Only genuinely new entries, in sorted order, appended (never rewritten).
    with open(output_file, 'a') as fh:
        for pkg in sorted(unavailable - already_listed):
            print(f"Adding to missing-packages.txt: {pkg}")
            fh.write(pkg + '\n')
|
||||
|
||||
if __name__ == "__main__":
    # Walk the current working directory, then for each source file resolve
    # its imports against PyPI and append the results incrementally to
    # requirements.txt / missing-packages.txt.
    root_dir = os.getcwd()  # Get the current working directory

    imports_by_file = find_imports(root_dir)
    for filepath, import_lines in imports_by_file.items():
        print(f"# Processing {filepath}")
        processed_lines = process_import_lines(import_lines)
        # One batch of PyPI lookups per file; results are saved as we go, so
        # an interrupted run still leaves partial output on disk.
        available, unavailable = check_pypi_availability(processed_lines)
        save_to_requirements_file(available)
        save_to_missing_file(unavailable)

    print(f"Processed import statements have been saved to requirements.txt and missing-packages.txt")
|
29
ison
Executable file
29
ison
Executable file
|
@ -0,0 +1,29 @@
|
|||
#!/Users/sij/miniforge3/envs/sijapi/bin/python
"""Report up/down status for a fixed list of sijapi health endpoints."""

import requests


def check_health(url):
    """Return a one-line status string for `url`.

    The 5-second timeout is the fix here: without it, a single
    unreachable host could hang the whole sweep indefinitely (a timeout
    raises RequestException, which is already reported as "down").
    """
    try:
        response = requests.get(url, timeout=5)
        if response.status_code == 200:
            return f"{url} is up"
        else:
            return f"{url} returned status code {response.status_code}"
    except requests.exceptions.RequestException:
        return f"{url} is down"


def main():
    # Local instance first, then the tailnet peers.
    addresses = [
        "http://localhost:4444/health",
        "http://100.64.64.20:4444/health",
        "http://100.64.64.30:4444/health",
        "http://100.64.64.11:4444/health",
        "http://100.64.64.15:4444/health"
    ]

    for address in addresses:
        print(check_health(address))


if __name__ == "__main__":
    main()
|
||||
|
19
lsd
Executable file
19
lsd
Executable file
|
@ -0,0 +1,19 @@
|
|||
#!/bin/bash

# Wrapper around lsd that applies a house set of default flags.
# Usage: lsd [directory] [extra lsd options...]

default_opts=(--color=always -F --long --size=short --permission=octal --group-dirs=first -X)

# A leading argument that does not start with '-' is the target directory;
# otherwise everything is passed through and we list the current directory.
directory="."
if [[ $# -gt 0 && ! $1 =~ ^- ]]; then
    directory=$1
    shift
fi

# Run the real binary with defaults, the directory, and any remaining args.
/opt/homebrew/bin/lsd "${default_opts[@]}" "$directory" "$@"
|
||||
|
||||
|
15
mamba-exporter
Executable file
15
mamba-exporter
Executable file
|
@ -0,0 +1,15 @@
|
|||
#!/bin/bash

# Export every named conda/mamba environment (except base) to <env>.yml
# in the current directory.

# List all conda environments and cut the output to get just the names
envs=$(mamba env list | awk '{print $1}' | grep -v '^#' | grep -v 'base')

# Loop through each environment name
for env in $envs; do
    # Export directly by name — no need to activate each environment.
    # "$env" is quoted so an unusual environment name cannot word-split.
    echo "Exporting $env..."
    mamba env export --name "$env" > "${env}.yml"
done

echo "All environments have been exported."
|
||||
|
26
mamba-importer
Executable file
26
mamba-importer
Executable file
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash

# Recreate conda environments from .yml export files.
# With a single .yml argument, import just that file; with no arguments,
# import every .yml in the current directory.

# Function to process a single .yml file
process_file() {
    file="$1"
    if [[ -f "$file" ]]; then
        # The environment name comes from the 'name:' field inside the file.
        # (Removed the old unused env_name=$(... sed 's/.yml$//') line — it
        # was dead code, and its unescaped '.' matched any character.)
        echo "Creating environment from $file..."
        conda env create -f "$file" || echo "Failed to create environment from $file"
    else
        echo "File $file does not exist."
    fi
}

# Check if a .yml file was provided as an argument
if [[ $# -eq 1 && $1 == *.yml ]]; then
    # Process the provided .yml file
    process_file "$1"
else
    # No argument provided, process all .yml files in the current directory
    # (with no matches, the literal '*.yml' is caught by the -f check above)
    for file in *.yml; do
        process_file "$file"
    done
    echo "Environment creation process completed."
fi
|
||||
|
24
murder
Executable file
24
murder
Executable file
|
@ -0,0 +1,24 @@
|
|||
#!/bin/bash

# Kill processes by listening port (numeric argument) or by name match.

# Check if an argument is given
if [ $# -eq 0 ]; then
    echo "Usage: murder [process name or port]"
    exit 1
fi

# Get the input parameter
ARGUMENT=$1

# Check if the argument is numeric
if [[ $ARGUMENT =~ ^[0-9]+$ ]]; then
    echo "Killing processes listening on port $ARGUMENT"
    lsof -t -i:"$ARGUMENT" | xargs kill
else
    # Process name was given instead of a port number.
    # "$ARGUMENT" is quoted so a multi-word name stays one grep pattern
    # (unquoted, 'my server' became two patterns and grep misfired).
    echo "Killing processes with name $ARGUMENT"
    for PID in $(ps aux | grep "$ARGUMENT" | grep -v grep | awk '{print $2}'); do
        echo "Killing process $PID"
        sudo kill -9 $PID
    done
fi
|
||||
|
8
o
Executable file
8
o
Executable file
|
@ -0,0 +1,8 @@
|
|||
#! /bin/bash

# Launch osh.py, first starting a background ollama server if none is running.

if [[ -z $(pidof ollama) ]]; then
    # Start ollama detached and survive this shell's exit.
    ollama serve &>/dev/null &
    disown
fi

# "$@" (quoted) forwards arguments verbatim — the unquoted $@ re-split
# any argument containing spaces before it reached osh.py.
/usr/bin/env python3 /Users/sij/AI/osh/osh.py "$@"
|
39
summarize
Executable file
39
summarize
Executable file
|
@ -0,0 +1,39 @@
|
|||
#!/bin/bash

# Send a file to the sij.ai speech-summary endpoint and save the audio reply.

# Check if a filename has been provided
if [ "$#" -ne 1 ]; then
    echo "Usage: $0 <filename>"
    exit 1
fi

filename="$1"

# Check if the file exists
if [ ! -f "$filename" ]; then
    echo "Error: File does not exist."
    exit 1
fi

# Assuming GLOBAL_API_KEY is exported in your environment
if [ -z "$GLOBAL_API_KEY" ]; then
    echo "Error: GLOBAL_API_KEY is not set."
    exit 1
fi

# Endpoint
endpoint="https://api.sij.ai/speaksummary"

# Make the request. `-f` makes curl fail on HTTP errors instead of saving the
# error body, and checking curl's exit status means a pre-existing
# response.wav can no longer masquerade as success (the old version only
# tested that the file existed).
if curl -f -X POST "$endpoint" \
    -H "Authorization: Bearer $GLOBAL_API_KEY" \
    -H "Content-Type: multipart/form-data" \
    -F "file=@$filename" \
    -o "response.wav" && [ -f "response.wav" ]; then
    echo "The summary has been processed and saved as 'response.wav'"
else
    echo "Failed to save the summary."
fi
|
||||
|
37
txt-line-merge-abc
Executable file
37
txt-line-merge-abc
Executable file
|
@ -0,0 +1,37 @@
|
|||
#!/usr/bin/env python3

import sys


def merge_files(file_paths):
    """Merge the unique lines of every input file into the first file.

    Lines are de-duplicated across all inputs and written back to
    file_paths[0] in case-insensitive alphabetical order.
    """
    if not file_paths:
        print("At least one file path is required.")
        return

    # Union of every line across all inputs (the first file included).
    unique_lines = set()
    for path in file_paths:
        with open(path, 'r') as src:
            unique_lines.update(src.read().splitlines())

    # Case-insensitive alphabetical order.
    ordered = sorted(unique_lines, key=str.lower)

    # Overwrite the first file with the merged result.
    with open(file_paths[0], 'w') as dst:
        for text_line in ordered:
            dst.write(text_line + '\n')

    print(f"Merged {len(file_paths)} files into {file_paths[0]}")


if __name__ == "__main__":
    # File paths come straight from the command line.
    args = sys.argv[1:]
    if not args:
        print("Usage: txt-line-merge-abc file1.txt file2.txt file3.txt ...")
    else:
        merge_files(args)
|
||||
|
||||
|
16
txtsort
Executable file
16
txtsort
Executable file
|
@ -0,0 +1,16 @@
|
|||
#!/bin/bash

# Print the lines of a file in sorted order.

# Checking if the user provided a file name
if [ $# -ne 1 ]; then
    echo "Usage: $0 filename"
    exit 1
fi

# Checking if the given file is readable
if ! [ -r "$1" ]; then
    echo "The file '$1' is not readable or does not exist."
    exit 1
fi

# "$1" quoted so filenames containing spaces or glob characters work
# (unquoted, `sort $1` word-split and glob-expanded the name).
sort "$1"
|
||||
|
115
vpn
Executable file
115
vpn
Executable file
|
@ -0,0 +1,115 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import subprocess
|
||||
import requests
|
||||
import argparse
|
||||
import json
|
||||
import random
|
||||
|
||||
# Countries whose exit nodes the 'shh' action will pick from at random.
PRIVACY_FRIENDLY_COUNTRIES = ['Sweden', 'Switzerland', 'Germany', 'Finland', 'Netherlands', 'Norway']
|
||||
|
||||
def get_current_exit_node():
    """Return the name of the currently active exit node, if any.

    Shells out to `tailscale status --json` and reads a fixed path in the
    parsed document; raises if the CLI call fails.
    """
    result = subprocess.run(['tailscale', 'status', '--json'], capture_output=True, text=True)
    if result.returncode != 0:
        raise Exception("Failed to get Tailscale status")

    status = json.loads(result.stdout)
    # NOTE(review): 'Peer' in tailscale's status JSON is normally a map keyed
    # by node public key, so this fixed Peer->Tailnet->ExitNode->Name path may
    # always yield None — verify against real `tailscale status --json` output.
    current_exit_node = status.get('Peer', {}).get('Tailnet', {}).get('ExitNode', {}).get('Name')
    return current_exit_node
|
||||
|
||||
def set_exit_node():
    """Switch to Tailscale's suggested exit node and verify it via Mullvad."""
    # Ask the CLI which exit node it recommends.
    suggestion = subprocess.run(['tailscale', 'exit-node', 'suggest'],
                                capture_output=True, text=True)
    exit_node = ''
    for raw_line in suggestion.stdout.splitlines():
        if 'Suggested exit node' in raw_line:
            exit_node = raw_line.split(': ')[1].strip()
            break

    print(f"Suggested exit node: {exit_node}")

    # Apply the suggestion.
    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)

    # Mullvad's check endpoint reports which of its servers traffic exits from.
    info = requests.get('https://am.i.mullvad.net/json').json()
    reported_hostname = info.get('mullvad_exit_ip_hostname')

    print(f"Current exit node hostname: {reported_hostname}")

    # Compare the node's short name (before the first '.') with the report.
    if exit_node.split('.')[0] == reported_hostname:
        print("Exit node set successfully!")
    else:
        print("Failed to set exit node!")
|
||||
|
||||
def unset_exit_node():
    """Clear the active Tailscale exit node (direct routing again)."""
    clear_cmd = ['tailscale', 'set', '--exit-node=']
    subprocess.run(clear_cmd, check=True)
    print("Exit node unset successfully!")
|
||||
|
||||
def start_exit_node():
    """Enable an exit node only when none is currently active."""
    active = get_current_exit_node()
    if active:
        # Leave an existing connection alone.
        print(f"Already connected to exit node: {active}")
        return
    set_exit_node()
|
||||
|
||||
def get_random_privacy_friendly_exit_node():
    """Pick a random exit node located in a privacy-friendly country.

    Raises if the CLI listing fails or no qualifying node exists.
    """
    listing = subprocess.run(['tailscale', 'exit-node', 'list'],
                             capture_output=True, text=True)
    if listing.returncode != 0:
        raise Exception("Failed to list Tailscale exit nodes")

    # Column 2 holds the node name, column 3 its country.
    candidates = [
        fields[1]
        for fields in (line.split() for line in listing.stdout.splitlines())
        if len(fields) > 3 and fields[2] in PRIVACY_FRIENDLY_COUNTRIES
    ]

    if not candidates:
        raise Exception("No privacy-friendly exit nodes available")

    return random.choice(candidates)
|
||||
|
||||
def set_random_privacy_friendly_exit_node():
    """Switch to a random privacy-friendly exit node and verify it via Mullvad."""
    exit_node = get_random_privacy_friendly_exit_node()
    print(f"Selected random privacy-friendly exit node: {exit_node}")

    # Apply the choice.
    subprocess.run(['tailscale', 'set', f'--exit-node={exit_node}'], check=True)

    # Mullvad's check endpoint reports which of its servers traffic exits from.
    info = requests.get('https://am.i.mullvad.net/json').json()
    reported_hostname = info.get('mullvad_exit_ip_hostname')

    print(f"Current exit node hostname: {reported_hostname}")

    # Compare the node's short name (before the first '.') with the report.
    if exit_node.split('.')[0] == reported_hostname:
        print("Exit node set successfully!")
    else:
        print("Failed to set exit node!")
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: start (enable if none active), stop (disable),
    # new (switch to the suggested node), shh (random privacy-friendly node).
    parser = argparse.ArgumentParser(description='Manage VPN exit nodes.')
    parser.add_argument('action', choices=['start', 'stop', 'new', 'shh'], help='Action to perform: start, stop, new, or shh')

    args = parser.parse_args()

    if args.action == 'start':
        start_exit_node()
    elif args.action == 'stop':
        unset_exit_node()
    elif args.action == 'new':
        set_exit_node()
    elif args.action == 'shh':
        set_random_privacy_friendly_exit_node()
|
||||
|
3
z
Executable file
3
z
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/bin/zsh

# Reload the zsh startup files. The shebang was missing: these are zsh rc
# files, so the script must run under zsh, not /bin/sh.
# NOTE(review): as a child process this cannot change the calling shell's
# environment — invoke it as `source z` (or `. z`) for that effect.
source ~/.zprofile
source ~/.zshrc
source ~/.zshenv
|
Loading…
Reference in a new issue