Pulled my scripts back into my personal repo.

James Downie 2025-10-06 12:34:49 +10:00
parent dcad97a517
commit ade71394e8
9 changed files with 0 additions and 562 deletions

@@ -1,71 +0,0 @@
#!/usr/bin/env bash
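# Apply the numbered manifests in the current directory (or the whole tree with -r),
# in ascending order:
#   NN-*.yaml        -> envsubst (using an optional sibling "env" file) | kubectl apply
#   NN-*.url         -> kubectl apply -f <URL on the file's first line>
#   NN-ConfigMap-*/  -> assembled into a ConfigMap manifest; config.json supplies the
#                       name/namespace and lists which files to embed base64-encoded.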
T="`mktemp`"
if [ "$1" = "-r" ]; then
find . -type f -regex ".*\/[0-9][0-9]\-.*\.\(yaml\|url\)$" > "$T"
find . -type d -regex ".*\/[0-9][0-9]\-ConfigMap\-.*$" >> "$T"
else
find . -maxdepth 1 -type f -regex ".*\/[0-9][0-9]\-.*\.\(yaml\|url\)$" > "$T"
find . -maxdepth 1 -type d -regex ".*\/[0-9][0-9]\-ConfigMap\-.*$" >> "$T"
fi
for f in $( sort "$T" ); do
if [ -d "$f" ]; then
if [[ "`basename $f`" =~ ^[0-9][0-9]-ConfigMap-.*$ ]]; then
NAME=""
NAMESPACE=""
if [ ! -f "$f/config.json" ]; then
echo "Warning: Missing $f/vars"
else
CFG="$f/config.json"
NAME="`jq -r .name "$CFG"`"
NAMESPACE="`jq -r .namespace "$CFG"`"
fi
FOUT="`mktemp`"
cat <<EOF > "$FOUT"
apiVersion: v1
kind: ConfigMap
metadata:
name: $NAME
namespace: $NAMESPACE
data:
EOF
FILES="`mktemp`"
find "$f" -type f > "$FILES"
for FILE in $(cat "$FILES"); do
BASENAME="`basename "$FILE"`"
if [ "$BASENAME" != "config.json" ]; then
jq -e ".base64[] | select(. == \"$BASENAME\")" "$CFG" > /dev/null
if [ $? -eq 0 ]; then
echo " $BASENAME.base64: |" >> "$FOUT"
base64 "$FILE" | tr -d '\n' | sed "s/^/ /" >> "$FOUT"
echo "" >> "$FOUT"
else
echo " $BASENAME: |" >> "$FOUT"
sed "s/^/ /" "$FILE" >> "$FOUT"
fi
fi
done
rm "$FILES"
kubectl apply -f "$FOUT"
rm "$FOUT"
else
echo "Dunno: $f"
fi
else
D="`dirname "$f"`"
if [ -f "$D/env" ]; then
source "$D/env"
fi
EXT="`echo "$f" | tr '.' '\n' | tail -n 1`"
if [ "$EXT" = "url" ]; then
url=$(head -n 1 "$f" | tr -d '\n')
kubectl apply -f "$url"
else
envsubst < "$f" | kubectl apply -f -
if [ $? -ne 0 ]; then
echo "🔴 $f"
fi
fi
fi
done
rm "$T"

@@ -1,46 +0,0 @@
#!/usr/bin/env bash
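# Reverse of the apply script: delete the numbered resources in descending order
# (or the whole tree with -r). *.yaml files and *.url targets are deleted directly;
# NN-ConfigMap-* directories are reduced to a stub manifest (name/namespace from
# config.json) so the ConfigMap can be deleted by name.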
T="`mktemp`"
if [ "$1" = "-r" ]; then
find . -type f -regex ".*\/[0-9][0-9]\-.*\.\(yaml\|url\)$" > "$T"
find . -type d -regex ".*\/[0-9][0-9]\-ConfigMap\-.*$" >> "$T"
else
find . -maxdepth 1 -type f -regex ".*\/[0-9][0-9]\-.*\.\(yaml\|url\)$" > "$T"
find . -maxdepth 1 -type d -regex ".*\/[0-9][0-9]\-ConfigMap\-.*$" >> "$T"
fi
for f in $( sort -r "$T" ); do
if [ -d "$f" ]; then
if [[ "`basename $f`" =~ ^[0-9][0-9]-ConfigMap-.*$ ]]; then
NAME=""
NAMESPACE=""
if [ ! -f "$f/config.json" ]; then
echo "Warning: Missing $f/vars"
else
CFG="$f/config.json"
NAME="`jq -r .name "$CFG"`"
NAMESPACE="`jq -r .namespace "$CFG"`"
fi
FOUT="`mktemp`"
cat <<EOF > "$FOUT"
apiVersion: v1
kind: ConfigMap
metadata:
name: $NAME
namespace: $NAMESPACE
EOF
kubectl delete -f "$FOUT"
rm "$FOUT"
fi
else
EXT="`echo "$f" | tr '.' '\n' | tail -n 1`"
if [ "$EXT" = "url" ]; then
url=$(head -n 1 "$f" | tr -d '\n')
echo "$url"
kubectl delete -f "$url"
else
kubectl delete -f "$f"
fi
fi
done
rm "$T"

@@ -1,7 +0,0 @@
#!/bin/bash
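# Run a command interactively inside a pod of the given deployment.
# Arguments: <namespace> <deployment> <command>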
NS="$1"
DEPL="$2"
CMD="$3"
kubectl -n "$NS" exec "deployment/$DEPL" -ti -- "$CMD"

@@ -1,10 +0,0 @@
#!/bin/bash
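# Show logs for a deployment; pass -f as the third argument to follow them.
# Arguments: <namespace> <deployment> [-f]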
NS="$1"
DEPL="$2"
if [ "$3" = "-f" ]; then
kubectl logs -n "$NS" -f "deployment/$DEPL"
else
kubectl logs -n "$NS" "deployment/$DEPL"
fi

@@ -1,45 +0,0 @@
#!/bin/bash
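# Print one line per pod across all non-system namespaces: namespace, owning
# deployment ("_" if no app=<deployment> selector matches the pod), pod name and
# status phase, then pipe the lot through column/sort below.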
function ks {
NAMESPACES="`mktemp`"
kubectl get namespaces --output=json \
| jq -r .items[].metadata.name \
| grep -v "^kube-system$" \
| grep -v "^kube-public$" \
| grep -v "^kube-node-lease$" \
> "$NAMESPACES"
for NAMESPACE in $(cat "$NAMESPACES"); do
DEPLOYMENTS="`mktemp`"
DEPLOYMENT_PODS="`mktemp`"
kubectl -n "$NAMESPACE" get deployments --output=json \
| jq -r .items[].metadata.name \
> "$DEPLOYMENTS"
for DEPLOYMENT in $(cat "$DEPLOYMENTS"); do
PODS="`mktemp`"
kubectl -n "$NAMESPACE" --selector "app=$DEPLOYMENT" get pods --output=json \
| jq -r .items[].metadata.name \
> "$PODS"
cat "$PODS" >> "$DEPLOYMENT_PODS"
for POD in $(cat "$PODS"); do
STATUS="`kubectl -n "$NAMESPACE" --selector="app=$DEPLOYMENT" get pods --output=json | jq -r ".items[] | select(.metadata.name==\\\"$POD\\\") | .status.phase"`"
echo -e "$NAMESPACE\t$DEPLOYMENT\t$POD\t$STATUS"
done
rm "$PODS"
done
PODS="`mktemp`"
kubectl -n "$NAMESPACE" get pods --output=json \
| jq -r .items[].metadata.name \
| grep -v -f "$DEPLOYMENT_PODS" \
> "$PODS"
for POD in $(cat "$PODS"); do
STATUS="`kubectl -n "$NAMESPACE" get pods --output=json | jq -r ".items[] | select(.metadata.name==\\\"$POD\\\") | .status.phase"`"
echo -e "$NAMESPACE\t_\t$POD\t$STATUS"
done
rm "$PODS"
rm "$DEPLOYMENT_PODS"
rm "$DEPLOYMENTS"
done
rm "$NAMESPACES"
}
ks | column -t | sort

@@ -1,34 +0,0 @@
# How I use Git
I host a few `git` repositories on my own personal `gitea` instance. The ones I use the most are...
- `bin`, for my scripts (including my `.bash_profile` script)
- `cfg`, for my configuration files
- `Notes`, for my Obsidian notes
- `Orchestration`, for my container and virtual machine solutions
There are also a few projects that I build myself...
- `neovim`
- `multibg-wayland`
Anyway, there are plenty more repositories, and not all of them are required on all of my machines. While I'm working on a machine I'll make changes in one or many repositories, so before I shut that machine down I need to `git add`, `git commit` and `git push`.
# How This Script Helps
In my `.bash_profile` script I set the environment variable `REPO_CFG` to point to my `repo.yaml` file (which is in my `cfg` repository, by the way). I also add this `jdownie/repo` folder to my `PATH`. There is an example of a `repo.yaml` in this folder. For each repository there's a code, a url and a local path to clone the repository into. There's also a list of hostnames that the repository is wanted on (which can be an empty list if the repository is wanted on all hosts, like `bin` and `cfg` for example).
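In practice that amounts to a couple of lines like these (the paths here are only illustrative):
```bash
# Illustrative paths; point REPO_CFG at your repo.yaml and put the
# folder containing the repo script on your PATH.
export REPO_CFG="$HOME/cfg/repo.yaml"
export PATH="$PATH:$HOME/bin/jdownie/repo"
```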
With all of that in place, I can run the following commands across all of my repositories...
- `repo status`, list each repo with a count of "dirty" files against each
- `repo lc`, stage and commit all files in each repo with a generic comment
- `repo fetch`, `repo pull` and `repo push`
- `repo sync`, pulls and pushes all repositories
This script makes it easy for me to run `repo lc` and `repo sync` before I shut a machine down. On my next machine I can run `repo sync` to get my changes on the new machine.
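So the shutdown/startup routine boils down to:
```bash
repo lc     # lazy-commit anything I've touched
repo sync   # pull and push every repository

# ...and on the next machine:
repo sync
```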
If I want to remove a repo from a host, I can remove the hostname from that repository's `hosts` list, then run `repo prune`, which does an `lc`, a `sync` and then removes the cloned folder.
Alternatively, I might move a repository: I change the url in `repo.yaml` and then run `repo align` to update that repository's remote URL.

@@ -1,336 +0,0 @@
#!/usr/bin/env python
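# Manage the repositories listed in the repo.yaml pointed to by $REPO_CFG.
# Subcommands: list, status, clone, fetch, pull, push, sync, lc, lcs, align, prune.
# Supports both git and fossil repositories (vcs: fossil) and caches status output
# under $XDG_CACHE_HOME (or /tmp) so repeated "repo status" calls stay fast.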
import socket
import yaml, sys, subprocess, os
import concurrent.futures
import re
import shutil
import json
import time
from pathlib import Path
def get_latest_git_mtime(repos: dict) -> float:
max_mtime = 0.0
for k, info in repos.items():
repo_path = info.get("path")
if not repo_path:
continue
repo_path = os.path.expanduser(repo_path)
git_dir = os.path.join(repo_path, ".git")
try:
mtime = os.path.getmtime(git_dir)
max_mtime = max(max_mtime, mtime)
except (FileNotFoundError, NotADirectoryError):
continue
return max_mtime
def is_repo_status_outdated(repos):
latest_mtime = get_latest_git_mtime(repos)
last_mtime = 0
if os.path.exists(filepath("status")) and os.path.exists(filepath("mtime")):
with open(filepath("mtime"), "rt") as fin:
content = fin.read().strip()
last_mtime = float(content)
return latest_mtime > last_mtime
def to_keycap(ch: str) -> str:
# this looked crap on my remote sessions for some reason...
# if ch in '0123456789#*':
# return ch + '\uFE0F\u20E3'
# else:
# raise ValueError(f"Unsupported keycap character: {ch!r}")
return ch
def indicators(props, ind = "tl"):
tl = "\U0001F7E2"
if "ab" in props.keys() and ( props["ab"]["push"] + props["ab"]["pull"] > 0 ):
tl = "\U0001F7E1"
if props["n"] > 0:
tl = "\U0001F534"
unstaged = "" # "\U0001F4DD"
up_keycap = "" # "\U0001F53C" # "\u2B06\uFE0F"
down_keycap = "" # "\U0001F53D" # "\u2B07\uFE0F"
status_digit = "##" if props["n"] > 99 else str(props["n"] % 100)
status_indicator = f"{to_keycap(status_digit)}{unstaged}"
push_digit = "?"
push_indicator = f" {up_keycap}"
pull_digit = "?"
pull_indicator = f" {down_keycap}"
if "ab" in props.keys():
push_digit = "##" if props["ab"]["push"] > 99 else f"{props['ab']['push'] % 100:2d}"
push_indicator = f"{to_keycap(push_digit)}{up_keycap}"
pull_digit = "##" if props["ab"]["pull"] > 99 else f"{props['ab']['pull'] % 100:2d}"
pull_indicator = f"{to_keycap(pull_digit)}{down_keycap}"
ret = "?"
if ind == "tl":
ret = f"{tl}"
if ind == "flags":
ret = f"{status_indicator} {push_indicator} {pull_indicator}"
return ret
def rm(p):
if os.path.exists(p):
os.remove(p)
def filepath(f):
base = os.environ.get("XDG_CACHE_HOME")
base = "/tmp" if base == None else base
ret = os.path.join(base, f"repo.{f}")
return ret
def is_file_recent(filepath, minutes=10):
if not os.path.isfile(filepath):
return False
mtime = os.path.getmtime(filepath)
return (time.time() - mtime) < (minutes * 60)
def hostname():
ret = socket.gethostname().split('.', 1)[0]
return ret
def parse_yaml(file_path):
with open(file_path, 'r') as file:
try:
data = yaml.safe_load(file)
return data
except yaml.YAMLError as e:
print(f"Error parsing YAML file: {e}")
return None
def execute_command(command, cwd = None, dump_error = True):
command = f"/usr/bin/env bash -c '{command}'"
try:
if cwd != None:
result = subprocess.run(command, cwd=cwd, shell=True, check=True, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
result = subprocess.run(command, shell=True, check=True, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if result.returncode == 0:
return result.stdout.strip()
else:
print(f"Error executing command: {result.stderr.strip()}")
print(command)
return None
except Exception as e:
if dump_error:
print(f"Error executing command: {command}")
print(e)
return None
def isFossil(cfg):
ret = False
if "vcs" in cfg.keys():
ret = ( cfg["vcs"] == "fossil" )
return ret
def perform_action(action, key, item, silent = False):
output = None
hn = hostname()
lbl = "{0}ing".format(action).title()
if os.path.exists(os.path.expanduser(item["path"])):
if action in list([ "pull", "push", "fetch" ]):
push = True
if "push" in item.keys():
push = item["push"]
if push or action in list([ "pull", "fetch" ]):
if isFossil(item):
cwd = Path(os.path.expanduser(item["path"]))
cmd = f"fossil {action}"
if not silent:
print("{0} {1}...".format(lbl, key))
output = execute_command(cmd, cwd=cwd)
else:
cmd = "git -C \"{0}\" {1}".format(os.path.expanduser(item["path"]), action)
if not silent:
print("{0} {1}...".format(lbl, key))
output = execute_command(cmd)
rm(filepath("status"))
elif action == "sync":
if not silent:
print("{0} {1}...".format(lbl, key))
perform_action("pull", key, item, silent=True)
perform_action("push", key, item, silent=True)
rm(filepath("status"))
elif action == "lcs":
if not silent:
print("{0} {1}...".format(lbl, key))
perform_action("pull", key, item, silent=True)
perform_action("lc", key, item, silent=True)
perform_action("pull", key, item, silent=True)
perform_action("push", key, item, silent=True)
rm(filepath("status"))
elif action == "lc":
if isFossil(item):
cwd = Path(os.path.expanduser(item["path"]))
cmd = f"fossil addremove"
output = execute_command(cmd, cwd=cwd)
cmd = f"fossil commit -m \"Lazy commit on {hn}\""
output = execute_command(cmd, cwd=cwd)
else:
cmd = "git -C \"{0}\" status --porcelain".format(os.path.expanduser(item["path"]))
output = execute_command(cmd).split("\n")
if len(output[0]) > 0:
print("Lazy committing {0}...".format(key))
cmd = "git -C \"{0}\" add .".format(os.path.expanduser(item["path"]))
output = execute_command(cmd)
cmd = "git -C \"{0}\" commit -m \"Lazy commit on {1}.\"".format(os.path.expanduser(item["path"]), hn)
output = execute_command(cmd)
rm(filepath("status"))
return output
if __name__ == "__main__":
yaml_file_path = os.getenv("REPO_CFG")
if yaml_file_path == None or not os.path.exists(yaml_file_path):
print(f"Environment variable REPO_CFG needs to point to your repo.yaml file.", file=sys.stderr)
sys.exit(1)
try:
cfg = parse_yaml(yaml_file_path)
except:
print(f"Unable to parse {yaml_file_path}.", file=sys.stderr)
sys.exit(2)
r = list(cfg.keys())
config_file = filepath("config")
if not is_file_recent(config_file, minutes = 60 * 60):
# Register each repository path (with any "~" expanded) as a git safe.directory...
for k in r:
path = os.path.expanduser(cfg[k]["path"])
cmd = f"git config --global --add safe.directory {path}"
# I am not outputting any errors here because of a dumb bug in WSL.
output = execute_command(cmd, dump_error=False)
Path(config_file).touch()
if len(sys.argv) == 3:
if not sys.argv[2] in cfg.keys():
print("{0} is not one of your repositories.".format(sys.argv[2]))
exit(1)
r = list([ sys.argv[2] ])
if sys.argv[1] == "list":
for k in cfg.keys():
print(k)
elif sys.argv[1] == "status":
dat = dict()
cache_file = filepath("status")
if is_repo_status_outdated(cfg):
for k in r:
path = os.path.expanduser( cfg[k]["path"] )
if os.path.exists( path ):
n = 0
props = dict()
props["key"] = k
if isFossil(cfg[k]):
cwd = os.path.expanduser(cfg[k]["path"])
cmd = "fossil json status"
output = json.loads(execute_command(cmd, cwd=cwd))
n = len(output["payload"]["files"])
cmd = "fossil extras"
lines = execute_command(cmd, cwd=cwd).split("\n")
for line in lines:
if len(line.strip()) > 0:
n = n + 1
else:
cmd = "git -C \"{0}\" status --porcelain=2 --branch".format( path )
lines = execute_command(cmd).split("\n")
for line in lines:
m = re.match(r"# branch\.(\S+)\s+(.+)$", line)
if m:
prop = m.group(1)
rest = m.group(2)
if prop == "ab":
m2 = re.match(r"[+](\d+)\s[-](\d+)", rest)
p2 = rest
if m2:
p2 = dict()
p2["push"] = int(m2.group(1))
p2["pull"] = int(m2.group(2))
props[prop] = p2
elif prop == "oid":
props["commit"] = rest
else:
props[prop] = rest
else:
n = n + 1
props["n"] = n
dat[k] = props
with open(cache_file, "wt") as fout:
json.dump(dat, fout, indent = 2)
with open(filepath("mtime"), "wt") as fout:
fout.write(str(get_latest_git_mtime(cfg)))
else:
with open(cache_file, "rt") as fin:
dat = json.load(fin)
ml = 0
for k in dat:
ml = len(k) if len(k) > ml else ml
for k in dat:
tl = indicators(dat[k], "tl")
flags = indicators(dat[k], "flags")
print(f" {tl} {k.ljust(ml + 1)} {flags}")
# print(json.dumps(dat, indent = 2))
elif sys.argv[1] in list( [ "sync", "lc", "lcs", "pull", "push", "fetch" ] ):
thread_count = 30
with concurrent.futures.ThreadPoolExecutor(max_workers=thread_count) as executor:
futures = {executor.submit(perform_action, sys.argv[1], k, cfg[k]) for k in r}
for future in concurrent.futures.as_completed(futures):
try:
future.result() # To get the result of sync if it returns something
except Exception as exc:
print(f"{exc}")
# for k in r:
# perform_action(sys.argv[1], k, cfg[k])
elif sys.argv[1] == "clone":
hn = hostname()
for k in r:
hosttest = True
if "hosts" in cfg[k].keys():
hosttest = hn in cfg[k]["hosts"]
if hosttest and not os.path.exists(os.path.expanduser(cfg[k]["path"])):
print("Cloning {0} into {1}...".format(k, os.path.expanduser(cfg[k]["path"])))
if isFossil(cfg[k]):
cwd = Path(os.path.expanduser(cfg[k]["path"])).parent
cmd = "fossil clone --save-http-password \"{0}\"".format(cfg[k]["url"])
output = execute_command(cmd, cwd=cwd)
with open(os.path.expanduser(f"{cfg[k]['path']}.password"), "wt") as fout:
fout.write(output)
else:
cmd = "git clone \"{0}\" \"{1}\"".format(cfg[k]["url"], os.path.expanduser(cfg[k]["path"]))
output = execute_command(cmd)
elif sys.argv[1] == "align":
hn = hostname()
p = re.compile("^.*Fetch URL: (.*)$")
for k in r:
hosttest = True
if "hosts" in cfg[k].keys():
hosttest = hn in cfg[k]["hosts"]
if hosttest and os.path.exists(os.path.expanduser(cfg[k]["path"])):
print("Aligning {0}...".format(k))
cmd = "git -C \"{0}\" remote show -n origin".format(os.path.expanduser(cfg[k]["path"]))
output = execute_command(cmd)
url = None
for line in output.split("\n"):
m = p.match(line)
if m != None:
url = m.group(1)
if url == None:
print("Unable to determine origin's remote path.")
else:
if cfg[k]["url"] == url:
print(" 🟢 {0}".format(url))
else:
print(" 🔴 {0}".format(url))
cmd = "git -C \"{0}\" remote set-url origin \"{1}\"".format(os.path.expanduser(cfg[k]["path"]), cfg[k]["url"])
output = execute_command(cmd)
print(" 🟢 {0}".format(cfg[k]["url"]))
# else:
# print("Failed hosttest for {0}".format(k))
elif sys.argv[1] == "prune":
hn = hostname()
for k in r:
hosttest = True
if "hosts" in cfg[k].keys():
hosttest = hn in cfg[k]["hosts"]
if not hosttest and os.path.exists(os.path.expanduser(cfg[k]["path"])):
perform_action("lc", k, cfg[k])
perform_action("sync", k, cfg[k])
print("Pruning {0}".format(k))
shutil.rmtree(os.path.expanduser(cfg[k]["path"]))

@@ -1,13 +0,0 @@
neovim:
path: ~/Build/neovim
url: https://github.com/neovim/neovim.git
push: false
hosts: []
hblShed:
path: ~/Development/HLB/shed
url: ssh://git@gitea.downie.net.au:32222/jdownie/shed.git
hosts:
- fry
- yancy
- frankie
- scruffy