config/shell: update aliases
parent b2e64b6555
commit 856ab66059

@@ -51,6 +51,14 @@ status() {
fi
}

disks() {
{
sudo df -h -T --exclude-type=tmpfs --exclude-type=devtmpfs --exclude-type=squashfs --exclude-type=overlay
sudo blkid
sudo fdisk -l
} | less
}

__u="$sudo apt update && $sudo apt upgrade"
alias u="$__u"
alias ur="tmux new-session -s upgrade '$__u && $sudo reboot'"

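The u/ur aliases expand a $sudo variable rather than calling sudo directly; presumably it is set earlier in the config (not shown in this diff). A minimal sketch of that assumption:

    # assumption, for illustration only: empty when already root, "sudo" otherwise
    test "$(id -u)" = 0 && sudo='' || sudo='sudo'
    u     # runs `apt update && apt upgrade`, with sudo prefixed when needed
    ur    # same upgrade inside a tmux session named "upgrade", then reboots
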
@@ -238,7 +238,7 @@ alias v='edit'
alias st='synct'
command -v dtrx >/dev/null && alias ex='dtrx'
alias expr='noglob expr'
alias get='noglob ='
alias calc='noglob ='
alias bi='b .*ignore*'

# Shortcuts

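The calc/get aliases wrap `=` with noglob so characters like `*` reach the calculator instead of being glob-expanded; `=` itself is assumed to be a small calculator function defined elsewhere, e.g. (hypothetical sketch, not part of this diff):

    function = { echo "$@" | bc -l; }   # hypothetical definition of `=`
    calc 6*7     # → 42; without noglob, zsh would try to expand 6*7 as a filename pattern
    calc 2^10    # bc's power operator; extended_glob would otherwise treat ^ as a glob operator
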
@@ -265,6 +265,7 @@ del() {
m "$@" $trash
}

alias get='curlie --location'
curlh() {
curl -v --location "$@" >/dev/null
}

@@ -4,8 +4,9 @@ command -v notify-send >/dev/null &&

if which jrnl >/dev/null; then
j(){ jrnl "$@" && jrnl-tag; }
jn() { jrnl -to today "$@" | less --exit-follow-on-close +F; }
alias jnc='jn -contains'
jna() { jrnl -to today "$@" | less --exit-follow-on-close +F; }
alias jn='jna -n 99'
alias jnc='jna -contains'
alias jne='jrnl --edit'
fi

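For reference, how these wrappers are presumably used (jrnl's -n, -contains, -to and --edit flags are standard; the exact workflow is an assumption):

    j "Lunch with @Franklin, then @piano practice"   # new entry, then re-tag via jrnl-tag
    jn              # last 99 entries up to today, paged with `less +F`
    jnc piano       # same view, limited to entries containing "piano"
    jne -n 1        # edit the most recent entry
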
@@ -41,8 +41,10 @@ stack() {
# Uncomment the line below to always use the main stackspin repo, even when running in a fork.
#export GITLAB_CI="true"
echo Selected Stackspin cluster "$_cluster_name" with IP "$_cluster_ip"
echo "$_cluster_name" >"$_stackspin_cluster_cache"
echo "$_cluster_name" >"$_stackspin_cluster_cache" || true
#test "$PWD" = "$HOME" && builtin cd "$STACKSPIN"
;;
(activate)
test -d "$STACKSPIN" && . $STACKSPIN/env/bin/activate
;;
(sso) "$cmdname" exec dashboard-backend -- flask "$@";;

@@ -74,7 +76,7 @@ stack() {
export name=${2:-$(echo $mail | sed -E 's/(.*)\.(.*)@.*/\u\1 \u\2/' )}
#echo "$mail,$name"
stack user init "$mail" "$name"
stack-invite
stack-invite "$3"
);;
(push)
test -f "$1" && $EDITOR "$1"

@@ -174,11 +176,13 @@ stack() {
# KUBE
# app clis
(occ) "$cmdname" exec nc-nextcloud -c nextcloud -it -- su www-data -s /bin/bash -c "php $command $*";;
(zulip) "$cmdname" exec zulip -- su zulip -c "/home/zulip/deployments/current/manage.py $*";;
(vikunja*)
local pod=$command
case "$1" in
(dump|export) cd "$PROJECTS/vikunja"
"$cmdname" exec "$pod-api" -- sh -c 'rm -f *.zip && ./vikunja dump >/dev/null && ls --color -lAhF >&2 && cat *.zip' >"$pod-dump_$(date +%F).zip"
"$cmdname" exec "$pod-api" -- \
sh -c 'rm -f *.zip && ./vikunja dump >/dev/null && ls --color -lAhF >&2 && cat *.zip' >"$pod-dump_$(date +%F).zip"
;;
(restore)
if ! test -f "$2"

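The dump branch streams the zip that `./vikunja dump` creates inside the API pod back through stdout into a dated file in $PROJECTS/vikunja. A usage sketch (file name and date are illustrative):

    stack vikunja dump                                   # → ./vikunja-dump_2024-05-01.zip in $PROJECTS/vikunja
    stack vikunja restore vikunja-dump_2024-05-01.zip    # upload the zip and replay it inside the pod
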
@@ -189,13 +193,36 @@ stack() {
"$cmdname" upload "$pod-api" "$file"
"$cmdname" exec "$pod-api" -it -- ./vikunja restore "$file"
;;
(psql) kubectl exec -it -n $("$cmdname" pod "$pod-postgresql") -- sh -c "PGPASSWORD=$(kubectl get secret --namespace stackspout $pod-postgresql -o jsonpath='{.data.password}' | base64 --decode) psql -h localhost -U vikunja -p 5432 vikunja";;
(psql)
kubectl exec -it -n $("$cmdname" pod "$pod-postgresql") -- \
sh -c "PGPASSWORD=$(kubectl get secret --namespace stackspout $pod-postgresql -o jsonpath='{.data.password}' | base64 --decode) psql -h localhost -U vikunja -p 5432 vikunja";;
(*) echo "Unknown $command subcommand";;
esac
;;
(psql)
local app=$1
shift
case "$1" in
(restore)
shift
file=$1
db=${2:-$app}
"$cmdname" upload "$app-postgresql" "$file"
"$cmdname" psql "$app" exec createdb "$db"
stack psql "$app" "$db" -f \
$(kubectl describe pod -n $(stack pod "$app-postgresql") | grep "from data" | awk '{print $1}')/$file
;;
(exec) command="$2"
shift 2
kubectl exec -it -n $("$cmdname" pod "$app-postgresql") -- sh -c "PGPASSWORD=$(kubectl get secret --namespace stackspout $app-postgresql -o jsonpath='{.data.password}' | base64 --decode) $command -h localhost -U $app -p 5432 $*"
;;
(*)
"$cmdname" psql "$app" exec psql "$@"
;;
esac;;
(maria)
app=$1
pw="$(kubectl get secret -n flux-system stackspin-$app-variables --template '{{.data.mariadb_password}}' | base64 -d 2>/dev/null ||
local app=$1
local pw="$(kubectl get secret -n flux-system stackspin-$app-variables --template '{{.data.mariadb_password}}' | base64 -d 2>/dev/null ||
kubectl get secret -n flux-system stackspin-$app-variables --template "{{.data.${app}_mariadb_password}}" | base64 -d)"
case $app in
(nextcloud) n=nc-mariadb;;

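The generalised psql branch reuses the pattern from the vikunja case: read the database password from the app's -postgresql secret and run a client inside that pod. Possible invocations, as a sketch assuming $cmdname is `stack`:

    stack psql vikunja                        # interactive psql as user vikunja, via the catch-all branch
    stack psql vikunja exec pg_dump vikunja   # any other postgres client with the same credentials
    stack psql vikunja restore dump.sql       # upload dump.sql, createdb, then replay it with -f
    stack maria nextcloud                     # analogous MariaDB path, password from the flux-system secret (sketch)
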
@@ -211,7 +238,7 @@ stack() {
;;
# high-level
(shell)
container=$1
local container=$1
shift
test "$1" = "-c" && pod=$2 && shift 2
"$cmdname" exec "$container" -c "$pod" -it -- /bin/sh "$@";;

@@ -240,13 +267,13 @@ stack() {
for arg
do case "$arg" in (-*) break;; (*) commands+="$arg"; shift;; esac
done
local IFS=$'\n'
for namespacedpod in $pods; do
namespacedpod="$pods"
#while IFS= read -r namespacedpod; do
test "$subcommand" = get ||
highlight "Running $subcommand on $namespacedpod" >&2
local IFS=' '
kubectl "$subcommand" "${commands[@]}" -n $namespacedpod "$@"
done;;
#done <<< "$pods"
;;
(pod)
test $# -gt 0 && local podname=$1 && shift
if ! kubectl get pods --all-namespaces --field-selector="status.phase=Running" -o=custom-columns=S:.metadata.namespace,N:.metadata.name --no-headers "$@" | grep --color=never -- "$podname"

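The pod helper prints "namespace podname" for running pods matching the argument, and callers rely on unquoted word splitting to turn that into `-n <namespace> <pod>`. A sketch of the round trip (names are illustrative):

    stack pod nc-nextcloud      # → e.g. "stackspin-apps nc-nextcloud-6f7c9-xk2lp" (namespace, then pod name)
    kubectl exec -it -n $(stack pod nc-nextcloud) -- sh   # deliberately unquoted so it splits into -n <ns> <pod>
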
@@ -277,6 +304,10 @@ stack() {
cp -nv install/kustomization.yaml $CLUSTER_DIR/
kubectl get namespace flux-system 2>/dev/null || kubectl create namespace flux-system
kubectl apply -k $CLUSTER_DIR

ssh "root@${_cluster_name}" mkdir /etc/nftables.d
ssh "root@${_cluster_name}" echo 'tcp dport { 2222 } counter accept' | tee /etc/nftables.d/ssh.nft

./install/install-stackspin.sh
;;
esac

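Note that in the line above the pipe into tee runs on the local machine, so the rule file is written locally; if the intent is to create it on the cluster host, one way (a sketch) would be to quote the whole pipeline for the remote shell:

    ssh "root@${_cluster_name}" "mkdir -p /etc/nftables.d && echo 'tcp dport { 2222 } counter accept' >/etc/nftables.d/ssh.nft"
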
@@ -159,10 +159,10 @@ setopt hist_save_no_dups
setopt hist_reduce_blanks
unsetopt hist_ignore_space
zshaddhistory() {
[[ $1 != netkeeper* ]] && [[ $1 != killm* ]] && [[ $1 != "stretchly reset" ]]
[[ $1 != netkeeper* ]] && [[ $1 != killm* ]] && [[ $1 != "stretchly reset" ]]
}

setopt sh_word_split
setopt sh_word_split # https://github.com/zsh-users/zsh-history-substring-search/issues/154
setopt extended_glob
unsetopt case_glob

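zshaddhistory is the zsh hook that decides whether a just-entered line is written to history: a non-zero return drops it, so the chained [[ ]] tests skip saving anything starting with netkeeper or killm, or the literal "stretchly reset". An equivalent single-test form (sketch):

    zshaddhistory() { [[ $1 != netkeeper* && $1 != killm* && $1 != "stretchly reset" ]]; }
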
@@ -16,8 +16,8 @@ sudo hdparm -MWAgt "$disk"
if test $# -eq 0
then highlight "[1mWrite Test"
sync
# This prevents predictions by using random, but since that is too slow we have to copy a previously created file
count=100$(test $(df --output="avail" . | tail -1) -gt 999999 && echo 0 || true)
# This prevents predictions by using random, but since that is too slow we have to copy a previously created file
highlight "Preparing random bits:" &&
sudo dd status=progress if=/dev/random of=/var/tmp/tempfile bs=1M count=$count &&
highlight "Copying random bits:" &&

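The count expression sizes the test file by free space: df --output=avail reports 1K blocks, so appending a 0 when more than roughly 1 GB is free turns count=100 into count=1000 (MiB, given bs=1M). Spelled out as a sketch:

    avail_kb=$(df --output=avail . | tail -1)   # free space in 1K blocks
    if test "$avail_kb" -gt 999999              # more than roughly 1 GB free
    then count=1000                             # 1000 MiB test file
    else count=100                              # 100 MiB test file
    fi
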
@@ -2,7 +2,7 @@
# Find terms in jrnl files and turn them into tags
# Check with jrnl --tags
if test $# -eq 0
then $0 sleep uni work nap health tech read girl dev phone Zinc run bike tour laptop computer PC CB piano faith journal Catherine Franklin Kerstin Henri Katja
then $0 sleep uni work nap health tech read girl dev phone Zinc run bike tour laptop computer PC CB piano faith journal Catherine Franklin Kerstin Henri Katja Erlangen Bayreuth Berlin ICF
else
for arg
do rpl "\(^\|[^@]\)\b$arg\b" "\1@$arg" $JOURNAL/*.txt 2>/dev/null

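The rpl pattern tags a bare word only when it is not already preceded by @: the group captures start-of-line or a non-@ character and is kept in the replacement. Roughly the same thing with GNU sed (a sketch; rpl is what the script actually uses):

    sed -i "s/\(^\|[^@]\)\b$arg\b/\1@$arg/g" "$JOURNAL"/*.txt
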
@@ -147,9 +147,11 @@ data:

    ingress:
      enabled: true
      # Elaborate style
      # Elaborate (TrueCharts) style
      annotations:
        kubernetes.io/tls-acme: "true"
        nginx.ingress.kubernetes.io/configuration-snippet: |
          more_set_headers "Content-Security-Policy: frame-ancestors 'self' files.${domain}";
      hosts:
        - host: "\${${app}_domain}"
          paths:

@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/sh -x
# Convert given file to PNG
pdftoppm -png "$@" >"$1.png" ||
convert -background white -alpha off -density 500 "$@" "$1.png"

.zshenv

@@ -8,7 +8,7 @@ export_existing() {
export_existing DATA $HOME/daten $HOME/data
export MUSIC="$DATA/4-media/music"

export BORG_REPO='/mnt/backup/borg'
export BORG_REPO='admin@172.16.0.2:/mnt/b/user/janek/backup/janek-borg'
export BORG_PASSCOMMAND='pass service/device/borg/backup'

# xdg

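With BORG_REPO and BORG_PASSCOMMAND exported, borg commands need neither a repository argument nor an interactive passphrase prompt, e.g.:

    borg list                                 # archives in $BORG_REPO, passphrase fetched via `pass`
    borg create ::{hostname}-{now} "$DATA"    # sketch; the actual backup invocation is not part of this diff
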
@@ -102,8 +102,8 @@ export LESS="--raw-control-chars --ignore-case --LONG-PROMPT --jump-target=5 $(t
# TODO put into config file and use --exclude-from
# -x 'System Volume Information'
export DIRS_GENERATED="-x generated -x .gradle -x cmake_build -x dist-newstyle -x node_modules -x __pycache__ -x .pytest_cache"
export DIRS_IGNORE_SAFE="-x .cache -x .cpan -x *Cache -x .pyenv -x .local/cache -x share/baloo -x share/cabal -x share/cargo -x share/digikam -x share/gem -x share/JetBrains -x share/tldr -x share/syncthing -x share/Steam/ubuntu* -x share/Steam/package -x share/virtualenv -x share/Zeal -x state/gradle -x state/android -x Ferdi/Partitions -x oh-my-zsh -x wine/drive_c/windows -x vendor/cache $DIRS_GENERATED"
export DIRS_IGNORE="-x .sync -x .stfolder -x *build -x .git -x .idea -x env -x out -x cache -x Partitions -x vendor/bundle -x log $DIRS_IGNORE_SAFE"
export DIRS_IGNORE_SAFE="-x .cache -x .cpan -x *Cache -x .pyenv -x .local/cache -x .config/DeltaChat -x .config/discord -x .config/Slack -x .config/syncthing -x share/baloo -x share/cabal -x share/cargo -x share/digikam -x share/gem -x share/JetBrains -x share/tldr -x share/syncthing -x share/Steam/ubuntu* -x share/Steam/package -x share/virtualenv -x share/Zeal -x state/gradle -x state/android -x Ferdi/Partitions -x oh-my-zsh -x wine/drive_c/windows -x vendor/cache $DIRS_GENERATED"
export DIRS_IGNORE="-x .archive -x .sync -x .stfolder -x *build -x .git -x .idea -x env -x out -x cache -x Partitions -x vendor/bundle -x log $DIRS_IGNORE_SAFE"
# red stderr
test -f "/usr/lib/libstderred.so" && export LD_PRELOAD="/usr/lib/libstderred.so${LD_PRELOAD:+:$LD_PRELOAD}"
# software config

@@ -159,3 +159,5 @@ export CTEST_OUTPUT_ON_FAILURE=1
#autolight
#export TZ='Europe/Dublin'
#export TZ='Africa/Nairobi'

export CONTEST_NETWORK=lan-restricted-dev