Compare commits af1f18952c ... 490f50695b (26 commits)

Commits:
490f50695b
b6d8ad24f2
9829b58ee7
b0803435b4
99a13ed12a
5f9ffa9757
b49f92932e
a5cfef796c
67eb994037
c6206d3fc1
828b17b9c0
7dd8e25b00
a2896ea0ec
b81a70c474
4be82b0adb
4f82c00060
d79346de86
eda4de9d2c
750b42f7cf
84af52658e
334368a01f
91d3180a7b
289c583fbe
863ef2051b
dc9cee6da5
4c1cbb23f9
85 changed files with 1141 additions and 375 deletions
@@ -471,11 +471,15 @@
"timeToBreakInTray": false,
"currentTimeInBreaks": false,
"showTrayIcon": true,
"skipToNextScheduledBreakShortcut": "",
"skipToNextMiniBreakShortcut": "",
"skipToNextLongBreakShortcut": "",
"resetBreaksShortcut": "",
"pauseBreaksShortcut": "",
"resumeBreaksShortcut": "",
"__internal__": {
"migrations": {
"version": "1.14.1"
"version": "1.15.1"
}
}
}
}
@@ -1,5 +1,7 @@
test -n "$PS1" || return 0

which pfetch >/dev/null 2>&1 && pfetch

test $(id -u) -eq 0 || sudo=sudo

alias jc="$sudo journalctl --boot --unit"
@@ -12,6 +14,22 @@ alias scr="$sudo systemctl daemon-reload && $sudo systemctl reload-or-restart"
alias hist='history | less'
alias m='mv -vi'

alias nginx-edit="sudo vi /etc/nginx/nginx.conf && nginx -t && sudo systemctl reload nginx"

# Fast Find
ff() {
name=$1
shift
$(command -v fd || echo fdfind) --hidden "$name" ||
find "$@" -name "*$name*"
}

xtrace () {
set -x
"$@"
set +x
}

highlight() { echo "[4m$1[0m"; }
status() {
highlight 'System'
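Illustrative calls for the two helpers above (the arguments are made-up examples, not taken from the diff):
ff bashrc          # look for files matching *bashrc* via fd/fdfind, falling back to find
xtrace ls -la      # run a single command with set -x tracing around it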
@ -1,21 +1,80 @@
|
|||
set -o pipefail
|
||||
|
||||
alias localip="ip addr show | grep -E '(ens|eth)' | grep -oP '"'(?<=inet\s)\d+(\.\d+){3}'"' | head -1"
|
||||
ip=$(localip)
|
||||
|
||||
logs() {
|
||||
if test $# -eq 0
|
||||
then
|
||||
lnav $(pwd | cut -d '/' -f1-5)/{logs,document_errors}
|
||||
else
|
||||
for site in $(find /home -maxdepth 3 -name "*$1*")
|
||||
do cd $site
|
||||
lnav logs
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
monitor() {
|
||||
file=gatus.yaml
|
||||
echo 'customer-endpoint: &customer
|
||||
interval: 10m
|
||||
conditions:
|
||||
- "[STATUS] == 200"
|
||||
endpoints:' >$file
|
||||
for user in $(list users)
|
||||
do group="$(hestia v-list-user $user | head -3 | cut -d':' -f2 | tr -s ' ' | sed 'N;s/\n/:/;N;s/\n / (/;s/$/)/')"
|
||||
for domain in $(hestia v-list-web-domains $user | grep "$ip" | awk '{print $1}')
|
||||
do echo '- name: "'$domain'"
|
||||
<<: *customer
|
||||
group: "'$group'"
|
||||
url: "https://'$domain'"' >>$file
|
||||
done
|
||||
for domain in $(hestia v-list-mail-domains $user | tail +3 | awk '{print $1}')
|
||||
do echo '- name: "Mail '$domain'"
|
||||
<<: *customer
|
||||
group: "'$group'"
|
||||
url: "https://mail.'$domain'"' >>$file
|
||||
done
|
||||
done
|
||||
}
|
||||
|
||||
letsencrypt() {
|
||||
(
|
||||
ip=$(localip)
|
||||
IFS=$'\n'
|
||||
if test $# -eq 0
|
||||
then
|
||||
for user in $(list users)
|
||||
do for domain in $(hestia v-list-web-domains $user | grep $ip | awk '{print $1}')
|
||||
do #echo "Checking $user $domain" >&2
|
||||
hestia v-list-web-domain-ssl $user $domain | grep -q . && continue
|
||||
hestia v-list-web-domain $user $domain | grep -q REDIRECT && continue
|
||||
#echo "Generating Certificate" >&2
|
||||
hestia v-add-letsencrypt-domain $user $domain $(hestia v-list-web-domain $user $domain | grep ALIAS | tr -s ' ' | cut -d' ' -f2- | tr ' ' ',')
|
||||
done
|
||||
do letsencrypt "$user"
|
||||
done
|
||||
)
|
||||
else
|
||||
(
|
||||
IFS=$'\n'
|
||||
for user
|
||||
do
|
||||
for domain in $(hestia v-list-dns-domains $user | tail +3 | awk '{print $1}')
|
||||
do hestia v-add-remote-dns-domain $user $domain
|
||||
done
|
||||
for domain in $(hestia v-list-mail-domains $user | tail +3 | awk '{print $1}')
|
||||
do hestia v-list-mail-domain-ssl $user $domain | grep -q . || hestia v-add-letsencrypt-domain $user $domain '' yes
|
||||
done
|
||||
for domain in $(hestia v-list-web-domains $user | grep "$ip" | awk '{print $1}')
|
||||
do #echo commented out due to command echoing in hestia alias
|
||||
#echo "Checking $user $domain" >&2
|
||||
hestia v-list-web-domain $user $domain | grep -q REDIRECT && continue
|
||||
#hestia v-list-mail-domain-ssl $user $domain | grep -q . || hestia v-add-letsencrypt-domain $user $domain '' yes
|
||||
hestia v-list-web-domain-ssl $user $domain | grep . >/dev/null && continue
|
||||
#echo "Generating Certificate" >&2
|
||||
hestia v-add-letsencrypt-domain $user $domain $(hestia v-list-web-domain $user $domain | grep ALIAS | tr -s ' ' | cut -d' ' -f2- | tr ' ' ',')
|
||||
done
|
||||
echo "Waiting an hour to not trigger letsencrypt rate limits..."
|
||||
time=0
|
||||
while test $time -lt 2000
|
||||
do sleep 100
|
||||
sudo -v
|
||||
time=$((time + 100))
|
||||
done
|
||||
done
|
||||
)
|
||||
fi
|
||||
}
|
||||
|
||||
list() {
|
||||
|
@ -30,17 +89,20 @@ list() {
|
|||
done
|
||||
else if test -t 1
|
||||
then sudo "$HESTIA/bin/v-list-$type" "$@" | column -t
|
||||
else sudo "$HESTIA/bin/v-list-$type" "$@" | tail +3 | awk '{print $1}'
|
||||
else sudo "$HESTIA/bin/v-list-$type" "$@" | tail +3 | $(if test $type = users; then echo "grep -v yes"; else echo cat; fi) | awk '{print $1}'
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
hestia() {
|
||||
test $# -eq 0 && cd "$HESTIA" && return 0
|
||||
test "$1" = "-x" && shift && set -x
|
||||
command=$1
|
||||
shift
|
||||
echo '>' sudo $(which $command) "$@" >&2
|
||||
sudo $(which $command) "$@"
|
||||
echo '>' sudo "$(which $command)" "$@" >&2
|
||||
export SHELLOPTS
|
||||
sudo --preserve-env=SHELLOPTS timeout 30s $(which $command) "$@"
|
||||
set +x
|
||||
}
|
||||
|
||||
accessible() {
|
||||
|
.config/bash/pve.bash (Normal file, 14 lines)
@@ -0,0 +1,14 @@

[[ $- == *i* ]] && zfs list -d 0

alias cluster='scr corosync && scr pvesr'
qcow() { qemu-img convert $1 -O qcow2 ${2:-$1}.qcow2; }
extrac() {
for var; do
arg=${var%.lzo}
out=$(basename ${arg%.vma})
lzop -x $arg.lzo
vma extract $arg $out &&
( cd $out && find *.raw -exec qemu-img convert {} -O qcow2 $arg-{}.qcow2 \; )
done
}
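A usage sketch for the extrac helper above, assuming lzop, the Proxmox vma tool and qemu-img are installed (the backup filename is invented for illustration):
extrac vzdump-qemu-100-2024_01_01-00_00_00.vma.lzo   # decompress the vzdump archive and convert each contained raw disk to qcow2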
|
@ -98,6 +98,7 @@ Version 2019-11-04 2021-02-16"
|
|||
"C--" 'doom/decrease-font-size
|
||||
"C-u" 'evil-scroll-up
|
||||
:leader
|
||||
"l" 'link-hint-open-link-at-point
|
||||
"u" 'evil-prev-buffer
|
||||
"i" 'evil-next-buffer
|
||||
"q" 'doom/save-and-kill-buffer
|
||||
|
@ -268,6 +269,7 @@ Version 2019-11-04 2021-02-16"
|
|||
("C-c b" . org-cycle-list-bullet)
|
||||
("C-c ." . org-time-stamp-inactive)
|
||||
("C-c C-." . org-time-stamp)
|
||||
("C-c C-;" . (lambda () (interactive) (let ((time-stamp-format (concat "[" time-stamp-bare " %a %H:%M]"))) (call-interactively 'org-time-stamp))))
|
||||
("M-C-+" . org-timestamp-up)
|
||||
("M-C--" . org-timestamp-down)
|
||||
)
|
||||
|
@ -299,7 +301,8 @@ Version 2019-11-04 2021-02-16"
|
|||
(map! :map org-mode-map
|
||||
:leader
|
||||
"\\" 'org-ctrl-c-ctrl-c
|
||||
:desc "Agenda" "oA" (lambda () (interactive) (org-agenda nil "d"))
|
||||
:desc "Agenda" "oa" 'org-agenda
|
||||
:desc "My Agenda" "oA" (lambda () (interactive) (let ((org-agenda-tag-filter-preset '("-phys" "-erlangen"))) (org-agenda nil "d")))
|
||||
:localleader
|
||||
"C" 'org-clock-in
|
||||
"v" 'org-insert-heading
|
||||
|
@ -347,9 +350,47 @@ Version 2019-11-04 2021-02-16"
|
|||
org-priority-start-cycle-with-default nil)
|
||||
(setq org-priority-faces '((65 . error) (66 . "DarkGoldenRod") (67 . warning) (68 . "bisque") (69 . "grey")))
|
||||
|
||||
(push "PERM(e)" (cdr (car org-todo-keywords)))
|
||||
; #+TODO: IDEA(i!) OUTLINE(o!) DRAFT(t!) | REVIEW(r!) DONE(d!) ABANDON(a!)
|
||||
; highlight review keyword
|
||||
(setq org-todo-keywords
|
||||
'(
|
||||
(sequence
|
||||
"TODO(t)" ; A task that needs doing & is ready to do
|
||||
"PROJ(p)" ; A project, which usually contains other tasks
|
||||
"STRT(s)" ; A task that is in progress
|
||||
"WAIT(w)" ; Something external is holding up this task
|
||||
"HOLD(h)" ; This task is paused/on hold because of me
|
||||
"IDEA(i)" ; An unconfirmed and unapproved task or notion
|
||||
"|"
|
||||
"DONE(d)" ; Task successfully completed
|
||||
"KILL(k)") ; Task was cancelled, aborted, or is no longer applicable
|
||||
(sequence "IDEA(i!)" "OUTLINE(o!)" "DRAFT(f!)" "|" "REVIEW(v!)" "DONE(d!)" "ABANDON(a!)") ; For Writings
|
||||
(sequence
|
||||
"LOOP(l)"
|
||||
"PERM(e)" ; A task that can always be worked on
|
||||
"|"
|
||||
"RELOOP(d)")
|
||||
(sequence
|
||||
"[ ](T)" ; A task that needs doing
|
||||
"[-](S)" ; Task is in progress
|
||||
"[?](W)" ; Task is being held up or paused
|
||||
"|"
|
||||
"[X](D)") ; Task was completed
|
||||
(sequence
|
||||
"|"
|
||||
"OKAY(o)"
|
||||
"YES(y)"
|
||||
"NO(n)"))
|
||||
org-todo-keyword-faces
|
||||
'(("[-]" . +org-todo-active)
|
||||
("STRT" . +org-todo-active)
|
||||
("[?]" . +org-todo-onhold)
|
||||
("WAIT" . +org-todo-onhold)
|
||||
;("REVIEW" . +org-todo-onhold)
|
||||
("HOLD" . +org-todo-onhold)
|
||||
("PROJ" . +org-todo-project)
|
||||
("NO" . +org-todo-cancel)
|
||||
("ABANDON" . +org-todo-cancel)
|
||||
("KILL" . +org-todo-cancel)))
|
||||
|
||||
;; Org startup - https://orgmode.org/manual/In_002dbuffer-Settings.html
|
||||
(setq org-startup-folded 'show2levels
|
||||
|
@ -452,12 +493,16 @@ Version 2019-11-04 2021-02-16"
|
|||
(if (= pri-value pri-current)
|
||||
subtree-end
|
||||
nil)))
|
||||
|
||||
(setq org-agenda-custom-commands
|
||||
'(("d" "Daily agenda and all TODOs"
|
||||
((tags "PRIORITY=\"A\""
|
||||
'(("d" "Daily agenda and all TODOs" (
|
||||
(tags "PRIORITY=\"A\""
|
||||
((org-agenda-skip-function '(org-agenda-skip-entry-if 'todo 'done))
|
||||
(org-agenda-overriding-header "High-priority unfinished tasks:")))
|
||||
(agenda "" ((org-agenda-ndays 1)))
|
||||
(org-agenda-overriding-header "DO NOW:")))
|
||||
(agenda "" ((org-agenda-start-day nil) (org-agenda-span 4)))
|
||||
(tags "PRIORITY=\"B\""
|
||||
((org-agenda-skip-function '(org-agenda-skip-entry-if 'todo 'done))
|
||||
(org-agenda-overriding-header "Important:")))
|
||||
(alltodo ""
|
||||
((org-agenda-skip-function '(or (air-org-skip-subtree-if-habit)
|
||||
(air-org-skip-subtree-if-priority ?A)
|
||||
|
@ -483,6 +528,7 @@ Version 2019-11-04 2021-02-16"
|
|||
(use-package! org-journal
|
||||
;; Prompt after idleness - Focused? ETC? (Pragmatic Programmer)
|
||||
:init
|
||||
|
||||
(setq org-journal-file-type 'monthly
|
||||
org-journal-file-format "%Y%m.org"
|
||||
org-journal-created-property-timestamp-format time-stamp-format
|
||||
|
@ -490,6 +536,7 @@ Version 2019-11-04 2021-02-16"
|
|||
org-journal-date-format (concat "[" time-stamp-bare " %3a]")
|
||||
org-journal-time-format "%02H "
|
||||
)
|
||||
|
||||
:config
|
||||
; TODO map njj to open-or-create-entry
|
||||
|
||||
|
@ -771,6 +818,7 @@ This is 0.3 red + 0.59 green + 0.11 blue and always between 0 and 255."
|
|||
:n "q" 'kill-this-buffer
|
||||
:map image-mode-map
|
||||
"<tab>" 'other-window
|
||||
:n "D" 'doom/delete-this-file ; Follow by dired-find-file to select next?
|
||||
:n "q" 'kill-this-buffer
|
||||
:n "+" 'image-increase-size
|
||||
:n "-" 'image-decrease-size
|
||||
|
@ -811,11 +859,16 @@ This is 0.3 red + 0.59 green + 0.11 blue and always between 0 and 255."
|
|||
(call-process "xdg-open" nil 0 nil file)
|
||||
(find-file file))))
|
||||
|
||||
; maybe add +org/close-fold
|
||||
(map! ; what about closing popup buffers first, like debugger-mode :n "<escape>" (lambda () (interactive) (if (eq major-mode 'org-mode) (condition-case nil (org-up-element) (error (dired-jump))) (dired-jump)))
|
||||
:leader
|
||||
"." 'dired-jump)
|
||||
|
||||
(map! :map dired-mode-map
|
||||
:n "RET" 'dired-find-file-dwim
|
||||
:n "l" 'dired-find-file-dwim
|
||||
:n "h" 'dired-up-directory
|
||||
:n "ö" 'evil-ex-search-forward
|
||||
:n "l" 'dired-find-file-dwim
|
||||
:n "h" 'dired-up-directory
|
||||
:n "ö" 'evil-ex-search-forward
|
||||
:localleader
|
||||
:desc "Compress/Extract" "c" 'dired-do-compress
|
||||
:desc "Size information" "s"
|
||||
|
@ -924,22 +977,22 @@ This is 0.3 red + 0.59 green + 0.11 blue and always between 0 and 255."
|
|||
|
||||
;;; File Editing Modes
|
||||
|
||||
;(setq initial-major-mode 'org-mode)
|
||||
;(add-to-list 'auto-mode-alist '("/journal/" . org-mode))
|
||||
;(add-to-list 'auto-mode-alist '("\\.jrnl\\'" . org-mode))
|
||||
;
|
||||
;(add-to-list 'auto-mode-alist '("\\.el##" . emacs-lisp-mode))
|
||||
;(add-to-list 'auto-mode-alist `(,(getenv "CONFIG_SHELLS") . sh-mode))
|
||||
;;(add-to-list 'auto-mode-alist `(,(getenv "CONFIG_ZSH") . sh-mode))
|
||||
;(add-to-list 'auto-mode-alist `("\\.local/bin" . sh-mode))
|
||||
;
|
||||
;;(add-to-list 'auto-mode-alist '("\\.twee\\'" . twee-chapbook-mode))
|
||||
;;(add-hook 'twee-chapbook-mode-hook 'twee-mode)
|
||||
;;
|
||||
;;;(add-to-list 'auto-mode-alist `("\\.scss.erb\\'" . scss-mode))
|
||||
;;(add-to-list 'auto-mode-alist '("\\.html\\'" . web-mode))
|
||||
;
|
||||
;(add-hook 'pdf-view-mode-hook 'auto-revert-mode)
|
||||
(setq initial-major-mode 'org-mode)
|
||||
(add-to-list 'auto-mode-alist '("/journal/" . org-mode))
|
||||
(add-to-list 'auto-mode-alist '("\\.jrnl\\'" . org-mode))
|
||||
|
||||
(add-to-list 'auto-mode-alist '("\\.el##" . emacs-lisp-mode))
|
||||
(add-to-list 'auto-mode-alist `(,(or (getenv "CONFIG_SHELLS") "\\.config/shell"). sh-mode))
|
||||
(add-to-list 'auto-mode-alist `(,(or (getenv "CONFIG_ZSH") "\\.config/zsh") . sh-mode))
|
||||
(add-to-list 'auto-mode-alist `("\\.local/bin" . sh-mode))
|
||||
|
||||
;(add-to-list 'auto-mode-alist '("\\.twee\\'" . twee-chapbook-mode))
|
||||
;(add-hook 'twee-chapbook-mode-hook 'twee-mode)
|
||||
|
||||
(add-to-list 'auto-mode-alist `("\\.scss.erb\\'" . scss-mode))
|
||||
(add-to-list 'auto-mode-alist '("\\.html\\'" . web-mode))
|
||||
|
||||
(add-hook 'pdf-view-mode-hook 'auto-revert-mode)
|
||||
|
||||
(use-package! web-mode
|
||||
:mode "\\.html\\'"
|
||||
|
@ -1075,11 +1128,6 @@ This is 0.3 red + 0.59 green + 0.11 blue and always between 0 and 255."
|
|||
:on-error (lambda (&rest _) (message "")))
|
||||
)
|
||||
|
||||
(after! spell-fu
|
||||
(remove-hook 'text-mode-hook #'spell-fu-mode)
|
||||
)
|
||||
(setq ispell-personal-dictionary (expand-file-name "personal-dictionary" custom-emacs-data-dir))
|
||||
|
||||
(use-package! rdictcc
|
||||
:if (locate-library "rdictcc")
|
||||
:bind (("C-c t". 'rdictcc-translate-word-at-point)
|
||||
|
@ -1178,15 +1226,20 @@ This is 0.3 red + 0.59 green + 0.11 blue and always between 0 and 255."
|
|||
smtpmail-smtp-service 1025
|
||||
smtpmail-stream-type 'ssl))
|
||||
|
||||
;(with-eval-after-load "ispell"
|
||||
; (setq ispell-program-name "hunspell")
|
||||
; (setq hunspell-default-dict "en_US")
|
||||
; (setq ispell-dictionary "en_US,de_DE")
|
||||
; ;; ispell-set-spellchecker-params has to be called
|
||||
; ;; before ispell-hunspell-add-multi-dic will work
|
||||
; (ispell-set-spellchecker-params)
|
||||
; (ispell-hunspell-add-multi-dic ispell-dictionary)
|
||||
; )
|
||||
(after! spell-fu
|
||||
(remove-hook 'text-mode-hook #'spell-fu-mode)
|
||||
)
|
||||
(setq ispell-personal-dictionary (expand-file-name "personal-dictionary" custom-emacs-data-dir))
|
||||
|
||||
(with-eval-after-load "ispell"
|
||||
(setq ispell-program-name "hunspell")
|
||||
(setq hunspell-default-dict "en_US")
|
||||
(setq ispell-dictionary "en_US,de_DE")
|
||||
;; ispell-set-spellchecker-params has to be called
|
||||
;; before ispell-hunspell-add-multi-dic will work
|
||||
(ispell-set-spellchecker-params)
|
||||
(ispell-hunspell-add-multi-dic ispell-dictionary)
|
||||
)
|
||||
|
||||
;; Here are some additional functions/macros that could help you configure Doom:
|
||||
;;
|
||||
|
@@ -8,3 +8,11 @@ PlantUML
animalistic
intellectualness
superstimuli
Stackspin
Zulip
Gitea
Vikunja
Nextcloud
HedgeDoc
Gantt
Kanban
@@ -145,7 +145,7 @@
;;nim ; python + lisp at the speed of c
;;nix ; I hereby declare "nix geht mehr!"
;;ocaml ; an objective camel
(org +pretty +dragndrop +journal +roam2 +pomodoro +noter +present +pandoc) ; organize your plain life in plain text
(org +pretty +dragndrop +journal +roam2 +pomodoro +noter +present) ;+pandoc) ; organize your plain life in plain text
;;php ; perl's insecure younger brother
plantuml ; diagrams for confusing people more
;;purescript ; javascript, but functional
@@ -19,8 +19,10 @@
(if (file-equal-p command-line-default-directory "~")
(setq default-directory org-directory))
(setq org-agenda-files (append
(directory-files-recursively (expand-file-name "project/" org-directory) ".org\\'")
(list (expand-file-name "inbox/"))
;(directory-files-recursively (expand-file-name "project/" org-directory) ".org\\'")
;(list (expand-file-name "inbox/"))
(list (expand-file-name "agenda/"))
(directory-files-recursively (expand-file-name "uni/" org-directory) ".org\\'")
))
))
@@ -15,13 +15,14 @@ matches:
Janek
- trigger: ";greb"
replace: |-
Grüße aus Bayreuth,
Grüße,
Janek Fischer

--
---
Technischer Leiter https://software-challenge.de, deutscher Programmierwettbewerb für Schüler
CTO der FTT Forensic Discovery GmbH - gesunde IT aus Bayreuth: https://forensicdiscovery.de
Software Engineering Student @ https://code.berlin
Privater Blog: https://barelybuggy.blog
- trigger: ";grer"
replace: |-
Greetings from Bavaria,
@@ -1,5 +1,14 @@
# Sensitive data through pass
matches:
- trigger: ";env"
replace: "{{output}}"
vars:
- name: output
type: shell
params:
cmd: env
shell: bash
debug: true
# {{{1 EMAILS
- trigger: ";ep"
replace: "{{output}}"
@@ -7,28 +16,28 @@ matches:
- name: output
type: shell
params:
cmd: pass info/email/personal
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/email/personal
- trigger: ";ec"
replace: "{{output}}"
vars:
- name: output
type: shell
params:
cmd: pass info/email/church
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/email/church
- trigger: ";eu"
replace: "{{output}}"
vars:
- name: output
type: shell
params:
cmd: pass info/email/uni
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/email/uni
- trigger: ";eaur"
replace: "{{output}}"
vars:
- name: output
type: shell
params:
cmd: pass info/email/aur
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/email/aur
# {{{1 ADDRESS
- trigger: ";add"
replace: "{{output}}"
@@ -36,7 +45,7 @@ matches:
- name: output
type: shell
params:
cmd: pass info/address
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/address
- triggers:
- ";adb"
- ";adt"
@@ -45,7 +54,7 @@ matches:
- name: output
type: shell
params:
cmd: pass info/address-bt
cmd: PASSWORD_STORE_DIR=$HOME/.local/share/pass pass info/address-bt
- triggers:
- ";adw"
- ";ads"
@@ -12,3 +12,4 @@ node_modules/
build/
generated/
out/
.git/
@@ -32,10 +32,11 @@
tool = nvim
submodule = log
context = 2
renames = copy
renames = true
colorMoved = dimmed-zebra
colorMovedWS = allow-indentation-change
[merge]
renames = copy
tool = nvim
[mergetool "nvim"]
cmd = nvim -f -c \"Gdiffsplit!\" \"$MERGED\"
@@ -85,7 +86,7 @@
# status
s = !git stl && git stb
st = stb .
stb = status --short --branch --find-renames=.2
stb = status --short --branch --find-renames
sv = --paginate status -v
svv = --paginate status -vv

@@ -111,12 +112,13 @@
lg = lgr HEAD @{push}
lo = log --pretty=tformat:'%C(auto)%h%d %s %Cgreen(%cd) %Cblue<%an>%Creset' --date=human --no-merges
lp = log -p --date=local # Like --full-diff
my = lo --author "xeru\\|anek"
my = lo --author "xeru\\|anek\\|melonion"

standup = my --since yesterday --all
co-authors = !git log | grep -i Co-Authored | awk '!a[$0]++'
when = !git for-each-ref --sort=committerdate --format='%(refname:short) * %(authorname) * %(committerdate:relative)' refs/remotes/ # List all branches with their last updates

tags = !git for-each-ref --color=always --format='%(color:yellow)%(tag)%(color:default)%09 %(subject) %(color:green)(%(taggerdate:format:%Y-%m-%d))' refs/tags | sort -V | column -s $'\t' -t
find = log --patch --all --full-history # Find a filename in all git knows
ref = reflog show --pretty=tformat:'%C(auto)%h%d %s %Cgreen(%cd) %Cblue<%an>%Creset'
refg = log --reflog --graph --pretty=tformat:'%C(auto)%h%d %s %Cgreen(%cd) %Cblue<%an>%Creset'
@@ -145,6 +147,7 @@
cme = commit -v --edit --message # Commit with message from CLI but edit it
cad = !git diff-tree --no-commit-id --name-only -r HEAD | git commit -v --amend --pathspec-from-file=- # Amend commit with all already changed files
cap = !git commit --amend --no-edit && git push --force-with-lease
journal = !git -C $DATA commit -v $DATA/2-box/journal*

cp = cherry-pick
cpc = cherry-pick --continue
@@ -27,7 +27,8 @@ if beginswith $'\n#' "$original"; then
expr substr "$common" 1 "$count" | sed 's|.local/bin/scripts|bin|') || exit 0
case "$path" in ([0-9]-*) path="${path#*-}";; esac
{
echo "$path" | sed 's|^\.||;s|/\?$|: |'
# Remove initial dot and trailing slash/dash/underscore
echo "$path" | sed 's|^\.||;s|[/_-]\?$|: |'
echo "$original"
} > "$COMMIT_MSG_FILE"
fi
@@ -11,10 +11,10 @@ highlight: true
indent_character: '|'
journals:
default: ~/data/2-box/journal/jrnl.txt
tug: ~/data/2-box/journal/tug.txt
intentions: ~/data/2-box/journal/intentions.txt
nug: ~/data/2-box/journal/nug.txt
linewrap: 99
tagsymbols: '@'
template: false
timeformat: '%Y-%m-%d %a %H:%M'
version: v2.8.4
version: v4.1
@@ -29,10 +29,10 @@ audio/mpeg=mpc.desktop;
audio/x-wav=mpc.desktop;
image/jpeg=org.kde.showfoto.desktop;
image/png=org.kde.showfoto.desktop;
inode/directory=org.kde.dolphin.desktop;
inode/directory=pcmanfm.desktop;
text/html=firefox.desktop;
x-scheme-handler/discord-757737740241731714=discord-757737740241731714.desktop;
x-scheme-handler/etcher=balena-etcher-electron.desktop;
x-scheme-handler/etcher=balena-etcher.desktop
x-scheme-handler/ferdi=ferdi.desktop
x-scheme-handler/http=firefox.desktop;
x-scheme-handler/https=firefox.desktop;
@@ -43,3 +43,4 @@ x-scheme-handler/signalcaptcha=signal.desktop;
x-scheme-handler/tg=telegram.desktop;
video/mp4=vlc.desktop;
text/calendar=emacsclient.desktop;
video/x-m4v=vlc.desktop
@@ -37,11 +37,14 @@ let g:firenvim_config = {
\ '.*calendar\.google\.com.*': { 'priority': 9, 'takeover': 'empty', },
\ '.*docs\.google\.com.*': { 'priority': 9, 'takeover': 'never', },
\ '.*contacts\.google\.com.*': { 'priority': 9, 'takeover': 'never', },
\ '.*notion\.so.*': { 'priority': 9, 'takeover': 'never', },
\ '.*cloud\.atomtoast\.xyz.*': { 'priority': 9, 'takeover': 'never', },
\ 'https://bigbluebutton.*': { 'priority': 9, 'takeover': 'never', },
\ 'https://dhall-lang.org.*': { 'priority': 9, 'takeover': 'never', },
\ 'https://aur.archlinux.org/account.*': { 'priority': 9, 'takeover': 'never', },
\ '://notion\.so.*': { 'priority': 9, 'takeover': 'never', },
\ '://bbb\..*': { 'priority': 9, 'takeover': 'never', },
\ '://bigbluebutton.*': { 'priority': 9, 'takeover': 'never', },
\ '://dhall-lang.org.*': { 'priority': 9, 'takeover': 'never', },
\ '://aur.archlinux.org/account.*': { 'priority': 9, 'takeover': 'never', },
\ '.*\.hobbyfarm\..*': { 'priority': 9, 'takeover': 'never', },
\ '://rancher\..*': { 'priority': 9, 'takeover': 'never', },
\
\ '.*stackexchange\.com.*': { 'priority': 9, 'takeover': 'never', },
\ '.*stackoverflow\.com.*': { 'priority': 9, 'takeover': 'never', },
@@ -63,6 +63,9 @@ Plug 'HiPhish/info.vim'

call plug#end()

let g:python_host_prog = '/usr/bin/python'
let g:python3_host_prog = '/usr/bin/python3'

" Install plugins automatically after installing plug
if plug_install
PlugInstall --sync
@@ -30,6 +30,7 @@ yzf() {

# List installable packages into fzf and install selection
yas() {
# TODO also filter by provides field
cache_dir="/tmp/yas-$USER"
case "$1" in (-y*) rm -rf "$cache_dir"; shift; param=$1;; esac
mkdir -p "$cache_dir"
|
@ -85,6 +85,10 @@ xtrace() {
|
|||
set +x
|
||||
}
|
||||
|
||||
retry() {
|
||||
while ! "$@"; do sleep 2; done
|
||||
}
|
||||
|
||||
# Shows source for given command, resolving nested aliases
|
||||
wh() {
|
||||
local res=$(which "$@" 2>/dev/null || type "$@") || return $?
|
||||
|
@ -245,12 +249,14 @@ alias dt='python -c "import time;print(time.asctime(time.localtime()))"'
|
|||
|
||||
# Process
|
||||
alias println='printf "\n"'
|
||||
alias myip="curl ifconfig.me && println && curl icanhazip.com"
|
||||
alias dedup='awk '"'"'!a[$0]++'"'"
|
||||
alias dedup-lines='awk '"'"'!a[$0]++'"'"
|
||||
alias lar='last | tac'
|
||||
alias lst='( last; last -f /var/log/wtmp.1 ) | grep -v "pts/" | tac | less +G'
|
||||
alias hedgedoc="tmux kill-session -t hedgedoc; builtin cd '$d4/dev/_forks/hedgedoc' && tmux new-session -s hedgedoc -d 'yarn run dev' \; split-window -h 'nodemon --watch app.js --watch lib --watch locales --watch config.json app.js' \; ls"
|
||||
|
||||
alias myip='curl ifconfig.me && println && curl icanhazip.com'
|
||||
alias ips='ip -br address && ip route'
|
||||
|
||||
alias rm='rm -I'
|
||||
del() {
|
||||
# TODO use current partition and clean on reboot via cron
|
||||
|
@ -306,7 +312,7 @@ dns() {
|
|||
local dig="drill -Q"
|
||||
which drill >/dev/null || dig="dig +short"
|
||||
local server=1.1.1.1
|
||||
# allow changing DNS server with @
|
||||
# TODO allow changing DNS server with @
|
||||
# TODO implement rdns via -x
|
||||
for arg; do
|
||||
local trimmed="${arg##*//}"
|
||||
|
@ -329,11 +335,13 @@ sshl() {
|
|||
ssh -G "$1" | grep --silent "^user root$" &&
|
||||
! [[ "$1" =~ "pve.*" ]] &&
|
||||
! [[ "$1" =~ "encee.*" ]] &&
|
||||
! [[ "$1" =~ ".*fmh.*" ]] &&
|
||||
! [[ "$1" =~ "tmtgw" ]] &&
|
||||
! [[ "$1" =~ "delta*" ]] &&
|
||||
! [[ "$1" =~ "nc-*" ]] &&
|
||||
! [[ "$1" =~ "ddns*" ]] &&
|
||||
local pass=pass
|
||||
test "$all" &&
|
||||
find $XDG_CONFIG_HOME/bash/ \( -name aliases.bash -o -name "$1*.bash" \) -exec cat {} + | $pass ssh "$1" 'cat > .bash_aliases && grep -q .bash_aliases .bashrc || echo "source ~/.bash_aliases" | tee -a .bashrc' &&
|
||||
find $XDG_CONFIG_HOME/bash/ \( -name aliases.bash -o -name "${1%[0-9a-z]}*.bash" \) -exec cat {} + | $pass ssh "$@" 'cat > .bash_aliases && grep -q .bash_aliases .bashrc || echo "source ~/.bash_aliases" | tee -a .bashrc' &&
|
||||
echo 'Updated .bash_aliases!'
|
||||
#$pass ssh "$1" 'grep -q ".bash_aliases" .bashrc || echo "source ~/.bash_aliases" >>.bashrc'
|
||||
if test -n "$pass"
|
||||
|
@ -343,7 +351,7 @@ sshl() {
|
|||
ssh-copy-id -i "$(ssh -G "$1" |
|
||||
grep --max-count 1 "^identityfile " |
|
||||
cut -d " " -f2- |
|
||||
sed "s|^~|$HOME|")" "$1" &&
|
||||
sed "s|^~|$HOME|")" "$@" &&
|
||||
touch "$file"
|
||||
#TERM=xterm-256color
|
||||
sshk -R 2489:127.0.0.1:2489 "$@"
|
||||
|
@ -390,7 +398,7 @@ alias grpc='grep --color=auto --line-number --binary-files=without-match --direc
|
|||
alias grp='grpc --ignore-case'
|
||||
# Grep recursively and paginate
|
||||
# TODO remove some prefixes \([^ ]*/\)\?
|
||||
grpr() { grp --color=always --recursive '--exclude=*.sql' $(echo $DIRS_IGNORE | sed 's|-x |--exclude-dir |g') "$@" | less -FX; }
|
||||
grpr() { grp --color=always --recursive '--exclude=*.sql' $(echo $DIRS_IGNORE | sed 's|-x |--exclude-dir |g') "$@" | cut -c-$(expr $COLUMNS \* 3) | less -FX; }
|
||||
# Grep in shell config files
|
||||
grsh() {
|
||||
local search=$1
|
||||
|
@ -403,14 +411,14 @@ grsh() {
|
|||
|
||||
# Recover stray swap files from neovim
|
||||
vrec() {
|
||||
find "$XDG_DATA_HOME/nvim/swap" -name "*$1*" |
|
||||
find "$XDG_STATE_HOME/nvim/swap" -name "*$1*" -printf '%f%n' |
|
||||
sed 's/\%/\//g' | sed 's|\(.*\)\..*|\1|' |
|
||||
xargs --no-run-if-empty nvim
|
||||
}
|
||||
alias vrecd="ls $XDG_DATA_HOME/nvim/swap | head -1 | xargs -r -i mv {} /tmp"
|
||||
|
||||
# I think this was something about recovering backup files
|
||||
unv() { strings $1 | sed 's/5$//' | dedup; }
|
||||
unv() { strings $1 | sed 's/5$//' | dedup-lines; }
|
||||
|
||||
alias hx='sudo hexedit --maximize --color'
|
||||
# Paginated hexyl
|
||||
|
@ -518,7 +526,7 @@ alias rcr='rcs --compress --size-only --times'
|
|||
alias rcs='rsync --recursive --info=progress2,remove,symsafe,flist,del --human-readable'
|
||||
alias dsync='rc --delete --specials'
|
||||
alias move='rc --remove-source-files'
|
||||
alias rdiff='rsync --recursive --progress --delete --links --dry-run'
|
||||
alias rdiff='rsync --recursive --checksum --delete --links --dry-run'
|
||||
alias rdiffe='rdiff --existing --size-only'
|
||||
|
||||
# Swap the names of two files
|
||||
|
@ -641,3 +649,15 @@ stopswap() {
|
|||
echo "Not enough free memory!"
|
||||
fi
|
||||
}
|
||||
|
||||
extrac() {
|
||||
test "$#" -lt 1 && echo "$0 <archive.vma[.lzo]>..." && return 2
|
||||
for var; do
|
||||
arg=${var%.lzo}
|
||||
out=$(basename ${arg%.vma})
|
||||
lzop -x $arg.lzo
|
||||
vma.py $arg $out ||
|
||||
vma.py $(basename $arg) $out &&
|
||||
( cd $out && find drive-* -exec qemu-img convert {} -O qcow2 $out-{}.qcow2 \; )
|
||||
done
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ command -v notify-send >/dev/null &&
|
|||
alias alert='notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$(echo "$history[$HISTCMD]" | sed -e '\''s/^\s*[0-9]\+\s*//;s/[;&|]\s*alert$//'\'')"'
|
||||
|
||||
if which jrnl >/dev/null; then
|
||||
alias j='jrnl'
|
||||
j(){ jrnl "$@" && jrnl-tag; }
|
||||
jn() { jrnl -to today "$@" | less --exit-follow-on-close +F; }
|
||||
alias jnc='jn -contains'
|
||||
alias jne='jrnl --edit'
|
||||
|
@ -32,9 +32,6 @@ mozedit() (
|
|||
|
||||
test "$XDG_CURRENT_DESKTOP" = "KDE" || return 0
|
||||
|
||||
# Fix errors regarding broken mim database
|
||||
alias fixmime='sudo rm /usr/share/mime/packages/kde.xml && sudo update-mime-database /usr/share/mime'
|
||||
|
||||
# Switch theme across system: KDE, Konsole, taskwarrior, bat
|
||||
# WIP: Emacs, Mozilla, diffr (git pager)
|
||||
theme_save="$XDG_CACHE_HOME/theme"
|
||||
|
@ -75,3 +72,6 @@ themeswitch() {
|
|||
export THEME=$(cat "$theme_save" 2> /dev/null)
|
||||
export THEME=${THEME:-$theme_default}
|
||||
themeswitch $THEME
|
||||
|
||||
# Fix errors regarding broken mim database
|
||||
alias fixmime='sudo rm /usr/share/mime/packages/kde.xml && sudo update-mime-database /usr/share/mime'
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
test -z "$DISPLAY" && test "$XDG_VTNR" -lt 1 || return 0
|
||||
velero_backup() {
|
||||
#name=$(date +%y%m%d.%h%m) && velero create backup $name --exclude-namespaces velero --wait && velero backup logs $name'
|
||||
name=$(date +%y%m%d.%H%M)
|
||||
velero create backup $name --exclude-namespaces velero --wait
|
||||
velero backup logs $name
|
||||
}
|
||||
|
||||
export PATH="$PATH:$HOME/.local/bin/server"
|
||||
export PROJECTS="$HOME/projects"
|
||||
test -d "$MUSIC" || export MUSIC="/srv/funkwhale/data/music/janek"
|
||||
export PROJECTS="${PROJECTS:-$HOME/projects}"
|
||||
|
||||
## STACKSPIN
|
||||
export STACKSPIN="${STACKSPIN:-$PROJECTS/stackspin}"
|
||||
|
@ -16,10 +19,11 @@ _stackspin_cluster_cache=/var/tmp/stackspin-cluster
|
|||
# Hostname [IP]
|
||||
# This is a function so it can change directory.
|
||||
stack() {
|
||||
cmdname=${FUNCNAME:-$0}
|
||||
local pod_suffix='-\(0\|[0-f]\+\)'
|
||||
if test $# -lt 1; then
|
||||
builtin cd "$STACKSPIN"
|
||||
echo "Usage: $0 <COMMAND> [args...]"
|
||||
builtin cd "$STACKSPIN" || cd /mnt/b/media/backups/servers/stackspin/2310_stackspin
|
||||
echo "Usage: $cmdname <COMMAND> [args...]"
|
||||
echo "Stackspin commands: select, sso, user, push"
|
||||
echo "Kubepod commands: pod, exec, app, shell, ls, logs, upload"
|
||||
echo "App commands: occ, vikunja"
|
||||
|
@ -36,49 +40,161 @@ stack() {
|
|||
export KUBECONFIG="$CLUSTER_DIR/kube_config_cluster.yml"
|
||||
# Uncomment the line below to always use the main stackspin repo, even when running in a fork.
|
||||
#export GITLAB_CI="true"
|
||||
echo Selected "$_cluster_name" with IP "$_cluster_ip"
|
||||
echo Selected Stackspin cluster "$_cluster_name" with IP "$_cluster_ip"
|
||||
echo "$_cluster_name" >"$_stackspin_cluster_cache"
|
||||
#test "$PWD" = "$HOME" && builtin cd "$STACKSPIN"
|
||||
test -d "$STACKSPIN" && . $STACKSPIN/env/bin/activate
|
||||
;;
|
||||
(sso) builtin cd "$STACKSPIN"
|
||||
"$0" exec dashboard --container backend -- flask "$@";;
|
||||
(sso) "$cmdname" exec dashboard-backend -- flask "$@";;
|
||||
(users)
|
||||
if test $# -gt 0
|
||||
then for arg
|
||||
do "$0" user show $arg
|
||||
if test "$1" = "delete"
|
||||
then shift
|
||||
for arg
|
||||
do "$cmdname" user delete "$arg"
|
||||
done
|
||||
else "$0" users $("$0" user list | sed 's|.*<\(.*\)>.*|\1|')
|
||||
elif test $# -gt 0
|
||||
then
|
||||
for arg
|
||||
do "$cmdname" user show $arg
|
||||
done
|
||||
else "$cmdname" users $("$cmdname" user list | sed 's|.*<\(.*\)>.*|\1|')
|
||||
fi;;
|
||||
(user|app)
|
||||
if test "$1" = "init"
|
||||
then mail="$2"
|
||||
shift 2
|
||||
"$0" user create "$mail"
|
||||
"$0" user update "$mail" name "$*"
|
||||
"$cmdname" user create "$mail" &&
|
||||
"$cmdname" user update "$mail" name "$*" &&
|
||||
echo "Initialized user '$*' with email '$mail'"
|
||||
else "$0" sso cli "$command" "$@"
|
||||
else "$cmdname" sso cli "$command" "$@"
|
||||
fi;;
|
||||
(invite) (
|
||||
# Mail invitation to new users
|
||||
export mail=$1
|
||||
export name=${2:-$(echo $mail | sed -E 's/(.*)\.(.*)@.*/\u\1 \u\2/' )}
|
||||
#echo "$mail,$name"
|
||||
stack user init "$mail" "$name"
|
||||
stack-invite
|
||||
);;
|
||||
(push)
|
||||
git commit -a "$@"
|
||||
test -f "$1" && $EDITOR "$1"
|
||||
# Allow force: https://open.greenhost.net/xeruf/stackspout/-/settings/repository#js-protected-branches-settings
|
||||
git commit "$@"
|
||||
git push &&
|
||||
flux reconcile source git -n flux-system "$(basename $(git rev-parse --show-toplevel))"
|
||||
flux reconcile kustomization -n flux-system "$(basename $(git rev-parse --show-toplevel))";;
|
||||
# KUBE
|
||||
# app clis
|
||||
(occ) "$0" exec nc-nextcloud -c nextcloud -it -- su www-data -s /bin/bash -c "php $command $*";;
|
||||
(vikunja) local pod=${2:-vikunja}
|
||||
flux reconcile kustomization -n flux-system "$(basename $(git rev-parse --show-toplevel))"
|
||||
;;
|
||||
# FLUX
|
||||
(flux)
|
||||
case "$1" in
|
||||
(dump|export) cd "$PROJECTS/vikunja"
|
||||
"$0" exec "$pod" -c api -- sh -c 'rm -f *.zip && ./vikunja dump >/dev/null && ls --color -lAhF >&2 && cat *.zip' >"$pod-dump_$(date +%F).zip"
|
||||
(env) # Apply changes to .flux.env
|
||||
kubectl apply -k "$CLUSTER_DIR"
|
||||
flux reconcile -n flux-system kustomization velero
|
||||
flux get -A kustomizations --no-header | awk -F' ' '{system("flux reconcile -n " $1 " kustomization " $2)}'
|
||||
;;
|
||||
(restore) "$0" upload "$pod" "$3" -c api
|
||||
"$0" exec "$pod" -c api -it -- ./vikunja restore "$3"
|
||||
;;
|
||||
(psql) kubectl exec -it -n $("$0" pod "$pod-postgresql") -- sh -c "PGPASSWORD=$(kubectl get secret --namespace stackspout $pod-postgresql -o jsonpath='{.data.postgresql-password}' | base64 --decode) psql -h localhost -U vikunja -p 5432 vikunja";;
|
||||
(*) echo "Unknown Subcommand";;
|
||||
esac
|
||||
;;
|
||||
(maria) app=$1
|
||||
(reconcile)
|
||||
app=$1
|
||||
namespace=${2:-stackspout}
|
||||
if flux suspend helmrelease -n $namespace $app
|
||||
then flux resume helmrelease -n $namespace $app
|
||||
else flux suspend helmrelease -n stackspin-apps $app
|
||||
flux resume helmrelease -n stackspin-apps $app
|
||||
fi
|
||||
flux suspend kustomization $app
|
||||
flux resume kustomization $app
|
||||
;;
|
||||
(edit)
|
||||
# Edit the URL for an application
|
||||
app=$1
|
||||
kubectl edit configmap -n flux-system stackspin-$app-kustomization-variables
|
||||
"$0" reconcile $app
|
||||
;;
|
||||
# Velero
|
||||
(restic)
|
||||
(
|
||||
namespace=stackspin
|
||||
case $1 in (-n|--namespace) namespace=$2; shift 2;; esac
|
||||
source $CLUSTER_DIR/.flux.env || exit $?
|
||||
export RESTIC_REPOSITORY="s3:${backup_s3_url}/${backup_s3_bucket}/${backup_s3_prefix}/restic/$namespace"
|
||||
export AWS_ACCESS_KEY_ID="${backup_s3_aws_access_key_id}"
|
||||
export AWS_SECRET_ACCESS_KEY="${backup_s3_aws_secret_access_key}"
|
||||
export RESTIC_PASSWORD="$(kubectl get secret -n velero velero-repo-credentials -o jsonpath='{.data.repository-password}' | base64 -d)"
|
||||
restic "$@"
|
||||
)
|
||||
;;
|
||||
(backup)
|
||||
backupname=$(date +%y%m%d.%H%m)
|
||||
velero create backup $backupname --exclude-namespaces velero --wait
|
||||
velero backup logs $backupname;;
|
||||
(restore)
|
||||
test $# -lt 2 && echo "$0 $command <backup> <app> [namespace]" >&2 && return 1
|
||||
backup=$1; app=$2
|
||||
namespace=${3:-stackspin-apps} # TODO automatically handle stackspout apps
|
||||
restore="${backup}-$app-$(date +%s)"
|
||||
if test "$app" = dashboard
|
||||
then kust=single-sign-on
|
||||
hr="$kust-database"
|
||||
namespace=stackspin
|
||||
else hr="$app"
|
||||
kust="$app"
|
||||
fi
|
||||
flux suspend kustomization $kust
|
||||
flux suspend helmrelease -n $namespace $hr
|
||||
kubectl delete all -n $namespace -l stackspin.net/backupSet=$app
|
||||
kubectl delete secret -n $namespace -l stackspin.net/backupSet=$app
|
||||
kubectl delete configmap -n $namespace -l stackspin.net/backupSet=$app
|
||||
kubectl delete pvc -n $namespace -l stackspin.net/backupSet=$app
|
||||
velero restore create $restore --from-backup=$backup -l stackspin.net/backupSet=$app
|
||||
echo "Waiting a few seconds for $app backup to restore..."
|
||||
sleep 10
|
||||
velero restore describe $restore
|
||||
echo "Press enter if backup is ready to resume flux resources:"
|
||||
read
|
||||
test $app = dashboard && kubectl delete secret -n stackspin hydra && flux reconcile helmrelease -n stackspin hydra
|
||||
flux resume helmrelease -n $namespace $hr # TODO timeout
|
||||
flux resume kustomization $kust
|
||||
;;
|
||||
(restore-pvc)
|
||||
test $# -lt 1 && echo "$0 $command <app> [dir]" >&2 && return 1
|
||||
local app=$1
|
||||
if test -d "$2"
|
||||
then dir="$2"
|
||||
target=$(ssh "$_cluster_name" find /var/lib/Stackspin/local-storage/ -maxdepth 1 -name "*$app")
|
||||
test -z "$target" && echo "No target found for ${app}" && return 1
|
||||
ssh "$_cluster_name" mv -v "$target" "$target.$(date +%s)"
|
||||
rsync --links --hard-links --times --recursive --info=progress2,remove,symsafe,flist,del --human-readable "$dir/" "$_cluster_name:$target/"
|
||||
else
|
||||
for vol in $(ls -d pvc*$app* | cut -d_ -f3 | sort)
|
||||
do "$cmdname" restore-pvc $vol $(find -maxdepth 1 -name "*$vol")
|
||||
done
|
||||
fi
|
||||
;;
|
||||
# KUBE
|
||||
# app clis
|
||||
(occ) "$cmdname" exec nc-nextcloud -c nextcloud -it -- su www-data -s /bin/bash -c "php $command $*";;
|
||||
(vikunja*)
|
||||
local pod=$command
|
||||
case "$1" in
|
||||
(dump|export) cd "$PROJECTS/vikunja"
|
||||
"$cmdname" exec "$pod-api" -- sh -c 'rm -f *.zip && ./vikunja dump >/dev/null && ls --color -lAhF >&2 && cat *.zip' >"$pod-dump_$(date +%F).zip"
|
||||
;;
|
||||
(restore)
|
||||
if ! test -f "$2"
|
||||
then echo "Usage: $0 vikunja[suffix] restore <file>" >&2
|
||||
return 2
|
||||
fi
|
||||
file=$2
|
||||
"$cmdname" upload "$pod-api" "$file"
|
||||
"$cmdname" exec "$pod-api" -it -- ./vikunja restore "$file"
|
||||
;;
|
||||
(psql) kubectl exec -it -n $("$cmdname" pod "$pod-postgresql") -- sh -c "PGPASSWORD=$(kubectl get secret --namespace stackspout $pod-postgresql -o jsonpath='{.data.password}' | base64 --decode) psql -h localhost -U vikunja -p 5432 vikunja";;
|
||||
(*) echo "Unknown $command subcommand";;
|
||||
esac
|
||||
;;
|
||||
(maria)
|
||||
app=$1
|
||||
pw="$(kubectl get secret -n flux-system stackspin-$app-variables --template '{{.data.mariadb_password}}' | base64 -d 2>/dev/null ||
|
||||
kubectl get secret -n flux-system stackspin-$app-variables --template "{{.data.${app}_mariadb_password}}" | base64 -d)"
|
||||
case $app in
|
||||
|
@ -86,32 +202,38 @@ stack() {
|
|||
(wordpress) n=wordpress-database;;
|
||||
(*) n=$app-mariadb;;
|
||||
esac
|
||||
"$0" exec $n -it -- env "MYSQL_PWD=$pw" mysql -u $app "$@";;
|
||||
"$cmdname" exec $n -it -- env "MYSQL_PWD=$pw" mysql -u $app "$@"
|
||||
;;
|
||||
(mariar)
|
||||
name="$1-mariadb"
|
||||
shift
|
||||
"$cmdname" exec "$name" -it -- env "MYSQL_PWD=$(kubectl get secret -n $(kubectl get secret --all-namespaces -o=custom-columns=S:.metadata.namespace,N:.metadata.name --no-headers | grep --color=never -- "$name") -o jsonpath='{.data.mariadb-root-password}' | base64 -d)" mysql -u root "$@"
|
||||
;;
|
||||
# high-level
|
||||
(shell)
|
||||
container=$1
|
||||
shift
|
||||
test "$1" = "-c" && pod=$2 && shift 2
|
||||
"$0" exec "$container" -c "$pod" -it -- /bin/sh "$@";;
|
||||
"$cmdname" exec "$container" -c "$pod" -it -- /bin/sh "$@";;
|
||||
(ls)
|
||||
if test $# -gt 1 && ! [[ "$2" =~ ".*/.*" ]]
|
||||
then "$0" exec "$1" "$2" "$3" -it -- ls -lAhF --group-directories-first "${@:4}"
|
||||
else for container in $("$0" kube get "$1" pod -o "jsonpath={.spec.containers[*].name}")
|
||||
then "$cmdname" exec "$1" "$2" "$3" -it -- ls -lAhF --group-directories-first "${@:4}"
|
||||
else for container in $("$cmdname" kube get "$1" pod -o "jsonpath={.spec.containers[*].name}")
|
||||
do highlight "Listing content of $container" &&
|
||||
"$0" ls "$1" -c "$container" "${@:2}"
|
||||
"$cmdname" ls "$1" -c "$container" "${@:2}"
|
||||
done
|
||||
fi;;
|
||||
(upload)
|
||||
kubectl cp "$2" -n $("$0" pod "$1$pod_suffix"):$2 "${@:3}"
|
||||
"$0" ls "$1" "${@:3}";;
|
||||
(exec) "$0" kube exec "$@";;
|
||||
kubectl cp "$2" -n $("$cmdname" pod "$1$pod_suffix"):$2 "${@:3}"
|
||||
"$cmdname" ls "$1" "${@:3}";;
|
||||
(exec) "$cmdname" kube exec "$@";;
|
||||
(logs) podname=$1
|
||||
shift
|
||||
"$0" kube logs "$podname" | $(command which ${LOGPAGER:-lnav} || { which bat >/dev/null && echo "bat --number -l toml" } || echo 'less -RF') "$@";;
|
||||
"$cmdname" kube logs "$podname" | $(command which ${LOGPAGER:-lnav} || { which bat >/dev/null && echo "bat --number -l toml" } || echo 'less -RF') "$@";;
|
||||
# low-level
|
||||
(kube)
|
||||
test $# -gt 1 || { echo "Please provide a command and pod name" >&2 && return 1; }
|
||||
local pods=$("$0" pod "$2$pod_suffix") || { echo "No pod found for $2" >&2 && return 1; }
|
||||
local pods=$("$cmdname" pod "$2$pod_suffix") || { echo "No pod found for $2" >&2 && return 1; }
|
||||
local subcommand=$1
|
||||
shift 2
|
||||
local commands=()
|
||||
|
@ -123,34 +245,58 @@ stack() {
|
|||
test "$subcommand" = get ||
|
||||
highlight "Running $subcommand on $namespacedpod" >&2
|
||||
local IFS=' '
|
||||
kubectl "$subcommand" "$commands[@]" -n $namespacedpod "$@"
|
||||
kubectl "$subcommand" "${commands[@]}" -n $namespacedpod "$@"
|
||||
done;;
|
||||
(pod)
|
||||
test $# -gt 0 && local podname=$1 && shift
|
||||
kubectl get pods --all-namespaces --field-selector="status.phase=Running" -o=custom-columns=S:.metadata.namespace,N:.metadata.name --no-headers "$@" | grep --color=never -- "$podname";;
|
||||
if ! kubectl get pods --all-namespaces --field-selector="status.phase=Running" -o=custom-columns=S:.metadata.namespace,N:.metadata.name --no-headers "$@" | grep --color=never -- "$podname"
|
||||
then code=$?
|
||||
echo "No pod found for $podname" >&2
|
||||
return $code
|
||||
fi
|
||||
;;
|
||||
# stackspin bare
|
||||
(*) if which "$0-$command" >/dev/null 2>&1
|
||||
then "$0-$command" "$@"
|
||||
(*) if which "$cmdname-$command" >/dev/null 2>&1
|
||||
then "$cmdname-$command" "$@"
|
||||
return $?
|
||||
fi
|
||||
builtin cd "$STACKSPIN"
|
||||
# Since the install command can also be given bare to install stackspin itself
|
||||
if test "$command" = "install"; then
|
||||
if test $# -gt 0
|
||||
then "./install/install-$1.sh" || ./install/install-app.sh "$@"
|
||||
else
|
||||
/usr/bin/python3 -m pip install --upgrade pip
|
||||
/usr/bin/python3 -m pip install -r requirements.txt
|
||||
python3 -m stackspin "$_cluster_name" "$command"
|
||||
fi
|
||||
case "$1" in
|
||||
([a-z]*)
|
||||
for arg
|
||||
do kubectl exec -n stackspin deploy/dashboard -c backend -- flask cli app install "$arg"
|
||||
done;;
|
||||
(""|-*)
|
||||
python3 -m pip install --upgrade pip
|
||||
python3 -m pip install -r requirements.txt
|
||||
python3 -m stackspin "$@" "$_cluster_name" "$command"
|
||||
cp -nv "install/.flux.env.example" "clusters/$_cluster_name/.flux.env" &&
|
||||
$EDITOR "clusters/$_cluster_name/.flux.env"
|
||||
cp -nv install/kustomization.yaml $CLUSTER_DIR/
|
||||
kubectl get namespace flux-system 2>/dev/null || kubectl create namespace flux-system
|
||||
kubectl apply -k $CLUSTER_DIR
|
||||
./install/install-stackspin.sh
|
||||
;;
|
||||
esac
|
||||
else python3 -m stackspin "$_cluster_name" "$command" "$@"
|
||||
fi;;
|
||||
esac
|
||||
}
|
||||
|
||||
cat "$_stackspin_cluster_cache" 2>/dev/null |
|
||||
while read cluster; do stack select "$cluster"; done
|
||||
|
||||
test "$PWD" = "$HOME" && cde "$PROJECTS/ampache-docker"
|
||||
test -z "$DISPLAY" && test "$XDG_VTNR" != 1 || return 0
|
||||
# The following runs only on headless machines
|
||||
|
||||
which kubectl >/dev/null ||
|
||||
{ kubectl() { sudo k3s kubectl "$@"; } && export -f kubectl; }
|
||||
|
||||
export PATH="$PATH:$HOME/.local/bin/server"
|
||||
|
||||
test -d "$MUSIC" || export MUSIC="/srv/funkwhale/data/music/janek"
|
||||
|
||||
test -f "$HOME/.rvm/scripts/rvm" &&
|
||||
source "$HOME/.rvm/scripts/rvm" && # Load RVM into a shell session *as a function*
|
||||
|
|
@@ -1,9 +1,17 @@
# assign [app_id="firefox"] workspace 2
assign [class="sc.gui.final.FinalApp"] workspace 5
assign [title="JViewer.*"] workspace 5
assign [class="sc.*"] workspace 6
assign [class="discord"] workspace 9
assign [app_id="firefox"] number 2
assign [title="JViewer.*"] number 5
assign [class="^sc.*"] number 6
assign [class="sc.gui.final.FinalApp"] number 5

exec swaymsg "workspace 2; exec firefox;"
# Gaming & Communication at the end
assign [class="zoom"] number 7
assign [class="steam"] number 8
assign [class="DeltaChat"] number 8
assign [class="discord"] number 9
#assign [title="Telegram"] number 9
assign [app_id="org.telegram.desktop" tiling] number 9
assign [class="Signal"] number 9

#exec swaymsg "workspace 2; exec firefox;"
exec swaymsg "workspace 3; exec kitty;"
exec swaymsg "workspace 1; exec emacs"
@@ -40,7 +40,7 @@ repos = [
#pull_predefined = false

# Arguments to pass Git when pulling Repositories
arguments = "--rebase --autostash --recurse-submodules"
pull_arguments = "--rebase --autostash --recurse-submodules"

[composer]
#self_update = false
@@ -58,7 +58,7 @@ arguments = "--rebase --autostash --recurse-submodules"

[linux]
# Arguments to pass yay when updating packages
yay_arguments = "--nodiffmenu --ignore android-studio --ignore clion --ignore intellij-idea-ultimate-edition --ignore intellij-idea-ultimate-edition-jre --ignore webstorm --ignore linux --ignore stretchly-xeruf-git --overwrite '/usr/lib/node_modules/npm/**'"
yay_arguments = "--diffmenu=false --overwrite /usr/lib/node_modules/node-gyp/**,/usr/lib/node_modules/npm/**,/usr/lib/** --ignore linux,zoom,qt*,zulip*,webcord*,brave*,*openjfx,android-studio*,clion*,intellij-idea-*,webstorm*,stretchly-xeruf-git"
#trizen_arguments = "--devel"
#enable_tlmgr = true
#emerge_sync_flags = "-q"
@@ -10,7 +10,8 @@ XDG_TEMPLATES_DIR="$HOME/.local/templates"
XDG_DOWNLOAD_DIR="$HOME/data/5-incubator"
XDG_MUSIC_DIR="$HOME/data/4-media/music"
XDG_PICTURES_DIR="$HOME/data/4-media/images"
XDG_VIDEOS_DIR="$HOME/data/4-media/videos"
XDG_SCREENSHOTS_DIR="$HOME/data/4-media/images/screenshots"
XDG_VIDEOS_DIR="$HOME/data/4-media/video"
XDG_DOCUMENTS_DIR="$HOME/data/4-media/multimedia"
XDG_NOTES_DIR="$HOME/data/2-box"
#XDG_PUBLICSHARE_DIR="$HOME/data/3-resources/multimedia"
.config/way-displays/cfg.yaml##default (Normal file, 74 lines)
@@ -0,0 +1,74 @@
# Default cfg.yaml for way-displays.
# Copy this to ~/.config/way-displays/cfg.yaml and edit it to your liking.
#
# See https://github.com/alex-courtis/way-displays/blob/master/doc/CONFIGURATION.md


# Arrange displays in a ROW (default, left to right) or a COLUMN (top to bottom)
ARRANGE: COLUMN

# Align ROWs at the TOP (default), MIDDLE or BOTTOM
# Align COLUMNs at the LEFT (default), MIDDLE or RIGHT
ALIGN: MIDDLE


# The default ORDER is simply the order in which the displays are discovered.
# Define your own.
ORDER:
- 'C24F390'
- 'LOEWE'
- 'DP-4'
- 'VFV'
- 'DP-3'
- 'DP-1'
- 'BOE'

# The default is to scale each display by DPI.
# This may be disabled and scale 1 will be used, unless a SCALE has been specified.
AUTO_SCALE: FALSE

# Auto scale may be overridden for each display.
SCALE:
- NAME_DESC: 'U28E570'
SCALE: 2
- NAME_DESC: 'VFV' # Portable Monitor
SCALE: 1
- NAME_DESC: 'GW2785TC' # CODE Focus
SCALE: 1
- NAME_DESC: 'LOEWE' # CODE Big
SCALE: 2
#- NAME_DESC: 'BOE' # Framework internal
# SCALE: 2

# Override the preferred mode.
# WARNING: this may result in an unusable display. See https://github.com/alex-courtis/way-displays#known-issues-with-workarounds
# for a possible workaround.
MODE:
# Resolution and refresh
#- NAME_DESC: HDMI-A-1
# WIDTH: 1920
# HEIGHT: 1080
# HZ: 60

# Resolution with highest refresh
#- NAME_DESC: 'monitor description'
# WIDTH: 2560
# HEIGHT: 1440

# Highest available
#- NAME_DESC: DP-2
# MAX: TRUE

# Laptop displays usually start with eDP e.g. eDP-1. This may be overridden if
# your laptop is different.
#LAPTOP_DISPLAY_PREFIX: 'eDP'


# One of: ERROR, WARNING, INFO (default), DEBUG
LOG_THRESHOLD: INFO


# Disable the specified displays.
DISABLED:
#- "eDP-1"
@@ -28,7 +28,7 @@
"sudoloop": true,
"timeupdate": false,
"devel": false,
"cleanAfter": true,
"cleanAfter": false,
"provides": true,
"pgpfetch": true,
"upgrademenu": false,
@@ -36,6 +36,6 @@
"diffmenu": true,
"editmenu": false,
"combinedupgrade": true,
"useask": false,
"useask": true,
"batchinstall": false
}
@@ -171,7 +171,7 @@ setopt pipefail
# Empower zmv (see ZSHCONTRIB(1))
alias szmv='sudo zsh -c autoload zmv && zmv'
autoload zmv
alias zmv='noglob zmv'
alias zmv='noglob zmv -v'
alias zmw='noglob zmv -W'
alias zcp='noglob zmv -C'
alias zln='noglob zmv -L'
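As a reminder of what the zmv aliases above enable, a typical invocation (the pattern is purely illustrative):
zmv '(*).jpeg' '$1.jpg'   # rename every *.jpeg to *.jpg; zcp and zln do the same as copy or link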
|
@ -10,8 +10,7 @@
|
|||
# - visual files are displayed with timg
|
||||
# video thumbnails via mtn, pdf pages from pdftoppm
|
||||
# - text files are displayed through bat
|
||||
# Automatically requests elevation through sudo when needed
|
||||
# TODO .raw,.iso,.qcow2
|
||||
# Usually automatically requests elevation through sudo when needed
|
||||
|
||||
set -o pipefail
|
||||
|
||||
|
@ -42,15 +41,22 @@ fileinfo() {
|
|||
tput smso
|
||||
$elevate file --exclude elf -E "$arg"
|
||||
tput rmso
|
||||
$elevate ssh-keygen -l -f "$arg" 2>/dev/null || true
|
||||
# TODO do not grep bitrate but extract properly
|
||||
|
||||
size=$(stat --format=%s "$arg")
|
||||
# Check if SSH key (<10KB then read)
|
||||
if test "$size" -lt 10000
|
||||
then $elevate ssh-keygen -l -f "$arg" 2>/dev/null
|
||||
fi
|
||||
# I think this check is here to avoid scrolling text interpreted as video
|
||||
#probe="$($elevate ffprobe "$arg" 2>&1)"
|
||||
#echo $probe | grep -v -e '00:00:00.04' -e 'ansi' &&
|
||||
{ ! $inspect && $elevate ffprobe -hide_banner "$arg" 2>&1 | grep "bitrate: ....\? " | sed 's/, start:[^,]\+,/,/'; } ||
|
||||
$elevate stat --format "%U:%G %A %s $(
|
||||
# Print media infos over file infos when <2G
|
||||
if ! { test "$size" -lt 2000000000 && $elevate ffprobe -hide_banner "$arg" 2>&1 | grep -E "bitrate: .{3,5} " | sed 's/, start:[^,]\+,/,/'; } || $inspect
|
||||
then $elevate stat --format "%U:%G %A %s $(
|
||||
size="$($elevate unzip -l "$arg" 2>/dev/null | tail -1)" &&
|
||||
echo "(uncompressed $(echo $size | cut -d' ' -f1 | numfmt --to=iec-i --suffix=B))"
|
||||
) - birth %.10w mod %.10y" "$arg" | numfmt --field=3 --to=iec-i --padding=6 --suffix=B
|
||||
fi
|
||||
done
|
||||
tput sgr0
|
||||
}
|
||||
|
@ -68,7 +74,7 @@ for arg; do
|
|||
fi
|
||||
continue
|
||||
fi
|
||||
# amount of columns in a grid
|
||||
# amount of items to display per line in a grid for two lines max
|
||||
grid=$(expr $(tput cols) / \( 25 - \( $# / 2 \) \& $# \< 30 \| 5 \))
|
||||
tmpfile="$prefix/$(basename -- "$arg")_$(dd "if=$arg" bs=512 count=10 2>/dev/null | md5sum | tr -d ' ' || true)"
|
||||
mkdir -p "$prefix"
|
||||
|
@ -93,8 +99,13 @@ for arg; do
|
|||
echo Converting "$arg"
|
||||
convert -flatten "$arg" png:"$tmpfile"
|
||||
timg+=("$tmpfile");;
|
||||
# TODO .raw .img
|
||||
(*/x-iso*|*/x-qemu-disk*) fdisk -l "$arg";;
|
||||
(*\ video/*)
|
||||
suffix=_thumbs.jpg
|
||||
! $inspect &&
|
||||
# TODO sometimes duration mismatch for short videos
|
||||
test $(printf "%.0f" $(ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 "$arg")) -gt 3 &&
|
||||
mtn -q -i -t -W -r$(expr 5 - $# \& $# \< 4 \| 1) -D6 -b 0.6 -c $grid -w $(expr $(tput cols) '*' 20) \
|
||||
-O "$prefix" -o "$suffix" "$arg" &&
|
||||
timg -W "$prefix/$(basename -- "${arg%.*}")$suffix"
|
||||
|
@ -139,7 +150,7 @@ for arg; do
|
|||
fi
|
||||
esac
|
||||
;;
|
||||
(*:\ SQLite\ *\ database*) highlight "Tables" && sqlite3 "$arg" ".tables";;
|
||||
(*:\ SQLite\ *\ database*) highlight "Tables" && sqlite3 "$arg" ".tables";; # TODO for few tables: SELECT * FROM db LIMIT 3; | cut -c-$col
|
||||
(*:\ data) ;;
|
||||
(*\ key) bat+=("$arg");;
|
||||
(*) bat+=("$arg")
|
||||
|
@ -160,7 +171,7 @@ if test "$timg"; then
|
|||
if which timg >/dev/null
|
||||
then $elevate timg $(test "$timga" && echo "-V") --rotate=exif -g $(tput cols)x$(expr $(tput lines) / 2) \
|
||||
$(test $# -gt 1 &&
|
||||
echo "-t0.2 --auto-crop --center $(test $# -lt 20 && echo "--title") --grid=$((grid < $# ? grid : $#))x2") \
|
||||
echo "-t0.2 --auto-crop --center $(test $# -lt 20 && echo "--title") --grid=$(((grid < $# ? grid : $#) / 2))x") \
|
||||
"${timg[@]}" "${timga[@]}" 2>/dev/null || true
|
||||
else for img in "${timg[@]}"
|
||||
do catimg -H $(expr $(tput lines) / 2) $img
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/sh
|
||||
#!/bin/sh -e
|
||||
# Backup linux system and user files with borg
|
||||
# Usage: bag [-n] [--dry-run] [--home] [--root MOUNTED_ROOT]
|
||||
root="/"
|
||||
|
@ -8,8 +8,8 @@ while test $# -gt 0; do
|
|||
(-n) run="arg-test"; shift;;
|
||||
(--dry-run) args="--dry-run --list"; name=test-$(date +%s); shift;;
|
||||
(--root) shift; root="$1"; name="$(basename "$(realpath "$root")")"; echo $root $name; shift;;
|
||||
(--home) "$0" home /home -e "sh:$HOME/data/4-*" -e "sh:$HOME/data/5-*" -e "sh:**/.stfolder" -e "sh:**/0-forks" -e "sh:**/.git"
|
||||
return $?;;
|
||||
(--home) "$0" home /home -e "sh:$HOME/.local/state" -e "sh:$HOME/data/4-*" -e "sh:$HOME/data/5-*" -e "sh:**/.stfolder" -e "sh:**/0-forks" -e "sh:**/.git"
|
||||
exit $?;;
|
||||
(-*) name="${1#-}"; shift;;
|
||||
(*) break;;
|
||||
esac
|
||||
|
@ -17,6 +17,7 @@ done
|
|||
test $# -gt 0 || cd "$root"
|
||||
name="::$(test -n "$name" && echo "$name" || cat /etc/hostname)_${1:-system}_$(date -u +"%y%m%d")"
|
||||
echo "Backing up as $name"
|
||||
# TODO ignore electron caches
|
||||
${run:-sudo --preserve-env=BORG_REPO BORG_PASSPHRASE="$($BORG_PASSCOMMAND)" borg} create --exclude-caches $args \
|
||||
$(echo $DIRS_IGNORE_SAFE -x 'software-challenge/**/build' -x state/emacs -x state/go -x .local/lib -x .cpan -x *cache -x $HOME/.gem |
|
||||
sed 's|-x \([^ ]\+\)|-e sh:**/\1|g') \
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
# Rename a file with a backup suffix or revert it
|
||||
# Add -a to act on multiple files,
|
||||
# otherwise the second arg is used as suffix rather than the default "bak".
|
||||
# TODO no sudo for symlinks
|
||||
if test "$1" = "-a"; then
|
||||
shift
|
||||
for arg; do "$0" "$arg"; done
|
||||
|
|
2  .local/bin/scripts/bluetooth-send  Executable file
@@ -0,0 +1,2 @@
#!/bin/sh -e
bluetoothctl devices
@ -1,5 +1,6 @@
|
|||
#!/bin/sh
|
||||
# Write a CACHEDIR.TAG to mark this or the given directory as cache.
|
||||
echo 'Signature: 8a477f597d28d172789f06886806bc55
|
||||
# This file is a cache directory tag created by (application name).
|
||||
# For information about cache directory tags, see:
|
||||
# https://bford.info/cachedir/'>CACHEDIR.TAG
|
||||
# https://bford.info/cachedir/' > "${1:-.}/CACHEDIR.TAG"
|
||||
|
|
21  .local/bin/scripts/checkpass  Executable file
@@ -0,0 +1,21 @@
#!/bin/bash -e
# Verify the given password hash against a password
if test "$1"
then hash="$1"; shift
else printf "Hash? " && read -r hash
fi
while echo "Type password (no echo) " && read -r -s password && test "$password"; do
  param=$(echo "$hash" | cut -d\$ -f3)
  # Check if hash contains parameters
  if test $(echo "$hash" | tr -cd \$ | wc -c) -gt 3
  then salt=$(echo "$hash" | cut -d\$ -f4)
    salted_hash=$(mkpasswd -m sha-512 --"$param" --salt="$salt" "$password")
  else salted_hash=$(mkpasswd -m sha-512 --salt="$param" "$password")
  fi

  # Compare the generated hash with the original hash
  if [[ "$salted_hash" == "$hash" ]]
  then echo ":) Password correct"; break
  else echo "X Password incorrect"
  fi
done
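Usage sketch for the script above, with a truncated placeholder hash rather than a real credential:

    $ checkpass '$6$rounds=4096$examplesalt$...'
    Type password (no echo)
    :) Password correct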
@ -1,3 +1,4 @@
|
|||
#!/bin/sh
|
||||
# Set exec flag on all files that should be executable
|
||||
find -maxdepth 4 \( -name "*.sh" -o -path '*/githooks/*' \) "$@" -exec chmod -v +x {} +
|
||||
case "$1" in ([0-9]) depth=$1; shift;; esac
|
||||
find -maxdepth ${depth:-4} \( -name "*.sh" -o -name "*.py" -o -path '*/githooks/*' \) "$@" -exec chmod -v +x {} +
|
||||
|
|
|
@ -5,7 +5,8 @@
|
|||
typeset -A _clean_map
|
||||
_clean_map=([h]=$XDG_CACHE_HOME [t]=/var/tmp [l]=/var/log [v]=/var/cache)
|
||||
# TODO .mix - outdated version in socha
|
||||
_clean_home=(.ant .autopsy .bundle .cache .cargo .cpanm .docker .stack .hex .nix-defexpr .parallel .surf
|
||||
_clean_home=(.ant .autopsy .bundle .cargo .cpanm .docker .stack .hex .nix-defexpr .parallel .surf .pub-cache
|
||||
#.cache
|
||||
.yarn .node_modules .npm .pnpm-store .node-gyp .nv .electron .electron-gyp
|
||||
.gradle .gradle-kotlin-dsl .java .kscript .konan .m2 .openjfx
|
||||
.log luametatex-cache luatex-cache .texlive
|
||||
|
@ -28,12 +29,6 @@ find -L -maxdepth 2 -type l -printf "Removing broken symlink %p\n" -delete 2>/de
|
|||
test -d /mnt/data/backups/mobile/ &&
|
||||
mv -v $DATA/4*/backups/mobile/signal-* /mnt/data/backups/mobile/ 2>/dev/null
|
||||
|
||||
highlight "y :: recursively remove empty folders and files"
|
||||
if [[ $1 =~ "y" ]]; then
|
||||
find -name '.thumbnails' -printf "Pre-cleaning %p\n" -exec rm -r {} +
|
||||
find \( -name ".stfolder" -o -name ".*keep" -o -name ".git" -o -name "tmp" -o -name ".nomedia" -o -name "__init__.py" -o -name "instalee" \) -prune -o -empty -printf "Removing empty %p\n" -exec rm -d {} +
|
||||
fi
|
||||
|
||||
highlight "e :: remove downloaded emacs packages (rebuild takes minutes!)"
|
||||
[[ $1 =~ "e" ]] &&
|
||||
find $DOOMLOCALDIR -maxdepth 1 \( -name straight -o -type f \) -printf "Removing %p\n" -exec rm -rf {} +
|
||||
|
@ -83,7 +78,7 @@ highlight "c :: clean electron caches"
|
|||
|
||||
highlight "o :: optimize space extensively"
|
||||
if [[ $1 =~ "o" ]]; then
|
||||
sudo find $XDG_CACHE_HOME /var/cache /var/log /var/tmp -mindepth 1 -maxdepth 2 -atime +2 -exec rm -r {} + -prune
|
||||
sudo find /root/.cache $XDG_CACHE_HOME /var/cache /var/log /var/tmp -mindepth 1 -maxdepth 2 -atime +2 -exec rm -r {} + -prune
|
||||
|
||||
if test -f "/var/log/apt/history.log"; then
|
||||
aptclean_cur=$(cat "/var/log/apt/history.log" | wc -l)
|
||||
|
|
5  .local/bin/scripts/dedup  Executable file
@@ -0,0 +1,5 @@
#!/bin/sh -e
rmlint --max-depth=5 --keep-hardlinked --rank-by=HpOdlam -o pretty -o sh:/tmp/dedup.sh "$@"
printf "Enter to confirm, any text to skip (answer does not matter if nothing was printed above this line) "
read -r answer
test -n "$answer" || /tmp/dedup.sh -d
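A possible invocation of this rmlint wrapper (directory is hypothetical): the findings are printed first, and a plain Enter then runs the generated /tmp/dedup.sh:

    $ dedup ~/music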
6  .local/bin/scripts/dedup-dox  Executable file
@@ -0,0 +1,6 @@
#!/bin/zsh
# Deduplicate Documents
dox=$(realpath $DATA/3-*)
echo "Hardlinking:"
dedup --max-depth=9 -c sh:handler=hardlink "$dox" "$@"
dedup --max-depth=9 "$dox" "$@"
5  .local/bin/scripts/dedup-images  Executable file
@@ -0,0 +1,5 @@
#!/bin/zsh
# Deduplicate Images
img=$(realpath $DATA/4-*/images)
#find -type f -not -name '*.xmp' | rmlint -k - // pictorials
dedup --max-depth=9 -k $img $img/[a-z]*/ // $img/pictorials/stockicons/ $img/**/*.xmp "$@"
@ -1,6 +1,7 @@
|
|||
#!/bin/sh -e
|
||||
# Flatten folder hierarchy
|
||||
# Args: depth
|
||||
# Flatten folder hierarchies
|
||||
# Moves subfolders up one layer, replacing the slash by underscore unless the parent folder has the same name.
|
||||
# Args: depth of leaf folders to move up
|
||||
depth=$(expr 1 \& $# \| 2)
|
||||
case $1 in ([0-9]|[0-9][0-9]) depth=$1; shift;; esac
|
||||
find "$@" -mindepth $depth -maxdepth $depth -depth -type d | while read folder; do
|
||||
|
|
|
@ -15,8 +15,8 @@ if ! test "$command"; then
|
|||
(image/*) command="exiftool";;
|
||||
(*sqlite*) sqldiff --summary "${files[@]}" | grep -v '0 changes, 0 inserts, 0 deletes';; # TODO syntax highlighting for INSERT/UPDATE/DELETE
|
||||
(text/*)
|
||||
# Use wiked-diff only for text <10MB
|
||||
if test 10000000 -gt "$(stat --format=%s "${files[@]}" | paste -s -d'+' | bc)"
|
||||
# Use wiked-diff only for text <100KB
|
||||
if test 100000 -gt "$(stat --format=%s "${files[@]}" | paste -s -d'+' | bc)"
|
||||
then wiked-diff "${files[@]}"
|
||||
else diff --color=always --unified=1 --report-identical-files "${files[@]}"
|
||||
fi;;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#!/bin/bash
|
||||
# Troubleshoot.sh
|
||||
# troubleshoot disk issues and log results
|
||||
# A more elaborate version of Troubleshoot.sh.
|
||||
|
||||
SUCCESS=0
|
||||
|
@ -32,4 +32,5 @@ do
|
|||
smartctl -i -A $e >> ${date}_Troubleshoot.log # Run smartctl on all disks
|
||||
fi
|
||||
done
|
||||
|
||||
exit $? # In this case, exit code = 99, since that is function return.
|
|
@ -1,8 +1,20 @@
|
|||
#!/bin/sh -e
|
||||
#!/bin/sh
|
||||
# Move common files into their place from the downloads folder.
|
||||
dow=$DATA/5-*/download
|
||||
rmlint --rank-by=l -o pretty -o sh:/tmp/dow.sh *.*
|
||||
printf "Enter to confirm, any text to skip. "
|
||||
d5=$DATA/5-*
|
||||
dow=$d5/download
|
||||
|
||||
# Internal Duplicates
|
||||
rmlint --rank-by=Olam -o pretty -o sh:/tmp/dow.sh $dow/*.*
|
||||
printf "Enter to confirm, any text to skip (answer does not matter if nothing is printed above this). "
|
||||
read -r answer
|
||||
test -n "$answer" || /tmp/dow.sh -d
|
||||
|
||||
# Songs
|
||||
mv -v -- $dow/*.mp3 $dow/*.flac $dow/*.wav $MUSIC/
|
||||
mv -v -- $dow/*.iso $DATA/4-*/flash/
|
||||
|
||||
# Data-dir duplicates
|
||||
rmlint --keep-all-tagged --max-depth=5 --rank-by=Odlam -o pretty -o sh:/tmp/dow.sh $d5 // $DATA/1-* $DATA/2-* $DATA/3-* $DATA/4-*
|
||||
printf "Enter to confirm, any text to skip (answer does not matter if nothing is printed above this). "
|
||||
read -r answer
|
||||
test -n "$answer" || /tmp/dow.sh -d
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#!/bin/sh
|
||||
# Find config files with fzf, preview and edit them
|
||||
# Common config files are automatically checked/reloaded
|
||||
alias dedup='awk '"'"'!a[$0]++'"'"
|
||||
alias dedup-lines='awk '"'"'!a[$0]++'"'"
|
||||
|
||||
listconf() {
|
||||
{ cat "$conf_cache"
|
||||
|
@ -12,7 +12,7 @@ listconf() {
|
|||
\( -name Partitions -o -name mdn -o -name .git -o -name .local -o -name plugged \) -prune -o \
|
||||
! \( -iname "*.markdown" -o -iname "*.md" -o -name "Network Persistent State" -o -iname "*.pem" \) -a \
|
||||
-type f -readable -exec grep -lI '' {} + 2>/dev/null
|
||||
} | dedup
|
||||
} | dedup-lines
|
||||
}
|
||||
|
||||
conf_cache_dir="${XDG_CACHE_HOME:-$HOME/.cache}/$(basename "$0")"
|
||||
|
@ -21,7 +21,7 @@ conf_tmp="${conf_cache}.tmp"
|
|||
mkdir -p "$conf_cache_dir"
|
||||
touch "$conf_cache"
|
||||
|
||||
sel=$(listconf | fzf -1 -0 --tiebreak=end,length --preview '$(test -r {} || echo "sudo") bat --color=always --style=numbers --line-range :200 {}' --query="$1" --history "$conf_cache_dir/searches")
|
||||
sel=$(listconf | fzf -1 -0 --tiebreak=end,length --preview '$(test -r {} || echo "sudo -n") bat --color=always --style=numbers --line-range :200 {}' --query="$1" --history "$conf_cache_dir/searches")
|
||||
case "$sel" in
|
||||
("") exit 2;;
|
||||
(/etc/sudoers) sudo visudo;;
|
||||
|
|
5  .local/bin/scripts/flash  Executable file
@@ -0,0 +1,5 @@
#!/bin/sh
lsblk $2
echo -n "Flash $1? "
read
sudo dd if=$1 of=$2 status=progress bs=1M
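Hedged example run (image name and target device are placeholders; verify the lsblk output before confirming):

    $ flash archlinux.iso /dev/sdX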
@ -6,7 +6,7 @@
|
|||
# TODO fails on "Alchemy"
|
||||
# TODO implement finding by tags
|
||||
PLAYLISTS="${PLAYLISTS:-$MUSIC/Playlists}"
|
||||
mpc -q clear || mpdr && mpc -q clear
|
||||
mpc -q clear || mpdr
|
||||
if test -z "$1"
|
||||
then mp -q "$PLAYLISTS/focus.m3u"
|
||||
else
|
||||
|
|
|
@ -7,10 +7,12 @@
|
|||
# - host
|
||||
# - target directory name (and further arguments to clone)
|
||||
# In an existing repo, first arg is omitted
|
||||
# TODO: transform https://codeberg.org/forgejo-contrib/forgejo-helm -> forgejo-helm forgejo-contrib "" codeberg.org
|
||||
if test -d ".git" # TODO search upwards
|
||||
then repo="$(basename "$PWD")"
|
||||
git remote set-url origin "$(git-repo "${3:-github.com}" "$repo" "${2:-$(git config --get user.name)}" "${@:4}")"
|
||||
else test $# -eq 0 && echo "Usage: $0 [repo (omit if in repo)] [upstream owner] [own user] [url]" && exit 2
|
||||
git remote remove upstream 2>/dev/null || true
|
||||
else test $# -eq 0 && echo "Usage: $0 <repo (omit if in repo)> <upstream owner> [own user] [host]" && exit 2
|
||||
repo="$1" && shift
|
||||
if test "$#" -eq 0
|
||||
then git-get "$repo"
|
||||
|
@ -24,7 +26,6 @@ else test $# -eq 0 && echo "Usage: $0 [repo (omit if in repo)] [upstream owner]
|
|||
cd "$(basename "$(expr "$4" \| "$repo")")"
|
||||
fi
|
||||
user="${1:-$repo}"
|
||||
git remote remove upstream 2>/dev/null || true
|
||||
git remote add upstream "$(git-repo "${3:-github.com}" "$repo" "$user")"
|
||||
git remote -v
|
||||
git-upstream "$user"
|
||||
#git remote add upstream "$(git-repo "${3:-github.com}" "$repo" "$user")"
|
||||
exec $SHELL
|
||||
|
|
|
@ -5,8 +5,8 @@ git remote $(case "$(git remote)" in
|
|||
(*"$remote"*) echo set-url;;
|
||||
(*) echo add;;
|
||||
esac) "$remote" "$(git-repo "$@")"
|
||||
git remote -v && git fetch
|
||||
git remote -v && git fetch --all
|
||||
|
||||
git branch --format='%(refname:short)' | while read branch
|
||||
do test $(git branch -a | grep $remote/$branch | wc -l) -gt 0 && git branch -u $remote/$branch $branch
|
||||
do test $(git branch --all | grep "$remote/$branch" | wc -l) -gt 0 && git branch -u "$remote/$branch" "$branch"
|
||||
done
|
||||
|
|
|
@ -14,12 +14,13 @@ case "$1" in
|
|||
echo "$1"
|
||||
exit;;
|
||||
esac
|
||||
case $1 in
|
||||
host=$1
|
||||
case $host in
|
||||
(socha) user=software-challenge; host=git@github.com;;
|
||||
(hub) host=git@github.com;;
|
||||
(hub|github) host=git@github.com;;
|
||||
(lab) host=git@gitlab.com;;
|
||||
(jf) host=${1:-gitea@git.jfischer.org};;
|
||||
(*|ftt) host=git@code.ftt.gmbh; user=janek;;
|
||||
(jf) host=gitea@git.jfischer.org;;
|
||||
(ftt|"") host=git@code.ftt.gmbh; user=janek;;
|
||||
esac
|
||||
user=${3:-${user:-$(git config user.name)}}
|
||||
repo=${2:-$(basename $(git root))}
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
#!/bin/sh
|
||||
# set given repository as upstream or add as a new remote
|
||||
# ARGS:
|
||||
# - user/org
|
||||
# - repo
|
||||
# - remote name
|
||||
# Set given repository as upstream or add as a new remote
|
||||
test $# -lt 1 && echo "$0 <user> [repo] [remote-name]" && exit 1
|
||||
if test "$3"
|
||||
then
|
||||
name="$3"
|
||||
|
@ -12,5 +9,5 @@ else
|
|||
git remote get-url upstream >/dev/null 2>&1 && name="$1" || name="upstream"
|
||||
fi
|
||||
origin="$(git remote get-url origin)"
|
||||
git remote add -f $name "$(echo $origin | cut -d':' -f1):$1/${2:-$(echo $origin | cut -d'/' -f2)}"
|
||||
git remote add -f "$name" "$(echo $origin | cut -d':' -f1):$1/${2:-$(echo $origin | cut -d'/' -f2)}"
|
||||
git remote -v
|
||||
|
|
6  .local/bin/scripts/grtag  Executable file
@@ -0,0 +1,6 @@
#!/bin/sh
# Recursively find org-mode and other tags
tag=$1
shift
grep --color=always --line-number --binary-files=without-match --directories=recurse --exclude-dir logseq --ignore-case ":$tag:
@$tag\b" "$@"
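Hypothetical search (tag and path are assumptions); it matches both ':health:' org-style tags and '@health' jrnl-style tags, case-insensitively:

    $ grtag health ~/notes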
@ -26,13 +26,17 @@ showinfo() {
|
|||
|
||||
cmd="$1"
|
||||
case "$cmd" in
|
||||
(fwupdmgr|hunt|rdoc|gh|chordpro|bat|pdfjam|reflector|topgrade|r128gain|7z|kubectl|diffr|docker|jrnl|difft|wiked-diff|qpdf|ninja) unbuffer "$@" --help | sed 's|^[^ ].*:|[1m\0[22m|' | $paginate;;
|
||||
(fwupdmgr|hunt|rdoc|gh|chordpro|bat|pdfjam|reflector|topgrade|r128gain|7z|kubectl|diffr|docker|jrnl|difft|wiked-diff|qpdf|ninja)
|
||||
unbuffer "$@" --help | sed 's|^[^ ].*:|[1m\0[22m|' | $paginate;;
|
||||
(sqlcmd) sqlcmd -?;;
|
||||
(exa) "$0" eza;;
|
||||
(caddy|stretchly|go|flutter)
|
||||
shift
|
||||
"$cmd" help "$@" | $paginate;;
|
||||
(doom|sgpt) "$@" --help;; # Paginates itself
|
||||
(mpw) "$@" -h 2>&1 | $paginate;;
|
||||
(rsgain) "$@" custom --help;;
|
||||
(spectre|plantuml|java) unbuffer "$@" -help | $paginate;;
|
||||
(mpw) "$@" -h 2>&1 | $paginate;;
|
||||
(rails) { "$@" -H && "$@" --help; } | $paginate;;
|
||||
(vlc) shift && unbuffer vlc --full-help "$@" | $paginate;;
|
||||
(kdeconnect*) shift && kdeconnect-cli --help-all "$@" | $paginate;;
|
||||
|
|
|
@ -5,5 +5,5 @@ then echo "$0 site [index.html] [USER]"
|
|||
else
|
||||
site=$1
|
||||
shift
|
||||
scp -r "$@" "iridion:/home/${2:-$USER}/web/$site/public_html$(test "$#" -gt 1 || echo '/index.html')"
|
||||
scp -r "$@" "iridion:/home/${2:-$USER}/web/$site/public_html$(test "$#" -gt 1 || test -d "$1" || echo '/index.html')"
|
||||
fi
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
#!/bin/sh
|
||||
# Find terms in jrnl and turn them into tags
|
||||
# Find terms in jrnl files and turn them into tags
|
||||
# Check with jrnl --tags
|
||||
if test $# -eq 0
|
||||
then $0 sleep nap health tech read dev phone Zinc run bike tour laptop computer PC CB piano faith journal
|
||||
else
|
||||
for arg
|
||||
do rpl "\(^\|[^@]\)\b$arg\b" "\1@$arg" $JOURNAL/*.txt
|
||||
do rpl "\(^\|[^@]\)\b$arg\b" "\1@$arg" $JOURNAL/*.txt 2>/dev/null
|
||||
done
|
||||
fi
|
||||
|
|
8  .local/bin/scripts/k8sviz  Executable file
@@ -0,0 +1,8 @@
#!/bin/sh -e
ns=${1:-stackspin}
case "$1" in
  ("") /opt/k8sviz/k8sviz.sh --help;;
  (-*) /opt/k8sviz/k8sviz.sh "$@";;
  (*) set -x
      /opt/k8sviz/k8sviz.sh --kubeconfig $KUBECONFIG --namespace "$ns" -t png -o "$(date +%y%m%d)_$ns.png";;
esac
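Assuming the helper is installed under /opt/k8sviz as referenced above, passing a namespace renders a dated PNG (namespace and date are examples):

    $ k8sviz stackspout
    # writes e.g. 240101_stackspout.png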
@ -2,7 +2,7 @@
|
|||
kdeconnect-cli --refresh
|
||||
if ! test -f "$1"; then echo "Please specify a file to share" && exit 1; fi
|
||||
file="$1"
|
||||
device="$(kdeconnect-cli --list-available 2>/dev/null | fzf -0 -1 | cut -d' ' -f3)"
|
||||
device="$(kdeconnect-cli --list-available 2>/dev/null | fzf -0 -1 | sed -E 's|.*: ([^ ]*) .*|\1|')"
|
||||
test -z "$device" && kdeconnect-cli --list-devices && echo "No device available!" && exit 1
|
||||
shift
|
||||
kdeconnect-cli "--share=$file" -d "$device" "$@"
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#!/bin/sh -e
|
||||
# Displays the latest files in the given directory or pwd
|
||||
# Lists the latest modified files in the given directory or pwd
|
||||
test "$1" = "-a" && all=true && shift
|
||||
for f in "${@:-$PWD}"
|
||||
do test $# -gt 1 && highlight "$f"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#!/bin/sh
|
||||
#!/bin/bash
|
||||
# Mount a partition by label or device identifier automatically
|
||||
# Includes interactive selection if no argument is given
|
||||
set -eo pipefail
|
||||
|
@ -27,6 +27,17 @@ case $1 in
|
|||
shift;;
|
||||
esac
|
||||
|
||||
# FILE as loopback device
|
||||
if test -f "$arg"
|
||||
then loopdevice="$(sudo losetup -f)"
|
||||
if sudo losetup -P -v "$loopdevice" "$arg"
|
||||
then $0 "$loopdevice" ||
|
||||
{ sudo vgchange -ay &&
|
||||
sudo pvs | grep "$loopdevice" | awk '{print $2}' | xargs -I% find /dev/% -mindepth 1 -exec sudo sh -c 'dir=/mnt/$(echo "{}" | sed "s|/dev/||;s|/|-|g") && mkdir -vp "$dir" && mount {} "$dir"' \; ; }
|
||||
exit $?
|
||||
fi
|
||||
fi
|
||||
|
||||
# FSTAB: BY LABEL
|
||||
if grep --word-regexp "LABEL=$arg" /etc/fstab
|
||||
then # have to mount twice as the first one might be creating the directory
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
#!/bin/sh -e
|
||||
#!/bin/sh
|
||||
cd /mnt/nas
|
||||
find -mindepth 1 -maxdepth 1 -exec mount {} \;
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
#!/bin/sh
|
||||
find -type d -exec touch {}/.nomedia \;
|
||||
find -type d -printf 'Marking %p\n' -a -exec touch {}/.nomedia \;
|
||||
|
|
|
@ -1,12 +1,15 @@
|
|||
#!/bin/sh -e
|
||||
# Add the modification date in front of the filename
|
||||
test $# -eq 0 && echo "predate [-s] [newname] <files...>" && exit 2
|
||||
IFS='\n'
|
||||
test "$1" = -s && short=true && shift
|
||||
# Whether to change the filename suffix after the date
|
||||
test ! -f "$1" && rename=true && name=$1 && shift || rename=false
|
||||
|
||||
for file
|
||||
do mv --verbose --interactive "$file" \
|
||||
"$(latest "$file" | head -2 | tail -1 |
|
||||
cut -c$(test "$short" && echo "3,4,6,7,9,10" || echo "-10"))$(
|
||||
cut -c$(test "$short" && echo "3,4,6,7,9,10" || echo "-10"))$(
|
||||
if $rename
|
||||
then echo "$(test "$name" && echo "_$name").${file##*.}"
|
||||
else echo "_$file" | sed 's/^_2\?\([0-9]\{2,3\}\)-\([0-9]\{2\}\)\(-[0-9]\{2\}\)\?_\?/_/'
|
||||
|
|
2  .local/bin/scripts/puml  Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
find . -maxdepth 2 -name '*.puml' -o -name '*.plantuml' | entr -r plantuml /_
@ -14,8 +14,12 @@ do
|
|||
continue
|
||||
fi
|
||||
|
||||
# Clean empty files
|
||||
$elevate find -name '.thumbnails' -printf 'Pre-cleaning %p\n' -exec rm -r {} +
|
||||
$elevate find -H "$f" -maxdepth $(expr 1 \& "$f" = "/" \| 5 \& $# \> 0 \| 3) \
|
||||
\( -name '.stfolder' -o -name '.*keep' -o -name '*.py' -o -name 'nodelay.txt' -o -name '.git' -o -name 'tmp' -o -name '.nomedia' -o -name '__init__.py' -o -name '*ignore' -o -name 'instalee' \) \
|
||||
\( -type d -o -type f \) -prune -o -empty -printf 'Removing empty %p\n' -exec rm -d {} +
|
||||
# $elevate find -H "$f" -maxdepth $(expr 1 \& "$f" = "/" \| 5 \& $# \> 0 \| 4) -type d -empty -name .stfolder -exec rm -div {} \;
|
||||
$elevate find -H "$f" -maxdepth $(expr 1 \& "$f" = "/" \| 5 \& $# \> 0 \| 3) -not \( -name '.stfolder' -o -name '.*keep' -o -name "*.py" -o -name "nodelay.txt" \) \( -type d -o -type f \) -a -empty -printf 'Removing empty %p\n' -delete
|
||||
test $# -eq 0 && exit $?
|
||||
if test -e "$f"; then
|
||||
echo -n "$f ($(ls -A "$f" | head -3 | paste -s -d' ')) " >&2 &&
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#!/bin/sh
|
||||
# Execute a gradle task (default test) until it fails
|
||||
# Execute a gradle task (by default "test") until it fails
|
||||
code=0
|
||||
case "$1" in ([0-9]*) code=$1; shift;; esac
|
||||
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
#!/bin/sh
|
||||
# Recursively add replaygain to the given files or from the current directory
|
||||
r128gain $(test -f "$1" || echo '--recursive') --skip-tagged --preserve-times 1 "${@:-.}"
|
||||
# rsgain custom --album --skip-existing --tagmode=i
|
||||
# Need a find command, call for each album
|
||||
# rsgain easy --skip-existing --multithread=${SPARE_CORES:-3} "${@:-.}"
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
# shows size statistics for subfolders
|
||||
# max depth is equal to the first argument if it is a number or 1
|
||||
export chars=$(expr $(tput cols) - 60)
|
||||
case "$1" in (+*) size=${1#+}; depth=2; shift;; esac
|
||||
case "$1" in (+*) size=${1#+}G; depth=2; shift;; esac
|
||||
case "$1" in ([0-9]) depth=$1; shift;; esac
|
||||
(du --max-depth "${depth:-1}" -xhat ${size:-50}M "$@" | sort -h | grep -v "^0") |
|
||||
(du --max-depth "${depth:-1}" -xhat ${size:-50M} "$@" | sort -h | grep -v "^0") |
|
||||
while read line; do echo "$(tput smso)$line$(tput rmso) $(pacman -Qqo $(echo $line | awk '{print $2}') 2>/dev/null | paste -s -d',' | sed "s/\(.\{${chars}\}\).*/\1.../")"; done || #| column -t ||
|
||||
(du --max-depth "${depth:-1}" -xha "$@" | sort -h | tail)
|
||||
|
|
|
@ -3,7 +3,7 @@ light -S .01
|
|||
pkill --echo electron
|
||||
pkill --echo --ignore-case discord
|
||||
pkill --echo signal
|
||||
pkill --echo telegram
|
||||
pkill --echo aw-server
|
||||
pkill --echo aw-qt
|
||||
systemctl stop --user syncthing plasma-baloorunner kde-baloo
|
||||
systemctl stop openvpn-client@deltaPeak.service
|
||||
|
|
|
@ -2,4 +2,4 @@
|
|||
# Rename files according to a given extended regex sed expression
|
||||
sedexpr="$1"
|
||||
shift
|
||||
find "$@" -exec sh -c 'mv -iv "{}" "$(echo "{}" | sed -E "'$sedexpr'")" 2>/dev/null' \;
|
||||
find "$@" -exec sh -c 'mv -iv "{}" "$(echo "{}" | sed -E "'"$sedexpr"'")" 2>/dev/null' \;
|
||||
|
|
|
@ -4,16 +4,29 @@
|
|||
while true
|
||||
do case $1 in
|
||||
(-h|--help|"") echo "Usage: $0 [-q quality (default 85)] [-o outfile] [xRES] <images...>" && exit 2;;
|
||||
(-o) out="$2"; shift 2;;
|
||||
(-q) quality="$2"; shift 2;;
|
||||
(x*) resolution="$1"; resize="-resize $resolution"; shift;;
|
||||
(-o) out="$2"; shift;;
|
||||
(-q) quality="$2"; shift;;
|
||||
(x*) resolution="$1"; resize="-resize $resolution";;
|
||||
(-v) set -x;;
|
||||
(*) break;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
out="${out:-$1${resolution:--shrinked}.jpg}"
|
||||
magick "$@" -auto-orient -strip \
|
||||
-interlace Plane -define jpeg:dct-method=float -sampling-factor 4:2:0 -gaussian-blur 0.05 \
|
||||
-quality "${quality:-85}" $resize "$out"
|
||||
|
||||
process() {
|
||||
out=$1
|
||||
shift
|
||||
magick "$@" -auto-orient -strip \
|
||||
-interlace Plane -define jpeg:dct-method=float -sampling-factor 4:2:0 -gaussian-blur 0.05 \
|
||||
-quality "${quality:-85}" $resize "$out"
|
||||
}
|
||||
if test -n "$out"
|
||||
then process "$out" "$@"
|
||||
else
|
||||
for arg
|
||||
do process "${arg}${resolution:--shrinked}.jpeg" "$arg"
|
||||
done
|
||||
fi
|
||||
printf "Shrinked $1(%s) to $out(%s) - reduced to %s%%\n" \
|
||||
$(stat --format %s "$1" "$out" | numfmt --to=iec-i --suffix=B) \
|
||||
$(stat --format %s "$out" "$1" | sed 'N;s|\n|*100/|' | bc)
|
||||
|
|
|
@ -3,7 +3,8 @@
|
|||
# Trims everything beyond the given page number
|
||||
test "$1" = "-q" && quiet=$1 && shift
|
||||
case "$1" in ([0-9]) page=$1; shift;; esac
|
||||
test ! -r "$1" && echo "Usage: sign [page] <document.pdf> [hoffset (-160) [voffset (-310) [scale [signature-image]]]]" && exit 1
|
||||
test $# -lt 2 && echo "Usage: sign [page] <document.pdf> [hoffset (-160) [voffset (-310) [scale [signature-image]]]]"
|
||||
test ! -r "$1" && exit 1
|
||||
|
||||
tmp_base=/tmp/sign
|
||||
mkdir -p $tmp_base
|
||||
|
|
21  .local/bin/scripts/stack-helm  Executable file
@@ -0,0 +1,21 @@
#!/bin/sh -e
# Emulate helm repo adding for easy command copy-pasting
cd "$STACKSPIN/../stackspout"
cmd=$1
shift
case "$cmd" in
  (install) true;;
  (repo) shift;;
  (*) echo 'Unknown command!'>&2 && exit 2;;
esac
name=$1
url=$2

echo "apiVersion: source.toolkit.fluxcd.io/v1beta1
kind: HelmRepository
metadata:
  name: $name
  namespace: flux-system
spec:
  interval: 60m
  url: $url" | tee "infrastructure/sources/$name.yaml"
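The intent is that an upstream 'helm repo add' instruction can be pasted with only the command name swapped; a hypothetical example (chart repository URL is an assumption):

    $ stack-helm repo add nextcloud https://nextcloud.github.io/helm/
    # tees the HelmRepository manifest to infrastructure/sources/nextcloud.yaml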
5  .local/bin/scripts/stack-invite  Executable file
@@ -0,0 +1,5 @@
#!/bin/sh
#for app in wordpress nextcloud velero vikunja ninja
#do stack user setrole
#done
pass business/ftt/invite | envsubst | ssh nc-iridion sudo sendmail -v "$mail"
266
.local/bin/scripts/stack-template
Executable file
266
.local/bin/scripts/stack-template
Executable file
|
@ -0,0 +1,266 @@
|
|||
#!/bin/sh -e
|
||||
if test $# -lt 1; then
|
||||
echo "You should be in the root apps folder."
|
||||
echo "Usage: $0 <app> [subdomain] [repo] [namespace]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
app=$1
|
||||
subdomain=${2:-$app}
|
||||
repo=${3:-$app}
|
||||
namespace=${4:-stackspout}
|
||||
|
||||
cat <<EOF >>"$subdomain-kustomization.yaml"
|
||||
---
|
||||
apiVersion: kustomize.toolkit.fluxcd.io/v1beta2
|
||||
kind: Kustomization
|
||||
metadata:
|
||||
name: add-${subdomain}
|
||||
namespace: flux-system
|
||||
spec:
|
||||
interval: 10m
|
||||
prune: true
|
||||
path: ./apps/${subdomain}
|
||||
sourceRef:
|
||||
kind: GitRepository
|
||||
name: ${namespace}
|
||||
EOF
|
||||
|
||||
if test "$(basename "$PWD")" != "${subdomain}"
|
||||
then mkdir -p "${subdomain}"
|
||||
cd "${subdomain}"
|
||||
fi
|
||||
|
||||
# Values
|
||||
|
||||
cat <<EOF >>"$app-kustomization.yaml"
|
||||
---
|
||||
apiVersion: kustomize.toolkit.fluxcd.io/v1beta2
|
||||
kind: Kustomization
|
||||
metadata:
|
||||
name: ${app}
|
||||
namespace: flux-system
|
||||
spec:
|
||||
interval: 5m
|
||||
retryInterval: 2m
|
||||
timeout: 10m
|
||||
wait: true
|
||||
prune: true
|
||||
path: ./apps/${subdomain}/${app}
|
||||
sourceRef:
|
||||
kind: GitRepository
|
||||
name: ${namespace}
|
||||
dependsOn:
|
||||
- name: flux
|
||||
- name: local-path-provisioner
|
||||
- name: ${app}-secrets
|
||||
- name: nginx
|
||||
- name: single-sign-on
|
||||
postBuild:
|
||||
substituteFrom:
|
||||
- kind: Secret
|
||||
name: stackspin-cluster-variables
|
||||
- kind: ConfigMap
|
||||
name: stackspin-${app}-kustomization-variables
|
||||
- kind: Secret
|
||||
name: stackspin-${app}-variables
|
||||
# OIDC
|
||||
- kind: Secret
|
||||
name: stackspin-${app}-oauth-variables
|
||||
- kind: ConfigMap
|
||||
name: stackspin-single-sign-on-kustomization-variables
|
||||
EOF
|
||||
|
||||
if mkdir "$app"
|
||||
then
|
||||
cat <<EOF >"$app/$app-oauth-client.yaml"
|
||||
apiVersion: hydra.ory.sh/v1alpha1
|
||||
kind: OAuth2Client
|
||||
metadata:
|
||||
name: $app-oauth-client
|
||||
# Has to live in the same namespace as the stackspin-$app-oauth-variables secret
|
||||
namespace: flux-system
|
||||
spec:
|
||||
# TODO copied from wekan: https://github.com/wekan/wekan/wiki/Keycloak
|
||||
grantTypes:
|
||||
- authorization_code
|
||||
- refresh_token
|
||||
- client_credentials
|
||||
- implicit
|
||||
responseTypes:
|
||||
- id_token
|
||||
- code
|
||||
scope: "openid profile email stackspin_roles"
|
||||
secretName: stackspin-$app-oauth-variables
|
||||
#redirectUris:
|
||||
# - https://\${${app}_domain}/oauth/openid/
|
||||
#tokenEndpointAuthMethod: client_secret_post
|
||||
EOF
|
||||
cat <<EOF >"$app/$app-release.yaml"
|
||||
apiVersion: helm.toolkit.fluxcd.io/v2beta1
|
||||
kind: HelmRelease
|
||||
metadata:
|
||||
name: $app
|
||||
namespace: $namespace
|
||||
spec:
|
||||
releaseName: $app
|
||||
chart:
|
||||
spec:
|
||||
chart: $app
|
||||
version: 1.0 # TODO
|
||||
sourceRef:
|
||||
kind: HelmRepository
|
||||
name: $repo
|
||||
namespace: flux-system
|
||||
interval: 5m
|
||||
valuesFrom:
|
||||
- kind: ConfigMap
|
||||
name: stackspin-$app-values
|
||||
optional: false
|
||||
# Allow overriding values by ConfigMap or Secret
|
||||
- kind: ConfigMap
|
||||
name: stackspin-$app-override
|
||||
optional: true
|
||||
- kind: Secret
|
||||
name: stackspin-$app-override
|
||||
optional: true
|
||||
EOF
|
||||
cat <<EOF >"$app/$app-values-configmap.yaml"
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: stackspin-$app-values
|
||||
namespace: $namespace
|
||||
data:
|
||||
values.yaml: |
|
||||
# TODO verify structure matches chart
|
||||
commonLabels:
|
||||
stackspin.net/backupSet: "${app}"
|
||||
podLabels:
|
||||
stackspin.net/backupSet: "${app}"
|
||||
# TODO Configure PVC for data & database including backup labels
|
||||
podAnnotations:
|
||||
backup.velero.io/backup-volumes: "data"
|
||||
persistence:
|
||||
enabled: true
|
||||
existingClaim: "${app}-data"
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
# Elaborate style
|
||||
annotations:
|
||||
kubernetes.io/tls-acme: "true"
|
||||
hosts:
|
||||
- host: "\${${app}_domain}"
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: $app-tls
|
||||
hosts:
|
||||
- "\${${app}_domain}"
|
||||
# Bitnami style
|
||||
hostname: "\${${app}_domain}"
|
||||
tls: true
|
||||
certManager: true
|
||||
# TODO Adjust $app Mailing config
|
||||
# mailer:
|
||||
# enabled: "\${outgoing_mail_enabled}"
|
||||
# host: "\${outgoing_mail_smtp_host}"
|
||||
# port: "\${outgoing_mail_smtp_port}"
|
||||
# username: "\${outgoing_mail_smtp_user}"
|
||||
# password: "\${outgoing_mail_smtp_password}"
|
||||
# fromemail: "\${outgoing_mail_from_address}"
|
||||
# TODO Adjust $app OpenID Connect Single Sign-On Configuration
|
||||
# - name: Stackspin
|
||||
# key: "\${client_id}"
|
||||
# secret: "\${client_secret}"
|
||||
# issuer: "https://\${hydra_domain}"
|
||||
# autoDiscoverUrl: 'https://\${hydra_domain}/.well-known/openid-configuration'
|
||||
EOF
|
||||
cat <<EOF >"$app/$app-pvc.yaml"
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: $app-data
|
||||
namespace: $namespace
|
||||
labels:
|
||||
stackspin.net/backupSet: "$app"
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
volumeMode: Filesystem
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
storageClassName: local-path
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Secrets
|
||||
|
||||
cat <<EOF >>"$app-secrets-kustomization.yaml"
|
||||
---
|
||||
apiVersion: kustomize.toolkit.fluxcd.io/v1beta2
|
||||
kind: Kustomization
|
||||
metadata:
|
||||
name: ${app}-secrets
|
||||
namespace: flux-system
|
||||
spec:
|
||||
interval: 5m
|
||||
timeout: 4m
|
||||
wait: true
|
||||
prune: true
|
||||
path: ./apps/${subdomain}/${app}-secrets
|
||||
sourceRef:
|
||||
kind: GitRepository
|
||||
name: ${namespace}
|
||||
dependsOn:
|
||||
- name: flux
|
||||
- name: secrets-controller
|
||||
postBuild:
|
||||
substituteFrom:
|
||||
- kind: Secret
|
||||
name: stackspin-cluster-variables
|
||||
EOF
|
||||
if mkdir "$app-secrets"
|
||||
then
|
||||
cat <<EOF >"$app-secrets/$app-kustomization-variables.yaml"
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: stackspin-$app-kustomization-variables
|
||||
namespace: flux-system
|
||||
data:
|
||||
${app}_domain: ${subdomain}.\${domain}
|
||||
EOF
|
||||
cat <<EOF >>"$app-secrets/$app-variables.yaml"
|
||||
---
|
||||
apiVersion: secretgenerator.mittwald.de/v1alpha1
|
||||
kind: StringSecret
|
||||
metadata:
|
||||
name: stackspin-$app-variables
|
||||
namespace: flux-system
|
||||
spec:
|
||||
fields:
|
||||
- fieldname: password
|
||||
EOF
|
||||
cat <<EOF >"$app-secrets/$app-oauth-secret.yaml"
|
||||
---
|
||||
apiVersion: secretgenerator.mittwald.de/v1alpha1
|
||||
kind: StringSecret
|
||||
metadata:
|
||||
name: stackspin-$app-oauth-variables
|
||||
namespace: flux-system
|
||||
spec:
|
||||
data:
|
||||
client_id: $app
|
||||
fields:
|
||||
- fieldName: client_secret
|
||||
length: "32"
|
||||
EOF
|
||||
fi
|
||||
|
||||
../generate-kustomizations.sh .
|
||||
echo "TODO: Obtain chart version, check configmap, adjust secrets" >&2
|
||||
exec $SHELL
|
|
@ -7,3 +7,4 @@
|
|||
# Any other arguments (usually pathnames) are passed on to the tree command
|
||||
case "$1" in ([0-9]) depth=$1; shift;; esac
|
||||
tree --dirsfirst --du -h -C -L ${depth:-3} -I node_modules "$@" | ${PAGER:-less} -rF
|
||||
# TODO consider exa -T -L X
|
||||
|
|
3  .local/bin/scripts/touche  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh
mkdir -p $(dirname $1)
touch "$@"
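Usage sketch (path is hypothetical); missing parent directories are created before the file itself is touched:

    $ touche notes/2024/todo.txt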
5  .local/bin/scripts/vmpasswd  Executable file
@@ -0,0 +1,5 @@
#!/bin/sh -ex
# Generate a cloud-init hashed password for the given machine and copy it
passwd=$(mkpasswd --method=SHA-512 --rounds=4096 $(pass mpw -t basic "$@"))
echo $passwd
echo $passwd | wl-copy
14  .local/bin/scripts/void  Executable file
@@ -0,0 +1,14 @@
#!/bin/sh
temp=$(mktemp)
target=$(pass app/voidcat)
for arg; do
  echo "Uploading $arg"
  { curl --progress-bar -X POST \
      -H "V-Content-Type: $(file --mime-type -b $arg)" \
      -H "V-Full-Digest: $(sha256sum -bz $arg | cut -d' ' -f1)" \
      -H "V-Filename: $arg" \
      -H "Authorization: Bearer $(echo "$target" | head -1)" \
      --data-binary @$arg \
      $(echo "$target" | tail -1); echo; } | tee -a $temp
done
test $# -gt 1 && echo && cat $temp | sed 's|http:|https:|'
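Hedged example upload (filename is a placeholder); the bearer token and endpoint are read from 'pass app/voidcat' as in the script above:

    $ void screenshot.png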
@ -1,3 +1,3 @@
|
|||
#!/bin/sh -e
|
||||
test -f "$1" || cd "$XDG_DATA_HOME/openvpn"
|
||||
tmux new-session -s "$@" "sudo openvpn $@"
|
||||
tmux new-session -s "$@" "sudo openvpn $@ || read"
|
||||
|
|
3  .local/bin/scripts/way-column  Executable file
@@ -0,0 +1,3 @@
#!/bin/sh
way-displays -s ARRANGE_ALIGN COLUMN middle
test $# -gt 0 && way-displays -s ORDER "$@"
@ -1,3 +1,3 @@
|
|||
#!/bin/sh
|
||||
way-displays -s ARRANGE_ALIGN ROW bottom
|
||||
way-displays -s ORDER "$@"
|
||||
test $# -gt 0 && way-displays -s ORDER "$@"
|
||||
|
|
3  .local/bin/scripts/wg-reload  Normal file
@@ -0,0 +1,3 @@
#!/bin/bash -x
# wg-quick reload interface (the process substitution below needs bash, not plain sh)
wg syncconf "$@" <(wg-quick strip "$@")
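Hypothetical reload of a running interface (interface name assumed; requires the same privileges as wg itself):

    $ sudo wg-reload wg0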
@ -2,6 +2,9 @@
|
|||
# xdg-open all given files
|
||||
# TODO handle .desktop-files with gtk-launch/dex/kioclient exec, add selector from xdg-mime-file
|
||||
while test $# -gt 0; do
|
||||
xdg-open "$(case "$1" in (-*) echo './';; esac)$1"
|
||||
case $1 in
|
||||
(*.epub) okular "$1" &;;
|
||||
(*) xdg-open "$(case "$1" in (-*) echo './';; esac)$1";;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
|
21  .local/bin/server/fail2ban-status  Executable file
@@ -0,0 +1,21 @@
#!/bin/bash -e
# Source: https://serverfault.com/questions/285256/how-to-unban-an-ip-properly-with-fail2ban/1023005#1023005
test $# -eq 0 && sudo fail2ban-client status && exit 0
echo "Jails where $1 is locked up:"
JAILS=`fail2ban-client status | grep "Jail list" | sed -E 's/^[^:]+:[ \t]+//' | sed 's/,//g'`
for JAIL in $JAILS
do
  currentjail=`fail2ban-client status $JAIL | grep -B 8 $1 | grep Status | awk '{printf $5}'`
  if [[ ${#currentjail} -gt 0 ]] ; then
    echo $currentjail
  fi
done
echo
echo "To unban $1 use the following commands:"
for JAIL in $JAILS
do
  currentjail=`fail2ban-client status $JAIL | grep -B 8 $1 | grep Status | awk '{printf $5}'`
  if [[ ${#currentjail} -gt 0 ]] ; then
    echo "fail2ban-client set $currentjail unbanip $1"
  fi
done
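Hedged example with a documentation IP address: it lists the jails currently banning that address and prints the matching unban commands:

    $ fail2ban-status 203.0.113.7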
@ -1,134 +0,0 @@
|
|||
#!/bin/sh -e
|
||||
if test $# -lt 1; then
|
||||
echo "You should be in the root apps folder."
|
||||
echo "Usage: $0 <app> [subdomain] [repo] [namespace]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
app=$1
|
||||
subdomain=${2:-$app}
|
||||
repo=${3:-$app}
|
||||
namespace=${4:-stackspout}
|
||||
|
||||
if test "$(basename "$PWD")" != "$subdomain"
|
||||
then mkdir -p "$subdomain" && cd "$subdomain"
|
||||
fi
|
||||
|
||||
cat <<EOF >$app-oauth-client.yaml
|
||||
apiVersion: hydra.ory.sh/v1alpha1
|
||||
kind: OAuth2Client
|
||||
metadata:
|
||||
name: $app-oauth-client
|
||||
# Has to live in the same namespace as the stackspin-$app-oauth-variables secret
|
||||
namespace: flux-system
|
||||
spec:
|
||||
# TODO copied from wekan: https://github.com/wekan/wekan/wiki/Keycloak
|
||||
grantTypes:
|
||||
- authorization_code
|
||||
- refresh_token
|
||||
- client_credentials
|
||||
- implicit
|
||||
responseTypes:
|
||||
- id_token
|
||||
- code
|
||||
scope: "openid profile email stackspin_roles"
|
||||
secretName: stackspin-$app-oauth-variables
|
||||
#redirectUris:
|
||||
# - https://$subdomain.\${domain}/oauth/openid/
|
||||
#tokenEndpointAuthMethod: client_secret_post
|
||||
EOF
|
||||
|
||||
cat <<EOF >$app-release.yaml
|
||||
apiVersion: helm.toolkit.fluxcd.io/v2beta1
|
||||
kind: HelmRelease
|
||||
metadata:
|
||||
name: $app
|
||||
namespace: $namespace
|
||||
spec:
|
||||
releaseName: $app
|
||||
chart:
|
||||
spec:
|
||||
chart: $app
|
||||
version: # TODO
|
||||
sourceRef:
|
||||
kind: HelmRepository
|
||||
name: $repo
|
||||
namespace: flux-system
|
||||
interval: 5m
|
||||
valuesFrom:
|
||||
- kind: ConfigMap
|
||||
name: stackspin-$app-values
|
||||
optional: false
|
||||
# Allow overriding values by ConfigMap or Secret
|
||||
- kind: ConfigMap
|
||||
name: stackspin-$app-override
|
||||
optional: true
|
||||
- kind: Secret
|
||||
name: stackspin-$app-override
|
||||
optional: true
|
||||
EOF
|
||||
|
||||
cat <<EOF >$app-values-configmap.yaml
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: stackspin-$app-values
|
||||
namespace: $namespace
|
||||
data:
|
||||
values.yaml: |
|
||||
# TODO verify structure matches chart
|
||||
ingress:
|
||||
enabled: true
|
||||
# Elaborate style
|
||||
annotations:
|
||||
kubernetes.io/tls-acme: "true"
|
||||
hosts:
|
||||
- host: "$subdomain.\${domain}"
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: $app-tls
|
||||
hosts:
|
||||
- "$subdomain.\${domain}"
|
||||
# Bitnami style
|
||||
hostname: "$subdomain.\${domain}"
|
||||
tls: true
|
||||
certManager: true
|
||||
# TODO Configure PVC for data & database
|
||||
# TODO Adjust $app Mailing config
|
||||
# mailer:
|
||||
# enabled: "\${outgoing_mail_enabled}"
|
||||
# host: "\${outgoing_mail_smtp_host}"
|
||||
# port: "\${outgoing_mail_smtp_port}"
|
||||
# username: "\${outgoing_mail_smtp_user}"
|
||||
# password: "\${outgoing_mail_smtp_password}"
|
||||
# fromemail: "\${outgoing_mail_from_address}"
|
||||
# TODO Adjust $app OpenID Connect Single Sign-On Configuration
|
||||
# - name: Stackspin
|
||||
# key: "\${client_id}"
|
||||
# secret: "\${client_secret}"
|
||||
# autoDiscoverUrl: 'https://sso.\${domain}/.well-known/openid-configuration'
|
||||
EOF
|
||||
|
||||
cat <<EOF >$app-pvc.yaml
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: $app-data
|
||||
namespace: $namespace
|
||||
labels:
|
||||
stackspin.net/backupSet: "$app"
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
volumeMode: Filesystem
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
storageClassName: local-path
|
||||
EOF
|
||||
|
||||
ls -l
|
||||
echo "To do: Obtain chart version, check configmap, create oauth secrets if needed" >&2
|
||||
exec $SHELL
|
8
.zshenv
8
.zshenv
|
@ -19,6 +19,7 @@ export XDG_CONFIG_HOME="$HOME/.config"
|
|||
|
||||
export JOURNAL="$(eval "dirname $(grep -1 journals $XDG_CONFIG_HOME/jrnl/jrnl.yaml | tail -1 | cut -d':' -f2-)" ||
|
||||
echo "$DATA/2-box/journal")"
|
||||
export_existing STACKSPIN "$DATA/1-projects/stack/stackspin"
|
||||
export_existing INSTALEE_HOME "$HOME/projects/instalee" "$DATA/1-projects/1-personal/instalee"
|
||||
export_existing VOSK_MODELS "/mnt/data/projects/vosk/models"
|
||||
# adjust programs to use xdg
|
||||
|
@ -100,14 +101,14 @@ export LS_OPTIONS='--color=auto --human-readable --si --group-directories-first
|
|||
export LESS="--raw-control-chars --ignore-case --LONG-PROMPT --jump-target=5 $(test $(less --version | head -1 | cut -f2 -d' ') -ge 590 && echo --incsearch)"
|
||||
# TODO put into config file and use --exclude-from
|
||||
# -x 'System Volume Information'
|
||||
export DIRS_GENERATED="-x generated -x .gradle -x cmake_build -x dist-newstyle -x node_modules -x __pycache__"
|
||||
export DIRS_GENERATED="-x generated -x .gradle -x cmake_build -x dist-newstyle -x node_modules -x __pycache__ -x .pytest_cache"
|
||||
export DIRS_IGNORE_SAFE="-x .cache -x .cpan -x *Cache -x .pyenv -x .local/cache -x share/baloo -x share/cabal -x share/cargo -x share/digikam -x share/gem -x share/JetBrains -x share/tldr -x share/syncthing -x share/Steam/ubuntu* -x share/Steam/package -x share/virtualenv -x share/Zeal -x state/gradle -x state/android -x Ferdi/Partitions -x oh-my-zsh -x wine/drive_c/windows -x vendor/cache $DIRS_GENERATED"
|
||||
export DIRS_IGNORE="-x .sync -x .stfolder -x *build -x .git -x .idea -x env -x out -x cache -x Partitions -x vendor/bundle -x log $DIRS_IGNORE_SAFE"
|
||||
# red stderr
|
||||
test -f "/usr/lib/libstderred.so" && export LD_PRELOAD="/usr/lib/libstderred.so${LD_PRELOAD:+:$LD_PRELOAD}"
|
||||
# software config
|
||||
export OSFONTDIR="$XDG_DATA_HOME"/fonts:/usr/share/fonts
|
||||
export TEXMF=/usr/share/texmf-dist/texmf-context
|
||||
# TODO check context export TEXMF=/usr/share/texmf-dist/texmf-context
|
||||
export KSCRIPT_IDEA_COMMAND=intellij-idea-ultimate-edition
|
||||
## enable pass extensions
|
||||
export PASSWORD_STORE_ENABLE_EXTENSIONS="true"
|
||||
|
@ -148,7 +149,7 @@ FD_BASE="fd --hidden --color=always --no-ignore-vcs"
|
|||
export FZF_DEFAULT_COMMAND="$FD_BASE --type file"
|
||||
export FZF_CTRL_T_COMMAND="$FD_BASE -d 7"
|
||||
## cplusplus - ctest, cmake, ninja
|
||||
SPARE_CORES=$(expr $(lscpu --extended | awk '{print $7}' | sort | uniq -c | head -1 | awk '{print $1}') \* 4 / 5)
|
||||
export SPARE_CORES=$(expr $(lscpu --extended | awk '{print $7}' | sort | uniq -c | head -1 | awk '{print $1}') \* 2 / 3)
|
||||
export CMAKE_BUILD_PARALLEL_LEVEL=${SPARE_CORES}
|
||||
export CTEST_PARALLEL_LEVEL=${SPARE_CORES}
|
||||
export CTEST_PROGRESS_OUTPUT=1
|
||||
|
@ -157,3 +158,4 @@ export CTEST_OUTPUT_ON_FAILURE=1
|
|||
# TODO move to proper place - is also called when firenvim starts
|
||||
#autolight
|
||||
#export TZ='Europe/Dublin'
|
||||
#export TZ='Africa/Nairobi'
|
||||
|
|
|
@ -67,7 +67,7 @@ and configuration for ~less~ and ~fzf~.
|
|||
* Package Setup (moving to [[https://github.com/xeruf/instalee][Instalee]])
|
||||
By Size:
|
||||
: LC_ALL=C pacman -Qi | awk '/^Name/{name=$3} /^Installed Size/{print $4$5, name}' | sort -h
|
||||
Copy list of in stalled packages:
|
||||
Copy list of installed packages:
|
||||
: pacman -Qe | cut -d\ -f1 | xclip -selection clipboard
|
||||
|
||||
- dotfiles :: yadm
|
||||
|
|