#!/bin/sh
# upd - personal update/maintenance script for several Arch Linux machines.
# Behavior is keyed off the machine hostname (xyzinsp, xyzstudio, xyzpp,
# xyzia, xyzaa; ca is reached over ssh). With no arguments it runs fast();
# otherwise single-letter options select individual tasks (see getopts at
# the bottom). NOTE(review): host roles (laptop/desktop/VPS) are not stated
# in this file - inferred only from usage; confirm before relying on them.

# Print an error message to stderr and abort the whole script.
die () {
  echo "error: $*" >&2
  exit 1
}

# Run every task, ordered to work around cross-host timing issues.
all () {
  # pp may finish its own `upd -a` first and start to suspend, which made
  # music()'s rsync to pp fail; so run music() before the long tasks below.
  # Not perfect, but good enough.
  [ "$hostname" = xyzinsp ] && music
  # monthly_misc() uses the qbittorrent-nox WebUI API, so qbt must be
  # restarted before it runs; that is why qb() sits near the top here.
  if [ "$hostname" = xyzia ]; then
    qb
  fi
  fast
  clean
  # Skip reflector on pp and aa, and on studio when insp is reachable on the
  # same network (monthly_misc() on insp pushes its mirrorlist to studio).
  # Pinging the local hostname over IPv6 is somehow very slow, so force IPv4.
  if ! { [ "$hostname" = xyzpp ] || [ "$hostname" = xyzaa ] || { [ "$hostname" = xyzstudio ] && ping -4qc1 xyzinsp > /dev/null;};}; then
    refl
  fi
  monthly_misc
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
    userjs
  fi
}

# Back up data between hosts: git pulls on insp, rsync mirrors between
# insp/studio/pp, and a duplicity backup to ia.
backup () {
  # Each host's files land in backup/<branch> on the destination; insp's
  # config branch is master, other hosts use the hostname minus the xyz prefix.
  case "$hostname" in
    xyzinsp) backup_branch=master;;
    *) backup_branch=${hostname#xyz};;
  esac
  if [ "$hostname" = xyzinsp ]; then
    # Pull repos and branches from studio, so there is one more backup on insp,
    # and duplicity will back it up to ia - one more copy for the 3-2-1 backup
    # rule. cfgl master and pp branches already satisfy 3-2-1: 1. master/pp,
    # 2. studio, 3. gitlab or alternative. But I still copy those on insp
    # because one more copy will not hurt, and all my other data still has 3
    # copies without backing up to gitlab, so why leave out cfgl? It is also
    # handy to have a local dir to search in.
    cd "$HOME/programs/config_local_arch" || exit
    # awk strips the "*" marker from the current branch so every branch name
    # is emitted bare.
    git branch | awk '{print ($1=="*")?$2:$1}' | while read -r branch; do
      git checkout "$branch"
      git pull
      # Push ca, ia, and any other new vps branches (i.e. anything other than
      # master/studio/pp) to codeberg or a future alternative for mirroring.
      # Needed for public codes/data, considering my death may result in all
      # copies on my computers being lost.
      case "$branch" in
        master|studio|pp) ;;
        *) git push;;
      esac
    done
    # git checkout changes mtimes; restore them so rsync will not think these
    # files need to be backed up again.
    # https://stackoverflow.com/q/1964470/9008720
    # https://stackoverflow.com/q/21735435/9008720
    # https://github.com/MestreLion/git-tools
    # aur/git-tools-git
    git restore-mtime
    # Subshell so the umask change does not leak into the rest of the script.
    (
      umask 077
      # Back up studio's and the other branches' cfgs; each secrets repo backs
      # up a branch by being a clone checked out at that branch.
      for dir in $(sudo find /root/programs -maxdepth 1 -mindepth 1 -type d -name 'config_local_arch_secrets*'); do
        sudo -E git -C "$dir" pull
      done
    )
    # rsync backup from studio to insp (-R preserves the full remote paths;
    # the bare ":" source reuses the same remote host as the previous arg).
    rsync -avPR --delete studio:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/studio"
  fi
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ]; then
    # rsync backup from ia
    rsync -avPR --delete ia:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/ia"
    # In the future other VPS servers may also need backup to insp and studio;
    # for now only ia does.
  fi
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
    # rsync backup to studio
    # --files-from makes -a not imply -r, so -r must be given explicitly
    rsync -avPRr --delete --files-from="$XDG_CONFIG_HOME/myconf/upd_rsync_files" / "studio:/home/xyz/backup/$backup_branch"
  fi
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ] || [ "$hostname" = xyzpp ]; then
    # duplicity backup to ia
    # https://wiki.archlinux.org/title/Duplicity
    # Need // for an absolute path, see manpage URL FORMAT section; without //
    # it would store to /home/xyz/home/xyz/...
    # --files-from has a bug that makes a source url of / not work while /home
    # works; more in vq notes
    # --use-agent not working when ssh to pp and insp, works on insp, not sure why
    # --use-agent may time out on the gpg key and fail during a full backup,
    # maybe because the key cached in gpg-agent expires, so it is not used on
    # insp now
    sudo duplicity --ssh-askpass --encrypt-key 9790577D2BE328D46838117ED3F54FE03F3C68D6 --sign-key 05899270DF25BB1EEDF57BE824F769E5D08C9E9A --full-if-older-than 2Y --include /etc/.cfgs --include-filelist "/home/xyz/.config/myconf/upd_rsync_files" --exclude / / "sftp://xyz@ia.flylightning.xyz//home/xyz/backup/$backup_branch"
  fi
}

# Remove caches, old backups, orphaned packages, and stale spool/tmp files.
clean () {
  if [ "$hostname" = xyzinsp ]; then
    # Prune nsxiv's thumbnail cache, then delete the empty dirs left behind.
    nsxiv -c
    # my ways
    # -exec can't replace -execdir here
    find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty -execdir rmdir -- '{}' \+
    # -exec can replace -execdir here
    #find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -execdir rmdir --ignore-fail-on-non-empty -- '{}' \+
    # nsxiv man page way
    #find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty ! -name '.' -exec rmdir -- '{}' \;
  fi
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
    # Keep only the 10 newest prefs.js/user.js backups (sorted by mtime).
    cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/prefsjs_backups" || exit
    # https://stackoverflow.com/a/34862475/9008720
    ls -t | tail -n +11 | tr '\n' '\0' | xargs -0 rm --
    cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/userjs_backups" || exit
    ls -t | tail -n +11 | tr '\n' '\0' | xargs -0 rm --
    # Reset the at(1) job counter.
    # https://unix.stackexchange.com/questions/92095/reset-atq-list-to-zero
    sudo systemctl stop atd
    echo 0 | sudo tee /var/spool/atd/.SEQ > /dev/null
    sudo systemctl start atd
    rm -rf "$XDG_VIDEOS_DIR/recordings/tmp/"
  fi
  # Clean paru's AUR build cache.
  paru -aSc --noconfirm
  # Remove orphans (-Qdtt also catches optional-dependency-only orphans):
  # https://wiki.archlinux.org/title/Pacman/Tips_and_tricks#Removing_unused_packages_(orphans)
  pacman -Qdttq | sudo pacman -Rns --noconfirm -
}

# basic daily stuff
fast () {
  pac
  misc
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ] || [ "$hostname" = xyzpp ]; then
    backup
  fi
}

# Refresh the arkenfox user.js in the Firefox profile (browser must be closed).
userjs () {
  kill $(pidof "$BROWSER")
  # change working dir so arkenfox-cleaner output/paths are cleaner
  cd "$HOME/.mozilla/firefox/xxxxxxxx.fly" || exit
  arkenfox-cleaner -s
  # with multiple firefox profiles it would prompt me to choose, which breaks
  # automation, so explicitly specify one profile
  arkenfox-updater -s -p "$HOME/.mozilla/firefox/xxxxxxxx.fly"
}

# Small daily chores: editor plugins, tldr cache, sdcv history merge, and
# syncing the notes/reminders/pass git repos between insp and pp.
misc () {
  # Update vim-plug plugins non-interactively.
  "$EDITOR" +PlugClean! +PlugUpdate +qa
  if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
    tldr --update
  fi
  if [ "$hostname" = xyzinsp ]; then
    sudo hardcode-fixer
    # Drain pp's sdcv history into insp's, then de-duplicate while preserving
    # order (awk '!a[$0]++' keeps the first occurrence of each line).
    ssh pp '[ -s "$HOME/.local/share/sdcv_history" ] && cat "$HOME/.local/share/sdcv_history" && rm "$HOME/.local/share/sdcv_history"' >> "$XDG_DATA_HOME/sdcv_history"
    awk '!a[$0]++' "$XDG_DATA_HOME/sdcv_history" | sponge "$XDG_DATA_HOME/sdcv_history"
    # temporary solution until I find a way of using git submodule or subtree
    # with `cfg -l`
    git -C "$HOME/.mozilla/firefox/xxxxxxxx.fly/chrome/firefox-csshacks" pull
    git -C "$XDG_DOCUMENTS_DIR/notes" commit --all -m 'update'
    git -C "$XDG_DOCUMENTS_DIR/notes" push
    git -C "$HOME/programs/reminders" commit --all -m 'update'
    git -C "$HOME/programs/reminders" push
    pass git push
  fi
  if [ "$hostname" = xyzpp ]; then
    git -C "$XDG_DOCUMENTS_DIR/notes" pull
    git -C "$HOME/programs/reminders" pull
    pass git pull
  fi
}

# Upgrade repo + AUR packages and collect a summary into $log.
pac () {
  # tee /dev/tty shows live output while still capturing it for parsing.
  pacout="$(sudo pacman --noconfirm -Syu | tee /dev/tty)"
  pacpacs="$(echo "$pacout" | grep -m1 '^Packages' | cut -d' ' -f3-)"
  # Update rust toolchains before paru so paru can compile things with the
  # newest rust if needed.
  [ "$hostname" = xyzinsp ] && rustup update
  aurout="$(paru --color never --noconfirm -aSu | tee /dev/tty)"
  aurpacs="$(echo "$aurout" | grep '^Aur' | cut -d' ' -f3-)"
  # /usr/share/libalpm/hooks/rebuild-detector.hook has a `NeedsTargets` line,
  # which suggests it may checkrebuild only the upgraded packages (like
  # `printf 'zoom\nminiconda3' | checkrebuild`) instead of checking all, so
  # parsing the pacman hook output should be faster than running another
  # `checkrebuild`.
  # Notes about the awk f=1 pattern: https://git.flylightning.xyz/public_archive_codes/tree/sh/mrt
  # About `/^(\(|:)|==>/ {f=0}` (end-of-hook-output markers):
  # - $aurout may start with `:: Looking for devel upgrades...`, and
  #   rebuild-detector.hook may be the last hook in $pacout
  # - `^(4/5)` covers the case where the hook is not the last one
  # - paru's `==> Making package: ...` may follow the hook. Note: paru somehow
  #   still gives color output even with --color never, so matching ^= is
  #   unreliable; match ==> instead.
  # awk uses `if(!a[$2]++)` to skip package names repeated across multiple
  # checkrebuild hook runs, which happens when a python upgrade makes all
  # python packages need a rebuild.
  # TODO: Some packages may be rebuilt later when paru upgrades packages, but
  # they still show up in the upd log. E.g. the pacman-stage hook reports
  # a b c d, then paru upgrades d and the hook reports a b c; the final log
  # shows a b c d instead of a b c.
  checkrebuild_pacs="$(echo "$pacout$aurout" | awk '
    /^\([0-9]+\/[0-9]+\) Checking which packages need to be rebuilt$/ {f=1; next}
    /^(\(|:)|==>/ {f=0}
    f {
      if($2!~"zoom|miniconda3")
        if(!a[$2]++)
          printf("%s ",$2)
    }')"
  # partly stolen from an aur comment
  # sometimes "ERROR: Failure while downloading": https://github.com/neovim/neovim/issues/15709
  # echo 1, printf 1 and yes 1 all work? not sure why
  # aur neovim-nightly-bin had some issue on 12/26/2021? switched to the repo
  # neovim temporarily
  #rm -rf ~/.cache/paru/clone/neovim-nightly-bin/ && echo 1 | PARU_PAGER=cat paru --rebuild --redownload neovim-nightly-bin
  if [ "$hostname" = xyzinsp ]; then
    # Rebuild the qt theme plugins whenever their qt base package was upgraded.
    case "$pacpacs" in *qt5-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt5-styleplugins;; esac
    case "$pacpacs" in *qt6-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt6gtk2;; esac
  fi
  # Snapshot explicit package lists and enabled units for the config backups.
  pacman -Qqme > "$XDG_CONFIG_HOME/myconf/pacman_Qqme"
  pacman -Qqne > "$XDG_CONFIG_HOME/myconf/pacman_Qqne"
  systemctl list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/sye"
  systemctl --user list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/syue"
  # pacdiff defaults to the pacman database, so no need for `sudo -E` with
  # find, but it is a little slower
  # [^-] avoids matching util-linux; \(^\|[^-]\) also matches linux as the
  # first package, e.g. when pacout is only 'linux-6.6.6'
  log="$log
updated pacman packages: $pacpacs
updated aur packages: $aurpacs
pacdiff: $(pacdiff -o | tr '\n' ' ')
checkrebuild: $checkrebuild_pacs
$(if echo "$pacpacs" | grep -q '\(^\|[^-]\)linux-\(megi-\)\?[0-9]'; then echo 'kernel upgraded, need reboot'; fi)
"
}

# Refresh qBittorrent search plugins (service stopped while files change).
qb () {
  sudo systemctl stop qbittorrent-nox@xyz.service
  # Drop all search engine files except the jackett ones and __init__.py,
  # then re-download the wanted set with curlqb.
  find "$XDG_DATA_HOME/qBittorrent/nova3/engines" -maxdepth 1 -type f ! -name 'jackett*' -a ! -name '__init__.py' -delete
  curlqb "$XDG_DATA_HOME/qBittorrent/nova3/engines"
  sudo systemctl start qbittorrent-nox@xyz.service
}

# Regenerate /etc/pacman.d/mirrorlist with reflector, per-host mirror policy.
refl () {
  # why not use http:
  # https://www.reddit.com/r/archlinux/comments/kx149z/should_i_use_http_mirrors/
  # https://www.reddit.com/r/archlinux/comments/ej4k4d/is_it_safe_to_use_not_secured_http_pacman_mirrors/
  # rsync may need changing XferCommand in /etc/pacman.conf:
  # https://www.reddit.com/r/archlinux/comments/mynw6e/rsync_mirrors_with_pacman/
  # need --delay so no badly out-of-sync mirrors
  case "$hostname" in
    xyzia) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country ro --protocol https --delay 1 --fastest 3;;
    # aa now defaults wireguard to ca, so ca's mirrorlist is copied to aa
    # instead (see monthly_misc).
    # old cn mirrorlist code: only 2 cn mirror servers satisfied `--delay 1`;
    # https://mirrors.jlu.edu.cn/archlinux/$repo/os/$arch was better than
    # https://mirrors.neusoft.edu.cn/archlinux/$repo/os/$arch , the latter had issues
    #xyzaa) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country cn --protocol https --delay 1 --fastest 2;;
    *) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country us --protocol https --delay 1 --latest 25 --score 25 --fastest 10;;
  esac
}

# Download new CC-BY tracks and mirror the music library to pp.
music () {
  yt-dlp -f 'bestaudio[ext=opus]/bestaudio' --match-filter 'license=cc-by' --match-filter 'title~=(?i)cc-by' -P "$XDG_MUSIC_DIR/cc-by/scott_buckley" https://soundcloud.com/scottbuckley/tracks
  rsync -avP --delete "$XDG_MUSIC_DIR/cc-by/scott_buckley" pp:/home/xyz/music/cc-by
  rsync -avP --delete "$XDG_MUSIC_DIR/favorite" pp:/home/xyz/music
}

# Monthly chores driven from insp: distribute mirrorlists, maintain the
# qBittorrent instances over the WebUI API, and refresh the Ventoy USB stick.
monthly_misc () {
  if [ "$hostname" = xyzinsp ]; then
    # Copy ca's mirrorlist to aa (aa routes wireguard through ca, see refl()).
    if scp ca:/etc/pacman.d/mirrorlist aa:/tmp; then
      echo 'Wait for aa sudo prompt and enter password:'
      ssh -t aa 'sudo mv /tmp/mirrorlist /etc/pacman.d/mirrorlist && sudo chown root:root /etc/pacman.d/mirrorlist'
    fi
    # if insp can ping studio, insp and studio are on the same network
    if ping -4qc1 xyzstudio > /dev/null && scp /etc/pacman.d/mirrorlist studio:/tmp; then
      echo 'Wait for studio sudo prompt and enter password:'
      ssh -t studio 'sudo mv /tmp/mirrorlist /etc/pacman.d/mirrorlist && sudo chown root:root /etc/pacman.d/mirrorlist'
    fi
    # https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)
    # Only the 10.0.0.4 instance gets its search plugins updated here.
    curl -s -X POST '10.0.0.4:57151/api/v2/search/updatePlugins' &
    for qbt_wireguard_ip in 10.0.0.3 10.0.0.4; do
      # awk joins the tracker list with literal \n sequences for the JSON
      # string; xargs -0 passes the whole blob as one {} substitution.
      curl -s https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt | awk '$0{printf("%s\\n",$0)}' | xargs -0 -I {} curl -s -d json='{"add_trackers":"{}"}' "$qbt_wireguard_ip:57151/api/v2/app/setPreferences" &
      # another way: `jq -r '.[]|"\(.added_on)\t\(.hash)\t\(.name)"'`
      # Delete every archlinux iso torrent except the newest (sort -n on
      # added_on, head -n-1 drops the last/newest line).
      curl -s "$qbt_wireguard_ip:57151/api/v2/torrents/info?category=useful" | jq -r '.[]|[.added_on,.hash,.name]|@tsv' | grep 'archlinux.*\.iso' | sort -n | head -n-1 | awk '{print $2}' | while read -r hash; do
        # need POST to delete torrents. -d defaults to POST, so no need `-X POST`
        curl -s -d "hashes=$hash" -d deleteFiles=true "$qbt_wireguard_ip:57151/api/v2/torrents/delete"
      done &
    done
    wait
    # Nag until the Ventoy USB stick is mounted.
    while ! [ -d /run/media/xyz/Ventoy ]; do
      alarm 0 'Plug in usb flash drive'
      echo 'Plug in usb flash drive'
      sleep 10
    done
    #[ -d /run/media/xyz/Ventoy ] || die "No usb flash drive"
    rsync -vPu studio:~/downloads/torrents/useful/archlinux*.iso /run/media/xyz/Ventoy/archlinux-x86_64.iso
    # checking checksum and gpg signature only on insp is sufficient for me,
    # anything more is too much work
    if curl -s -o /run/media/xyz/Ventoy/archlinux-x86_64.iso.sig https://mirror.fcix.net/archlinux/iso/latest/archlinux-x86_64.iso.sig; then
      gpg --verify /run/media/xyz/Ventoy/archlinux-x86_64.iso.sig || die 'Arch iso gpg signature check failed'
    else
      die 'Arch iso gpg signature download failed'
    fi
    # need to cd to the iso file dir to checksum
    cd /run/media/xyz/Ventoy || exit
    curl -s https://mirror.fcix.net/archlinux/iso/latest/sha256sums.txt | grep archlinux-x86_64\.iso | tee /run/media/xyz/Ventoy/archlinux-x86_64.iso.sha256 | sha256sum -c || die 'Arch iso checksum does not match'
    # staying in /run/media/xyz/Ventoy keeps it busy so it can't be umounted,
    # which makes `ventoy -u` fail
    # must be after `wait`, because the checksum step needs the ventoy dir
    cd || exit
    # Strip the partition number from the device (e.g. /dev/sdb1 -> /dev/sdb).
    disk="$(df /run/media/xyz/Ventoy/ | awk 'END{sub(/[[:digit:]]+$/,"",$1);print $1}')"
    # awk exits 1 when installed and on-disk Ventoy versions match, so the
    # update only runs when they differ.
    sudo ventoy -l "$disk" | awk '/Ventoy:/{a=$2} /Ventoy Version in Disk:/{b=$NF;exit} END{exit((a==b)?1:0)}' && echo y | sudo ventoy -u "$disk"
    umount /run/media/xyz/Ventoy /run/media/xyz/FAT32DIR
    alarm 0 'Unplug usb flash drive'
  fi
}

hostname="$(hostname)"

# On pp, keep the machine awake for the duration of the run; restored below.
[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type nothing

if [ $# -eq 0 ]; then
  fast
else
  while getopts abcfjmMopqr opt; do
    case $opt in
      a)all;;
      b)backup;;
      c)clean;;
      f)fast;;
      j)userjs;;
      m)misc;;
      M)music;;
      o)monthly_misc;;
      p)pac;;
      q)qb;;
      r)refl;;
      \?)exit 1;;
    esac
  done
fi

# $log is only set by pac(); show it and persist it.
[ "$log" ] && printf '%s' "$log" | tee "$XDG_DOCUMENTS_DIR/logs/upd.log"

[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type suspend
# `[ "$hostname" = xyzpp ] && ...` leaves exit status 1 when the check fails,
# unlike checking with `if`.
# Always `exit 0` when the end is reached, so commands like `upd -p && ...`
# keep running.
exit 0