#!/bin/sh

die () {
	echo "error: $*" >&2
	exit 1
}

all () {
	# pp may finish its own `upd -a` first and start to suspend, which makes insp's music() fail to rsync to pp, so the rsync over ssh has to finish before pp finishes upd; putting music() first here is not perfect but good enough
	[ "$hostname" = xyzinsp ] && music
	# monthly_misc() uses the qbittorrent-nox API, so qbt has to be restarted before it runs; putting qb() near the top here is not perfect but good enough
	if [ "$hostname" = xyzia ] || [ "$hostname" = xyzib ]; then
		qb
	fi
	fast
	clean
	# don't run reflector on pp or aa, or on studio when insp is on the same network
	# somehow pinging the local hostname over IPv6 is very slow, so I ping over IPv4 instead
	if ! { [ "$hostname" = xyzpp ] || [ "$hostname" = xyzaa ] || { [ "$hostname" = xyzstudio ] && ping -4qc1 xyzinsp > /dev/null;};}; then
		refl
	fi
	monthly_misc
	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
		userjs
	fi
}

backup () {
	case "$hostname" in
		xyzinsp) backup_branch=master;;
		*) backup_branch=${hostname#xyz};;
	esac
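	# e.g. hostname=xyzstudio gives backup_branch=studio (the xyz prefix is stripped); xyzinsp is special-cased to master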

	if [ "$hostname" = xyzinsp ]; then
		# Pull repos and branches from studio so there is one more copy on insp and duplicity backs it up to ia, giving one more copy toward the 3-2-1 backup rule
		# cfgl's master and pp branches already satisfy the 3-2-1 rule; the three copies are: 1. master/pp, 2. studio, 3. gitlab or an alternative
		# But I still copy those onto insp because one more copy doesn't hurt. All my other data still has 3 copies without backing up to gitlab, so why leave cfgl out? It is also handy to have a local dir to search through.
		cd "$HOME/programs/config_local_arch" || exit
		git branch | awk '{print ($1=="*")?$2:$1}' | while read -r branch; do
			git checkout "$branch"
			git pull
			# Push ca, ia, and other new vps branches (which means any branches other than master/studio/pp) to codeberg or any future alternative website for mirroring. Needed for public codes/data, considering my death may result in all copies on my computers being lost.
			case "$branch" in
				master|studio|pp) ;;
				*) git push;;
			esac
		done
		# git checkout changes mtimes; restore them so rsync won't think these files need to be backed up again
		# https://stackoverflow.com/q/1964470/9008720
		# https://stackoverflow.com/q/21735435/9008720
		# https://github.com/MestreLion/git-tools
		# aur/git-tools-git
		git restore-mtime

		(
			umask 077
			# back up studio's and other branches' cfgs; note: a branch is backed up by cloning/checking out that branch first
			for dir in $(sudo find /root/programs -maxdepth 1 -mindepth 1 -type d -name 'config_local_arch_secrets*'); do
				sudo -E git -C "$dir" pull
			done
		)

		# rsync backup from studio to insp
		rsync -avPR --delete studio:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/studio"

	fi
	
	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ]; then
		# rsync backup from ia and ib
		rsync -avPR --delete xyz@ia.flylightning.xyz:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/ia"
		rsync -avPR --delete xyz@ib.flylightning.xyz:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/ib"
		# In the future other VPS servers may also need to be backed up to insp and studio; for now both ia and ib do
	fi

	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
		# rsync backup to studio
		# with --files-from, -a does not imply -r, so -r has to be specified explicitly
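		# e.g. a (hypothetical) entry /home/xyz/.config/myconf in upd_rsync_files would land at studio:/home/xyz/backup/$backup_branch/home/xyz/.config/myconf, since each listed path is recreated relative to the source /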
		rsync -avPRr --delete --files-from="$XDG_CONFIG_HOME/myconf/upd_rsync_files" / "studio:/home/xyz/backup/$backup_branch"
	fi

	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ] || [ "$hostname" = xyzpp ]; then
		# duplicity backup to ib
		# https://wiki.archlinux.org/title/Duplicity
		# Need // for an absolute path, see the manpage URL FORMAT section. Without //, files end up under /home/xyz/home/xyz/...
		# --files-from has a bug that makes a source url of / fail while /home works, see vq notes for more
		# --use-agent doesn't work when ssh'ed into pp and insp but works on insp directly, not sure why
		# --use-agent may also time out on the gpg key and fail during a full backup, possibly because the key cached in gpg-agent expires, so I'm not using --use-agent on insp now
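		# e.g. sftp://xyz@ib.flylightning.xyz//home/xyz/backup targets the absolute path /home/xyz/backup, while a single / would be relative to the login dir (hence the /home/xyz/home/xyz/... mishap above)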
		sudo duplicity --ssh-askpass --encrypt-key 9790577D2BE328D46838117ED3F54FE03F3C68D6 --sign-key 05899270DF25BB1EEDF57BE824F769E5D08C9E9A --full-if-older-than 2Y --include /etc/.cfgs --include-filelist "/home/xyz/.config/myconf/upd_rsync_files" --exclude / / "sftp://xyz@ib.flylightning.xyz//home/xyz/backup/$backup_branch"
	fi
}

clean () {
	if [ "$hostname" = xyzinsp ]; then
		nsxiv -c
		# my ways
		# -exec can't replace -execdir here
		find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty -execdir rmdir -- '{}' \+
		# -exec can replace -execdir here
		#find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -execdir rmdir --ignore-fail-on-non-empty -- '{}' \+
		# nsxiv man page way
		#find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty ! -name '.' -exec rmdir -- '{}' \;

	fi

	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
		cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/prefsjs_backups" || exit
		# https://stackoverflow.com/a/34862475/9008720
		ls -t | tail -n +11 | tr '\n' '\0' | xargs -0 rm --
		cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/userjs_backups" || exit
		ls -t | tail -n +11 | tr '\n' '\0' | xargs -0 rm --
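		# (the two pipelines above keep only the 10 newest backups: ls -t sorts newest first and tail -n +11 selects the 11th entry onward for deletion; with 10 or fewer files rm just errors harmlessly)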
		# https://unix.stackexchange.com/questions/92095/reset-atq-list-to-zero
		sudo systemctl stop atd
		echo 0 | sudo tee /var/spool/atd/.SEQ > /dev/null
		sudo systemctl start atd
		rm -rf "$XDG_VIDEOS_DIR/recordings/tmp/"
	fi

	paru -aSc --noconfirm

	# https://wiki.archlinux.org/title/Pacman/Tips_and_tricks#Removing_unused_packages_(orphans)
	pacman -Qdttq | sudo pacman -Rns --noconfirm -
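	# note: with no orphans, -Qdttq prints nothing and the -Rns side errors out on empty stdin; a guard like this commented sketch would avoid that:
	#orphans="$(pacman -Qdttq)" && printf '%s\n' "$orphans" | sudo pacman -Rns --noconfirm -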
}

# basic daily stuff
fast () {
	pac
	misc
	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ] || [ "$hostname" = xyzpp ]; then
		backup
	fi
}

userjs () {
	kill $(pidof "$BROWSER")
	# change the working dir for arkenfox-cleaner
	cd "$HOME/.mozilla/firefox/xxxxxxxx.fly" || exit
	arkenfox-cleaner -s
	# with multiple firefox profiles it prompts me to choose one, which breaks automation, so I explicitly specify the profile
	arkenfox-updater -s -p "$HOME/.mozilla/firefox/xxxxxxxx.fly"
}

misc () {
	"$EDITOR" +PlugClean! +PlugUpdate +qa

	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
		tldr --update
	fi

	if [ "$hostname" = xyzinsp ]; then
		sudo hardcode-fixer
		ssh pp '[ -s "$HOME/.local/share/sdcv_history" ] && cat "$HOME/.local/share/sdcv_history" && rm "$HOME/.local/share/sdcv_history"' >> "$XDG_DATA_HOME/sdcv_history"
		awk '!a[$0]++' "$XDG_DATA_HOME/sdcv_history" | sponge "$XDG_DATA_HOME/sdcv_history"
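		# `awk '!a[$0]++'` keeps only the first occurrence of each line (e.g. 'cat', 'dog', 'cat' becomes 'cat', 'dog'); sponge allows rewriting the file in place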
		# temporary solution until I find a way of using git submodule or subtree with `cfg -l`
		git -C "$HOME/.mozilla/firefox/xxxxxxxx.fly/chrome/firefox-csshacks" pull
		git -C "$XDG_DOCUMENTS_DIR/notes" commit --all -m 'update'
		git -C "$XDG_DOCUMENTS_DIR/notes" push
		git -C "$HOME/programs/reminders" commit --all -m 'update'
		git -C "$HOME/programs/reminders" push
		pass git push
	fi

	if [ "$hostname" = xyzpp ]; then
		git -C "$XDG_DOCUMENTS_DIR/notes" pull
		git -C "$HOME/programs/reminders" pull
		pass git pull
	fi
}

pac () {
	pacout="$(sudo pacman --noconfirm -Syu | tee /dev/tty)"
	pacpacs="$(echo "$pacout" | grep -m1 '^Packages' | cut -d' ' -f3-)"
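	# e.g. a line like 'Packages (2) linux-6.6.8.arch1-1 vim-9.1.0-1' yields pacpacs='linux-6.6.8.arch1-1 vim-9.1.0-1' (fields 3 onward)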
	# Update rust toolchains before paru so paru can compile things in newest rust if needed.
	[ "$hostname" = xyzinsp ] && rustup update
	aurout="$(paru --color never --noconfirm -aSu | tee /dev/tty)"
	aurpacs="$(echo "$aurout" | grep '^Aur' | cut -d' ' -f3-)"
	# /usr/share/libalpm/hooks/rebuild-detector.hook has a `NeedsTargets` line, which suggests it only checkrebuilds the upgraded packages (like `printf 'zoom\nminiconda3' | checkrebuild`) instead of checking everything (plain `checkrebuild`), so parsing the pacman hook output should be faster than running another `checkrebuild`
	# notes about the awk f=1 pattern: see https://git.flylightning.xyz/public_archive_codes/tree/sh/mrt
	# about the `/^(\(|:)|==>/ {f=0}` line:
	# - consider that $aurout starts with `:: Looking for devel upgrades...` and that rebuild-detector.hook may be the last hook to run in $pacout
	# - consider `(4/5)`-style lines, for when the hook is not the last one
	# - consider paru's `==> Making package: ...`, which may follow the hook. Note: paru somehow still gives color output even with --color never, so I can't match on ^= and match on ==> instead
	# the awk uses `if(!a[$2]++)` to skip package names repeated across multiple checkrebuild hook runs, which happens e.g. when a python upgrade makes all python packages need a rebuild
	# TODO: some packages may get rebuilt later when paru upgrades packages, but they still show up in the upd log; handle this situation. E.g. when pacman upgrades packages the checkrebuild hook reports a b c d, then paru upgrades d and the hook now reports a b c, but the final upd log lists a b c d instead of a b c
	checkrebuild_pacs="$(echo "$pacout$aurout" | awk '
	/^\([0-9]+\/[0-9]+\) Checking which packages need to be rebuilt$/ {f=1; next}
	/^(\(|:)|==>/ {f=0}
	f {
		if($2!~"zoom|miniconda3")
			if(!a[$2]++)
				printf("%s ",$2)
	}')"
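	# example of the hook output the awk above parses (format assumed from rebuild-detector):
	#   (5/5) Checking which packages need to be rebuilt
	#   foreign zoom       <- filtered out by the zoom|miniconda3 check
	#   foreign qt6gtk2    <- collected into checkrebuild_pacs
	#   :: Running post-transaction hooks...   <- a line starting with '(' or ':' (or containing '==>') stops the capture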
	# partly stolen from an AUR comment
	# sometimes "ERROR: Failure while downloading": https://github.com/neovim/neovim/issues/15709
	# echo 1, printf 1 and yes 1 all seem to work, not sure why
	# aur neovim-nightly-bin had some issue on 12/26/2021, so switched to the community repo neovim temporarily
	#rm -rf ~/.cache/paru/clone/neovim-nightly-bin/ && echo 1 | PARU_PAGER=cat paru --rebuild --redownload neovim-nightly-bin
	if [ "$hostname" = xyzinsp ]; then
		case "$pacpacs" in
			*qt5-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt5-styleplugins;;
		esac
		case "$pacpacs" in
			*qt6-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt6gtk2;;
		esac
	fi
	pacman -Qqme > "$XDG_CONFIG_HOME/myconf/pacman_Qqme"
	pacman -Qqne > "$XDG_CONFIG_HOME/myconf/pacman_Qqne"
	systemctl list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/sye"
	systemctl --user list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/syue"
	# pacdiff uses the pacman database by default, so no `sudo -E` is needed for find, though it is a little slower
	# [^-] avoids matching util-linux; \(^\|[^-]\) handles linux being the first package, e.g. when $pacpacs is only 'linux-6.6.6'
	log="$log
updated pacman packages: $pacpacs
updated aur packages: $aurpacs
pacdiff: $(pacdiff -o | tr '\n' ' ')
checkrebuild: $checkrebuild_pacs
$(if echo "$pacpacs" | grep -q '\(^\|[^-]\)linux-\(megi-\)\?[0-9]'; then echo 'kernel upgraded, need reboot'; fi)
"
}

qb () {
	sudo systemctl stop qbittorrent-nox@xyz.service
	find "$XDG_DATA_HOME/qBittorrent/nova3/engines" -maxdepth 1 -type f ! -name 'jackett*' -a ! -name '__init__.py' -delete
	curlqb "$XDG_DATA_HOME/qBittorrent/nova3/engines"
	sudo systemctl start qbittorrent-nox@xyz.service
}

refl () {
	# why not use http:
	# https://www.reddit.com/r/archlinux/comments/kx149z/should_i_use_http_mirrors/
	# https://www.reddit.com/r/archlinux/comments/ej4k4d/is_it_safe_to_use_not_secured_http_pacman_mirrors/
	# rsync may need to change XferCommand in /etc/pacman.conf
	# https://www.reddit.com/r/archlinux/comments/mynw6e/rsync_mirrors_with_pacman/
	# need --delay so badly out-of-sync mirrors are excluded
	case "$hostname" in
		xyzia|xyzib) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country ro --protocol https --delay 1 --fastest 3;;
		# aa now defaults to wireguard through ca, so I copy ca's mirrorlist to aa instead
		# old cn mirrorlist code: only 2 cn mirror servers satisfied `--delay 1`; https://mirrors.jlu.edu.cn/archlinux/$repo/os/$arch is better than https://mirrors.neusoft.edu.cn/archlinux/$repo/os/$arch , which had issues
		#xyzaa) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country cn --protocol https --delay 1 --fastest 2;;
		*) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country us --protocol https --delay 1 --latest 25 --score 25 --fastest 10;;
	esac
}

music () {
	yt-dlp -f 'bestaudio[ext=opus]/bestaudio' --match-filter 'license=cc-by' --match-filter 'title~=(?i)cc-by' -P "$XDG_MUSIC_DIR/cc-by/scott_buckley" https://soundcloud.com/scottbuckley/tracks
	rsync -avP --delete "$XDG_MUSIC_DIR/cc-by/scott_buckley" pp:/home/xyz/music/cc-by
	rsync -avP --delete "$XDG_MUSIC_DIR/favorite" pp:/home/xyz/music
}

monthly_misc () {
	if [ "$hostname" = xyzinsp ]; then
		if scp ca:/etc/pacman.d/mirrorlist aa:/tmp; then
			echo 'Wait for aa sudo prompt and enter password:'
			ssh -t aa 'sudo mv /tmp/mirrorlist /etc/pacman.d/mirrorlist && sudo chown root:root /etc/pacman.d/mirrorlist'
		fi
		# if insp can ping studio, insp and studio are on the same network
		if ping -4qc1 xyzstudio > /dev/null && scp /etc/pacman.d/mirrorlist studio:/tmp; then
			echo 'Wait for studio sudo prompt and enter password:'
			ssh -t studio 'sudo mv /tmp/mirrorlist /etc/pacman.d/mirrorlist && sudo chown root:root /etc/pacman.d/mirrorlist'
		fi

		# https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)
		curl -sS -X POST '10.0.0.4:57151/api/v2/search/updatePlugins' &
		curl -sS -X POST '10.0.0.8:57151/api/v2/search/updatePlugins' &
		for qbt_wireguard_ip in 10.0.0.3 10.0.0.4 10.0.0.8; do
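			# the awk below joins the tracker URLs with literal '\n' sequences and xargs -0 feeds the whole list as one {} substitution, so the JSON value becomes "url1\nurl2\n..."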
			curl -sS https://raw.githubusercontent.com/ngosang/trackerslist/master/trackers_best.txt | awk '$0{printf("%s\\n",$0)}' | xargs -0 -I {} curl -sS -d json='{"add_trackers":"{}"}' "$qbt_wireguard_ip:57151/api/v2/app/setPreferences" &
			# another way: `jq -r '.[]|"\(.added_on)\t\(.hash)\t\(.name)"'`
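			# the pipeline below lists torrents in the 'useful' category, keeps only the archlinux iso entries, sorts them by added_on, and deletes all but the newest one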
			curl -sS "$qbt_wireguard_ip:57151/api/v2/torrents/info?category=useful" | jq -r '.[]|[.added_on,.hash,.name]|@tsv' | grep 'archlinux.*\.iso' | sort -n | head -n-1 | awk '{print $2}' | while read -r hash; do
				# deleting torrents needs a POST; -d defaults to POST, so no `-X POST` is needed
				curl -sS -d "hashes=$hash" -d deleteFiles=true "$qbt_wireguard_ip:57151/api/v2/torrents/delete"
			done &
		done
		wait

		while ! [ -d /run/media/xyz/Ventoy ]; do
			alarm 0 'Plug in usb flash drive'
			echo 'Plug in usb flash drive'
			sleep 10
		done
		#[ -d /run/media/xyz/Ventoy ] || die "No usb flash drive"

		rsync -vPu studio:~/downloads/torrents/useful/archlinux*.iso /run/media/xyz/Ventoy/archlinux-x86_64.iso
		# checking the checksum and gpg signature only on insp is sufficient for me; anything more is too much work
		if curl -sS -o /run/media/xyz/Ventoy/archlinux-x86_64.iso.sig https://mirror.fcix.net/archlinux/iso/latest/archlinux-x86_64.iso.sig; then
			gpg --verify /run/media/xyz/Ventoy/archlinux-x86_64.iso.sig || die 'Arch iso gpg signature check failed'
		else
			die 'Arch iso gpg signature download failed'
		fi
		# need to cd to the iso file's dir so sha256sum -c can find it
		cd /run/media/xyz/Ventoy || exit
		curl -sS https://mirror.fcix.net/archlinux/iso/latest/sha256sums.txt | grep archlinux-x86_64\.iso | tee /run/media/xyz/Ventoy/archlinux-x86_64.iso.sha256 | sha256sum -c || die 'Arch iso checksum does not match'
		# staying in /run/media/xyz/Ventoy keeps it busy so it can't be unmounted, which makes `ventoy -u` fail
		# needs to come after `wait` and the checksum, because the checksum has to run in the Ventoy dir
		cd || exit

		disk="$(df /run/media/xyz/Ventoy/ | awk 'END{sub(/[[:digit:]]+$/,"",$1);print $1}')"
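		# e.g. df reports /dev/sdb1 for the Ventoy mount and stripping the trailing partition digits gives the whole-disk device /dev/sdb (note: this simple strip would not handle NVMe-style names like nvme0n1p1)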
		sudo ventoy -l "$disk" | awk '/Ventoy:/{a=$2} /Ventoy Version in Disk:/{b=$NF;exit} END{exit((a==b)?1:0)}' && echo y | sudo ventoy -u "$disk"
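		# the awk above exits 0 only when the installed Ventoy version and the version on the disk differ, so `ventoy -u` runs only when an upgrade is actually needed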
		umount /run/media/xyz/Ventoy /run/media/xyz/FAT32DIR

		alarm 0 'Unplug usb flash drive'
	fi
}

hostname="$(hostname)"
[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type nothing
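# e.g. `upd` with no flags runs fast(); `upd -pq` runs pac() then qb(), in the order the flags are given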
if [ $# -eq 0 ]; then
	fast
else
	while getopts abcfjmMopqr opt; do
		case $opt in
			a)all;;
			b)backup;;
			c)clean;;
			f)fast;;
			j)userjs;;
			m)misc;;
			M)music;;
			o)monthly_misc;;
			p)pac;;
			q)qb;;
			r)refl;;
			\?)exit 1;;
		esac
	done
fi
[ "$log" ] && printf '%s' "$log" | tee "$XDG_DOCUMENTS_DIR/logs/upd.log"
[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type suspend
# `[ "$hostname" = xyzpp ] && ...` leaves an exit status of 1 when the test fails, unlike a check written with `if`
# I decided to always `exit 0` when the end is reached, so commands like `upd -p && ...` keep running
exit 0