1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
|
#!/bin/sh
# Run every maintenance task that applies to the current host.
all () {
	fast
	clean
	if [ "$hostname" != xyzpp ]; then
		refl
	fi
	[ "$hostname" = xyzia ] && qb
	if [ "$hostname" = xyzinsp ]; then
		music
	fi
	case "$hostname" in
		xyzinsp|xyzpp) userjs;;
	esac
}
# Back up data between machines with git, rsync and duplicity, following the
# 3-2-1 backup rule explained in the comments below.
# Globals: hostname (read), backup_branch/use_agent (set).
# Requires network access, ssh keys and sudo; exits the script on cd failure.
backup () {
# map host name -> branch/directory name used on the backup destinations
case "$hostname" in
xyzinsp) backup_branch=master;;
*) backup_branch=${hostname#xyz};;
esac
if [ "$hostname" = xyzinsp ]; then
# Pull repos and branches from studio, so one more backup on insp or duplicity will backup to ia,for one more backup for 321 backup rule
# cfgl master and pp branches already satisfy 321 rule. Here are three copies: 1. master/pp, 2. studio, 3. gitlab or alternative
# But I still copy those on insp because one more copy will not hurt. And all my other data still satisfies 3 copies without backup to gitlab, so why leave out cfgl? Also it may be nice if I want to use that local dir for searching things.
cd "$HOME/programs/config_local_arch" || exit 1
# iterate over every local branch; the awk strips the "*" marker that
# `git branch` puts in front of the currently checked-out branch
git branch | awk '{print ($1=="*")?$2:$1}' | while read -r branch; do
git checkout "$branch"
git pull
# Push ca and ia branch to codeberg or any future alternative website for mirroring. Needed for public codes/data, considering my death may result in all copies on my computers being lost.
case "$branch" in
ia|ca) git push;;
esac
done
# git checkout will change mtime, need to change back so rsync won't think it needs to backup these files
# https://stackoverflow.com/q/1964470/9008720
# https://stackoverflow.com/q/21735435/9008720
# https://github.com/MestreLion/git-tools
# aur/git-tools-git
git restore-mtime
# subshell so the restrictive umask does not leak into the rest of the run
(
umask 077
# backup studio cfgs, note: it backup a branch by clone/checkout to that branch first
sudo -E git -C /root/programs/config_local_arch_secrets pull
# backup ia cfgs
sudo -E git -C /root/programs/config_local_arch_secrets_ia pull
# backup ca cfgs
sudo -E git -C /root/programs/config_local_arch_secrets_ca pull
)
# rsync backup from studio to insp
rsync -avPR --delete studio:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/studio"
fi
if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzstudio ]; then
# rsync backup from ia
rsync -avPR --delete ia:/home/xyz/.config/qBittorrent :/home/xyz/.local/share/qBittorrent/BT_backup "$HOME/backup/ia"
fi
if [ "$hostname" != xyzstudio ] && [ "$hostname" != xyzia ] && [ "$hostname" != xyzca ]; then
# rsync backup to studio
# --files-from make -a not imply -r, so need to specify explicitly
rsync -avPRr --delete --files-from="$XDG_CONFIG_HOME/myconf/upd_rsync_files" / "studio:/home/xyz/backup/$backup_branch"
fi
if [ "$hostname" != xyzia ] && [ "$hostname" != xyzca ]; then
# duplicity backup to ia
# https://wiki.archlinux.org/title/Duplicity
# Need // for absolute path, see manpage URL FORMAT section. If not use //, will store to /home/xyz/home/xyz/...
# --files-from has a bug, this bug makes source url that is / not working while /home works, more see vq notes
# --use-agent not working when ssh to pp and insp, works on insp, not sure why
# $use_agent is intentionally unquoted below: it word-splits to the flag on
# insp and to nothing at all elsewhere
use_agent=
[ "$hostname" = xyzinsp ] && use_agent='--use-agent'
sudo duplicity --ssh-askpass $use_agent --encrypt-key 9790577D2BE328D46838117ED3F54FE03F3C68D6 --sign-key 05899270DF25BB1EEDF57BE824F769E5D08C9E9A --full-if-older-than 2Y --include /etc/.cfgs --include-filelist "/home/xyz/.config/myconf/upd_rsync_files" --exclude / / "sftp://xyz@ia.flylightning.xyz//home/xyz/backup/$backup_branch"
fi
}
# Clean caches and prune generated files.
# Globals: hostname (read). Uses sudo on insp/pp; package cache cleanup and
# orphan removal run on every host.
clean () {
	if [ "$hostname" = xyzinsp ]; then
		nsxiv -c
		# my ways
		# -exec can't replace -execdir here
		find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty -execdir rmdir -- '{}' \+
		# -exec can replace -execdir here
		#find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -execdir rmdir --ignore-fail-on-non-empty -- '{}' \+
		# nsxiv man page way
		#find "$XDG_CACHE_HOME/nsxiv/" -depth -type d -empty ! -name '.' -exec rmdir -- '{}' \;
	fi
	if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
		# keep only the 10 newest prefs.js / user.js backups
		# https://stackoverflow.com/a/34862475/9008720
		# xargs -r (--no-run-if-empty, GNU) so rm is not invoked with zero
		# operands when there are 10 or fewer files, which would otherwise
		# fail with "rm: missing operand"
		cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/prefsjs_backups" || exit
		ls -t | tail -n +11 | tr '\n' '\0' | xargs -0r rm --
		cd "$HOME/.mozilla/firefox/xxxxxxxx.fly/userjs_backups" || exit
		ls -t | tail -n +11 | tr '\n' '\0' | xargs -0r rm --
		# https://unix.stackexchange.com/questions/92095/reset-atq-list-to-zero
		sudo systemctl stop atd
		echo 0 | sudo tee /var/spool/atd/.SEQ > /dev/null
		sudo systemctl start atd
		rm -rf "$XDG_VIDEOS_DIR/recordings/tmp/"
	fi
	paru -aSc --noconfirm
	# https://wiki.archlinux.org/title/Pacman/Tips_and_tricks#Removing_unused_packages_(orphans)
	# only invoke pacman -Rns when orphans exist; with an empty stdin target
	# list pacman errors out ("no targets specified")
	orphans="$(pacman -Qdttq)"
	if [ -n "$orphans" ]; then
		printf '%s\n' "$orphans" | sudo pacman -Rns --noconfirm -
	fi
}
# basic daily stuff: update packages, misc chores, then back up.
fast () {
	pac
	misc
	# ia and ca are backup destinations in backup(), not sources; skip them
	case "$hostname" in
		xyzia|xyzca) ;;
		*) backup;;
	esac
}
# Clean and update the arkenfox user.js of the main firefox profile.
userjs () {
	profile="$HOME/.mozilla/firefox/xxxxxxxx.fly"
	# run from inside the profile dir so the cleaner acts on it
	cd "$profile" || exit
	# stop the browser first; pidof may print several pids, the command
	# substitution is left unquoted on purpose so they word-split
	kill $(pidof "$BROWSER")
	arkenfox-cleaner -s
	# with multiple firefox profiles the updater prompts for a choice, which
	# breaks automation, so pin the profile explicitly
	arkenfox-updater -s -p "$profile"
}
# Misc periodic chores: editor plugin updates, tldr cache, sdcv history sync,
# and pulling/pushing personal git repos depending on the host.
# Globals: hostname (read). Uses ssh/sudo on insp.
misc () {
"$EDITOR" +PlugClean! +PlugUpdate +qa
if [ "$hostname" = xyzinsp ] || [ "$hostname" = xyzpp ]; then
tldr --update
fi
if [ "$hostname" = xyzinsp ]; then
sudo hardcode-fixer
# pull pp's sdcv (dictionary) history into the local file, emptying it on pp,
# then deduplicate the merged history in place
ssh pp '[ -s "$HOME/.local/share/sdcv_history" ] && cat "$HOME/.local/share/sdcv_history" && rm "$HOME/.local/share/sdcv_history"' >> "$XDG_DATA_HOME/sdcv_history"
awk '!a[$0]++' "$XDG_DATA_HOME/sdcv_history" | sponge "$XDG_DATA_HOME/sdcv_history"
# temporary solution before finding a way of using git submodule or subtree with `cfg -l`
git -C "$HOME/.mozilla/firefox/xxxxxxxx.fly/chrome/firefox-csshacks" pull
git -C "$XDG_DOCUMENTS_DIR/notes" commit --all -m 'update'
git -C "$XDG_DOCUMENTS_DIR/notes" push
git -C "$HOME/programs/reminders" commit --all -m 'update'
git -C "$HOME/programs/reminders" push
pass git push
fi
if [ "$hostname" = xyzpp ]; then
git -C "$XDG_DOCUMENTS_DIR/notes" pull
git -C "$HOME/programs/reminders" pull
pass git pull
fi
}
# Upgrade repo and AUR packages, detect packages needing rebuild, snapshot
# package/unit lists, and append a summary of the run to $log (printed and
# saved at the end of the script).
# Globals: hostname (read); log (appended); pacout/pacpacs/aurout/aurpacs/
# checkrebuild_pacs (set).
pac () {
# tee /dev/tty keeps the upgrade output visible interactively while capturing
# it for the log parsing below
pacout="$(sudo pacman --noconfirm -Syu | tee /dev/tty)"
# extract the package list from pacman's "Packages (N) ..." summary line
pacpacs="$(echo "$pacout" | grep -m1 '^Packages' | cut -d' ' -f3-)"
# Update rust toolchains before paru so paru can compile things in newest rust if needed.
[ "$hostname" = xyzinsp ] && rustup update
aurout="$(paru --color never --noconfirm -aSu --ignore libredwg-git | tee /dev/tty)"
aurpacs="$(echo "$aurout" | grep '^Aur' | cut -d' ' -f3-)"
# /usr/share/libalpm/hooks/rebuild-detector.hook has a line `NeedsTargets` shows it maybe checkrebuild only upgraded packages by `printf 'zoom\nminiconda3' | checkrebuild` instead of maybe check all by `checkrebuild`, so I think query pacman hook will be faster than run another `checkrebuild`
# notes about awk f=1 things see https://git.flylightning.xyz/public_archive_codes/tree/sh/mrt
# about `/^(\(|:|=)/ {f=0}`:
# - consider $aurout start with `^:: Looking for devel upgrades...` , rebuild-detector.hook maybe the last hook to run for $pacout
# - consider ^(4/5), the hook is not the last
# - consider paru `==> Making package: ...`, the hook maybe followed by this. Note: paru somehow still gives color output even if I use --color never, so I can't check with ^=, so I choose to check with ==>
# awk use `if(!a[$2]++)` to check if package name is repeated in multiple checkrebuild pacman hook run, happened when upgrade python cause all python packages need to be rebuilt
# TODO: Some packages maybe are rebuilt later on when paru upgrade packages, but those will still got shown in upd log. Try consider this situation. e.g., when pacman upgrade packages, checkrebuild hook output a b c d packages, then paru upgrade d, now checkrebuild hook output a b c, the final upd log will have a b c d all packages instead of a b c
# collect package names printed between the rebuild-detector hook header and
# the next hook/paru section, skipping zoom/miniconda3 and duplicates
checkrebuild_pacs="$(echo "$pacout$aurout" | awk '
/^\([0-9]+\/[0-9]+\) Checking which packages need to be rebuilt$/ {f=1; next}
/^(\(|:)|==>/ {f=0}
f {
if($2!~"zoom|miniconda3")
if(!a[$2]++)
printf("%s ",$2)
}')"
# part steal from aur comment
# sometimes "ERROR: Failure while downloading": https://github.com/neovim/neovim/issues/15709
# echo 1, printf 1 and yes 1 all works? not sure why
# aur neovim-nightly-bin has some issue on 12/26/2021? switch to community repo neovim temporary
#rm -rf ~/.cache/paru/clone/neovim-nightly-bin/ && echo 1 | PARU_PAGER=cat paru --rebuild --redownload neovim-nightly-bin
# rebuild the qt style plugins whenever their qt base package was upgraded
if [ "$hostname" = xyzinsp ]; then
case "$pacpacs" in
*qt5-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt5-styleplugins;;
esac
case "$pacpacs" in
*qt6-base*) echo 1 | PARU_PAGER=cat paru --rebuild qt6gtk2;;
esac
fi
# snapshot explicitly-installed package lists and enabled units
pacman -Qqme > "$XDG_CONFIG_HOME/myconf/pacman_Qqme"
pacman -Qqne > "$XDG_CONFIG_HOME/myconf/pacman_Qqne"
systemctl list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/sye"
systemctl --user list-unit-files --state=enabled > "$XDG_CONFIG_HOME/myconf/syue"
# pacdiff default use pacman database, so no need `sudo -E` for find, but will be a little bit slower
# [^-] consider util-linux; \(^\|[^-]\) consider linux is the first package, ex: pacout is only 'linux-6.6.6'
# append this run's summary; $log is emitted at the bottom of the script
log="$log
updated pacman packages: $pacpacs
updated aur packages: $aurpacs
pacdiff: $(pacdiff -o | tr '\n' ' ')
checkrebuild: $checkrebuild_pacs
$(if echo "$pacpacs" | grep -q '\(^\|[^-]\)linux-\(megi-\)\?[0-9]'; then echo 'kernel upgraded, need reboot'; fi)
"
}
# Refresh the qBittorrent search engine plugins while the daemon is stopped.
qb () {
	engines="$XDG_DATA_HOME/qBittorrent/nova3/engines"
	sudo systemctl stop qbittorrent-nox@xyz.service
	# delete every regular plugin file except jackett* and __init__.py, then
	# refetch the plugin set
	find "$engines" -maxdepth 1 -type f ! -name 'jackett*' ! -name '__init__.py' -delete
	curlqb "$engines"
	sudo systemctl start qbittorrent-nox@xyz.service
}
# Regenerate the pacman mirrorlist with reflector.
refl () {
	# why not use http:
	# https://www.reddit.com/r/archlinux/comments/kx149z/should_i_use_http_mirrors/
	# https://www.reddit.com/r/archlinux/comments/ej4k4d/is_it_safe_to_use_not_secured_http_pacman_mirrors/
	# rsync may need to change XferCommand in /etc/pacman.conf
	# https://www.reddit.com/r/archlinux/comments/mynw6e/rsync_mirrors_with_pacman/
	# need --delay so no super out of sync mirrors
	case "$hostname" in
		xyzia) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country ro --protocol https --delay 1 --fastest 3;;
		*) sudo reflector --verbose --save /etc/pacman.d/mirrorlist --country us --protocol https --delay 1 --latest 25 --score 25 --fastest 10;;
	esac
}
# Download CC-BY-licensed tracks and mirror the music collection to pp.
music () {
# only keep uploads whose license field or title marks them as CC-BY
yt-dlp -f 'bestaudio[ext=opus]/bestaudio' --match-filter 'license=cc-by' --match-filter 'title~=(?i)cc-by' -P "$XDG_MUSIC_DIR/cc-by/scott_buckley" https://soundcloud.com/scottbuckley/tracks
rsync -avP --delete "$XDG_MUSIC_DIR/cc-by/scott_buckley" pp:/home/xyz/music/cc-by
rsync -avP --delete "$XDG_MUSIC_DIR/favorite" pp:/home/xyz/music
}
# Entry point: no arguments -> run the basic daily tasks; otherwise run only
# the tasks selected by the flags.
hostname="$(hostname)"
# keep pp awake during a possibly long update run; restored further below
[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type nothing
if [ $# -eq 0 ]; then
	fast
else
	while getopts abcfjmMpqr opt; do
		case "$opt" in
			a) all;;
			b) backup;;
			c) clean;;
			f) fast;;
			j) userjs;;
			m) misc;;
			M) music;;
			p) pac;;
			q) qb;;
			r) refl;;
			\?) exit 1;;
		esac
	done
fi
# show and persist the run summary collected by pac()
[ "$log" ] && printf '%s' "$log" | tee "$XDG_DOCUMENTS_DIR/logs/upd.log"
[ "$hostname" = xyzpp ] && gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-type suspend
# `[ "$hostname" = xyzpp ] && ...` if check failed will have exit status of 1, unlike check with `if`
# I decided to always `exit 0` if reached end, so commands like `upd -p && ...` can keep running
exit 0
|