From a6a12ad66d60def11af3c1f4d8d13db4eda74fab Mon Sep 17 00:00:00 2001
From: xyz <gky44px1999@gmail.com>
Date: Tue, 23 Nov 2021 22:39:41 -0800
Subject: refactor

---
 home/xyz/.local/bin/curlkg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)


diff --git a/home/xyz/.local/bin/curlkg b/home/xyz/.local/bin/curlkg
index 68881502..4276e820 100755
--- a/home/xyz/.local/bin/curlkg
+++ b/home/xyz/.local/bin/curlkg
@@ -15,7 +15,7 @@ get_json () {
 	curl -s -G --data-urlencode type=get_uinfo -d outCharset=utf-8 -d start="$1" -d num=$num -d share_uid="$uid" "$homepage_url" | sed 's/.*({\(.*\)}).*/{\1}/'
 }
 
-# concurrent file append with less than 4096 bytes will be atomic on linux, at least on my arch linux machine
+# concurrent file appends of less than 4096 bytes will be atomic on arch linux ext4
 # so the json data needs to be processed before being appended, so that each process appends less than 4096 bytes
 # another approach is to use a loop to write to several files, each corresponding to one process
 # https://stackoverflow.com/questions/1154446/is-file-append-atomic-in-unix
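
For context, a minimal sketch of the two patterns the comment describes, reusing
get_json from the script above; the page offsets, the sed field extraction, and
the out.txt / out.*.part names are placeholders for illustration, not anything
taken from curlkg:

    # the approach the comment settles on: shrink each page to well under
    # 4096 bytes, then append it in one short write, so appends from
    # concurrent jobs are not expected to interleave on the setup described
    for start in 0 15 30 45; do                # arbitrary page offsets
        (
            line=$(get_json "$start" | sed 's/.*"nickname":"\([^"]*\)".*/\1/')  # placeholder field
            printf '%s\n' "$line" >> out.txt
        ) &
    done
    wait

    # the alternative mentioned in the comment: give every job its own file
    # and concatenate afterwards, avoiding any reliance on append atomicity
    for start in 0 15 30 45; do
        get_json "$start" > "out.$start.part" &
    done
    wait
    cat out.*.part > out.txt && rm -f out.*.part
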
-- 