dockerfiles

Docker files for home server usage
Log | Files | Refs

server.sh (23154B)


      1 #!/usr/bin/env bash
      2 
      3 # functions
      4 function find_directory {
      5 	find "$directory_home" -maxdepth 3 -mount -type d -name "$1" -not -path "*/docker/stagit/*" 2>/dev/null
      6 }
      7 function find_remote {
      8 	rclone listremotes | awk -v remote="$1" '$0 ~ remote {print $0;exit}'
      9 }
     10 function check_root {
     11 	if [ "$EUID" -ne 0 ]; then
     12 		echo "Please run as root"
     13 		exit 0
     14 	fi
     15 }
     16 function check_not_root {
     17 	if [ "$EUID" -eq 0 ]; then
     18 		echo "Don't run this function as root"
     19 		exit 0
     20 	fi
     21 }
     22 function check_depends {
     23 	dependencies=(aria2c awk bash bc docker docker-compose ffmpeg git gnuplot journalctl logname media-sort mp3val opustags phockup pip3 python3 qpdf rbw rclone scour sed seq sort uniq vnstat we-get yt-dlp)
     24 	echo "Checking dependencies..."
     25 	for i in "${dependencies[@]}"; do
     26 		echo -n "$i: "
     27 		if [[ $(command -v "$i") ]]; then
     28 			echo -e "\e[32mpresent\e[39m"
     29 		else
     30 			echo -e "\e[31mmissing\e[39m"
     31 		fi
     32 	done
     33 	exit 1
     34 }
     35 function umount_remote {
     36 	if [ -z "$2" ]; then
     37 		working_directory="$(find_directory "$1")"
     38 	else
     39 		working_directory="$(find_directory "$2")"
     40 	fi
     41 	umount "$working_directory"
     42 	fusermount -uz "$working_directory" 2>/dev/null
     43 	find "$working_directory" -maxdepth 1 -mount -type d -not -path "*/\.*" -empty -delete
     44 }
     45 function password_manager {
     46 	check_not_root
     47 	case "$1" in
     48 	addr) rbw get --full "$2" | awk '/URI:/ {print $2}' ;;
     49 	full) rbw get --full "$2" ;;
     50 	pass) rbw get "$2" ;;
     51 	sync) rbw sync ;;
     52 	user) rbw get --full "$2" | awk '/Username:/ {print $2}' ;;
     53 	*) rbw get "$2" ;;
     54 	esac
     55 }
     56 function duolingo_streak {
     57 	check_not_root
     58 	# check api is installed
     59 	[[ -d "$(find_directory $directory_config)/duolingo" ]] || git clone https://github.com/KartikTalwar/Duolingo.git "$(find_directory $directory_config)/duolingo"
     60 	# cd to git dir to include module
     61 	cd "$(find_directory $directory_config)/duolingo" || return
     62 	# write script
     63 	password_manager sync
     64 	{
     65 		printf "#!/usr/bin/env python3\\n\\n"
     66 		printf "import duolingo\\n"
     67 		printf "lingo  = duolingo.Duolingo('%s', password='%s')\\n" "$(password_manager user duolingo)" "$(password_manager pass duolingo)"
     68 		printf "lingo.buy_streak_freeze()"
     69 	} >"streak-freeze.py"
     70 	# run and remove script
     71 	python "streak-freeze.py"
     72 	rm "streak-freeze.py"
     73 }
     74 function blog_duolingo_rank {
     75 	duo_username="$(awk -F'[/()]' '/Duolingo/ {print $5}' "$(find_directory blog."$domain")"/content/about.md)"
     76 	rank_filename="$(find_directory blog."$domain")/content/posts/logging-duolingo-ranks-over-time.md"
     77 	echo -n "Fetching data for $duo_username... "
     78 	page_source="$(curl -s https://duome.eu/"$duo_username")"
     79 	rank_lingot="$(printf %s "$page_source" | awk -F"[#><]" '/icon lingot/ {print $15}')"
     80 	rank_streak="$(printf %s "$page_source" | awk -F"[#><]" '/icon streak/{getline;print $15}')'"
     81 	echo -e "$i \e[32mdone\e[39m"
     82 	echo -n "Appending ranks to page... "
     83 	echo "| $(date +%F) | $(date +%H:%M) | $rank_streak | $rank_lingot |" | tr -d \' >>"$rank_filename"
     84 	echo -e "$i \e[32mdone\e[39m"
     85 	lastmod "$rank_filename"
     86 }
     87 function docker_build {
     88 	cd "$directory_script" || exit
     89 	# write env file, overwriting any existing
     90 	password_manager sync
     91 	{
     92 		printf "DOMAIN=%s\\n" "$domain"
     93 		printf "PUID=%s\\n" "$(id -u)"
     94 		printf "PGID=%s\\n" "$(id -g)"
     95 		printf "TZ=%s\\n" "$(timedatectl status | awk '/Time zone/ {print $3}')"
     96 		printf "DOCKDIR=%s\\n" "$(find_directory docker)"
     97 		printf "SYNCDIR=%s\\n" "$(find_directory vault)"
     98 		printf "TANKDIR=%s\\n" "$directory_tank"
     99 		printf "DBPASSWORD=%s\\n" "$(password_manager pass postgresql)"
    100 		printf "VPNUSER=%s\\n" "$(password_manager user transmission-openvpn)"
    101 		printf "VPNPASS=%s\\n" "$(password_manager pass transmission-openvpn)"
    102 		printf "HTPASSWD=%s\\n" "$(docker exec -it caddy caddy hash-password --plaintext "$(password_manager pass htpasswd)" | base64 -w0)"
    103 		printf "TODOSECRET=%s\\n" "$(password_manager full vikunja | awk '/secret:/ {print $2}')"
    104 	} >"$directory_script/.env"
    105 	# make network, if not existing
    106 	if ! printf "%s" "$(docker network ls)" | grep -q "proxy"; then
    107 		echo Creating docker network
    108 		docker network create proxy
    109 	fi
    110 	# start containers
    111 	echo Starting docker containers
    112 	docker-compose up -d --remove-orphans
    113 	# delete temporary env file
    114 	if [[ -f "$directory_script/.env" ]]; then
    115 		echo Deleting detected env file
    116 		rm "$directory_script/.env"
    117 	fi
    118 	# clean up existing stuff
    119 	echo Cleaning up existing docker files
    120 	for i in volume image system network; do
    121 		docker "$i" prune -f
    122 	done
    123 	docker system prune -af
    124 }
# Snapshot the NAS remote's full file listing into a git-tracked,
# xz-compressed log so media changes can be tracked over time.
function media_logger {
	# specify directories
	git_directory="$(find_directory logger)"
	file_git_log="$git_directory/media.log"
	log_remote="$(find_remote nas)"
	# run git against the logger repo without having to cd into it
	git_logger="git --git-dir=$git_directory/.git --work-tree=$git_directory"
	# git configuration
	if [ ! -e "$git_directory" ]; then
		printf "Logger directory not found, quitting...\n"
		exit 1
	fi
	if [ ! -e "$git_directory/.git" ]; then
		printf "Initialising blank git repo...\n"
		$git_logger init
	fi
	# only the compressed copy is kept between runs; unpack it so the
	# fresh listing below replaces the tracked plain-text file
	if [ -e "$file_git_log.xz" ]; then
		printf "Decompressing existing xz archive...\n"
		xz -d "$file_git_log.xz"
	fi
	if [ -e "$file_git_log" ]; then
		printf "Removing existing log file...\n"
		rm "$file_git_log"
	fi
	printf "Creating log...\n"
	# recursive listing, sorted by path (field 2) for stable diffs
	rclone ls "$log_remote" | sort -k2 >"$file_git_log"
	printf "Appending size information...\n"
	rclone size "$log_remote" >>"$file_git_log"
	printf "Commiting log file to repository...\n"
	$git_logger add "$file_git_log"
	$git_logger commit -m "Update: $(date +%F)"
	# NOTE(review): the .xz was already unpacked above, so this branch
	# looks unreachable — presumably defensive; confirm before removing
	if [ -e "$file_git_log.xz" ]; then
		printf "Removing xz archive...\n"
		rm "$file_git_log.xz"
	fi
	printf "Compressing log file...\n"
	xz "$file_git_log"
	printf "Compressing repository...\n"
	# keep pack files small so the repo stays cheap to copy around
	$git_logger config pack.windowMemory 10m
	$git_logger config pack.packSizeLimit 20m
	$git_logger gc --aggressive --prune
	printf "Log complete!\n"
}
    167 function parse_magnets {
    168 	# sources and destinations
    169 	cd "$(find_directory vault)" || exit
    170 	# check for aria
    171 	if [ ! -x "$(command -v aria2c)" ]; then # not installed
    172 		echo "Aria doesn't seem to be installed. Exiting" && exit
    173 	fi
    174 	# trackers
    175 	trackers_list=(
    176 		"udp://9.rarbg.to:2710/announce"
    177 		"udp://denis.stalker.upeer.me:6969/announce"
    178 		"udp://exodus.desync.com:6969/announce"
    179 		"udp://ipv6.tracker.harry.lu:80/announce"
    180 		"udp://open.demonii.si:1337/announce"
    181 		"udp://open.stealth.si:80/announce"
    182 		"udp://p4p.arenabg.com:1337/announce"
    183 		"udp://retracker.lanta-net.ru:2710/announce"
    184 		"udp://tracker.coppersurfer.tk:6969/announce"
    185 		"udp://tracker.cyberia.is:6969/announce"
    186 		"udp://tracker.leechers-paradise.org:6969/announce"
    187 		"udp://tracker.open-internet.nl:6969/announce"
    188 		"udp://tracker.opentrackr.org:1337/announce"
    189 		"udp://tracker.pirateparty.gr:6969/announce"
    190 		"udp://tracker.tiny-vps.com:6969/announce"
    191 		"udp://tracker.torrent.eu.org:451/announce"
    192 	)
    193 	for i in "${trackers_list[@]}"; do
    194 		trackers="$i,$trackers"
    195 	done
    196 	# magnet loop
    197 	for j in *.magnet; do
    198 		[ -f "$j" ] || break
    199 		timeout 3m aria2c --bt-tracker="$trackers" --bt-metadata-only=true --bt-save-metadata=true "$(cat "$j")" && rm "$j"
    200 		# wait for files to be picked up
    201 		sleep 10s
    202 	done
    203 	# removed added files
    204 	for k in *.added; do
    205 		[ -f "$k" ] || break
    206 		for i in *.added; do rm "$i"; done
    207 	done
    208 }
    209 function sort_media {
    210 	# variables
    211 	source=seedbox
    212 	destination=/mnt/media/
    213 	# check mounts
    214 	mount_remote mount "$source"
    215 	# main sorting process
    216 	dir_import="$(find_directory seedbox)/"
    217 	if [[ -d "$dir_import" ]]; then
    218 		dir_tv="$destination/videos/television"
    219 		dir_mov="$destination/videos/movies"
    220 		temp_tv="{{ .Name }}/{{ .Name }} S{{ printf \"%02d\" .Season }}E{{ printf \"%02d\" .Episode }}{{ if ne .ExtraEpisode -1 }}-{{ printf \"%02d\" .ExtraEpisode }}{{end}}.{{ .Ext }}"
    221 		temp_mov="{{ .Name }} ({{ .Year }})/{{ .Name }}.{{ .Ext }}"
    222 		media-sort --action copy --concurrency 1 --accuracy-threshold 90 --tv-dir "$dir_tv" --movie-dir "$dir_mov" --tv-template "$temp_tv" --movie-template "$temp_mov" --recursive --overwrite-if-larger --extensions "mp4,m4v,mkv" "$dir_import"
    223 	else
    224 		printf "Import directory not found.\\n"
    225 		exit 0
    226 	fi
    227 	# unmount and remove after
    228 	umount_remote "$source"
    229 }
    230 function mount_remote {
    231 	if [ -n "$2" ]; then
    232 		printf "Mounting specified remote...\\n"
    233 		rclone_mount_process "$2"
    234 	else
    235 		printf "Mounting all remotes...\\n"
    236 		rclone_array="$(rclone listremotes | awk -F '[-:]' '/^drive/ && !/backups/ && !/saves/ {print $2}' | xargs)"
    237 		for i in $rclone_array; do
    238 			rclone_mount_process "$i"
    239 		done
    240 	fi
    241 }
    242 function rclone_mount_process {
    243 	remote="$(find_remote "$1")"
    244 	mount_point="$directory_home/$1"
    245 	mkdir -p "$mount_point"
    246 	if [[ -f "$mount_point/.mountcheck" || -n "$(find "$mount_point" -maxdepth 1 -mindepth 1 | head -n 1)" ]]; then
    247 		printf "%s already mounted.\\n" "$1"
    248 	else
    249 		printf "%s not mounted, mounting... " "$1"
    250 		fusermount -uz "$mount_point" 2>/dev/null && sleep 3
    251 		rclone mount "$remote" "$mount_point" --daemon
    252 		printf "done\\n"
    253 	fi
    254 }
    255 function blog_status {
    256 	status_uptime=$(($(cut -f1 -d. </proc/uptime) / 86400))
    257 	{
    258 		printf -- "---\\ntitle: Status\\nlayout: single\\n---\\n\\n"
    259 		printf "*Generated on %(%Y-%m-%d at %H:%M)T*\\n\\n" -1
    260 		printf "* Uptime: %s Day%s\\n" "$status_uptime" "$(if (("$status_uptime" > 1)); then echo s; fi)"
    261 		printf "* CPU Load: %s\\n" "$(cut -d" " -f1-3 </proc/loadavg)"
    262 		printf "* Users: %s\\n" "$(who | wc -l)"
    263 		printf "* RAM Usage: %s%%\\n" "$(printf "%.2f" "$(free | awk '/Mem/ {print $3/$2 * 100.0}')")"
    264 		printf "* Swap Usage: %s%%\\n" "$(printf "%.2f" "$(free | awk '/Swap/ {print $3/$2 * 100.0}')")"
    265 		printf "* Root Storage: %s\\n" "$(df / | awk 'END{print $5}')"
    266 		printf "* Tank Storage: %s\\n" "$(df | awk -v tank="$directory_tank" '$0 ~ tank {print $5}')"
    267 		printf "* Torrent Ratio: %s\\n" "$(echo "scale=3; $(awk '/uploaded/ {print $2}' "$(find_directory docker)"/transmission/stats.json)" / "$(awk '/downloaded/ {print $2}' "$(find_directory docker)"/transmission/stats.json | sed 's/,//g')" | bc)"
    268 		printf "* NAS Storage: %s\\n" "$(git --git-dir="$(find_directory logger)/.git" show | awk 'END{print $3" "$4}')"
    269 		printf "* [Containers](https://github.com/breadcat/Dockerfiles): %s\\n" "$(docker ps -q | wc -l)/$(docker ps -aq | wc -l)"
    270 		printf "* Packages: %s\\n" "$(pacman -Q | wc -l)"
    271 		printf "* Monthly Data: %s\\n\\n" "$(vnstat -m --oneline | cut -f11 -d\;)"
    272 		printf "Hardware specifications themselves are covered on the [hardware page](/hardware/#server).\\n"
    273 	} >"$(find_directory blog."$domain")/content/status.md"
    274 }
# Maintain the blog's weight page. With "date" as $2: pad the raw CSV
# with empty rows for every day since the last entry. Otherwise: plot
# the current year's data with gnuplot and rebuild the page around the
# inlined SVG.
function blog_weight {
	if [ "$2" = "date" ]; then
		printf "Writing empty dates... "
		weight_filename="$(find_directory blog."$domain")/content/weight.md"
		# everything except the closing </pre></details> line
		page_source="$(head -n -1 "$weight_filename")"
		# most recent recorded date = first CSV field of the last row
		previous_date="$(printf %s "$page_source" | awk -F, 'END{print $1}')"
		# whole days between the last entry and today
		sequence_count="$((($(date --date="$(date +%F)" +%s) - $(date --date="$previous_date" +%s)) / (60 * 60 * 24)))"
		{
			printf "%s\\n" "$page_source"
			printf "%s" "$(for i in $(seq $sequence_count); do printf "%s,\\n" "$(date -d "$previous_date+$i day" +%F)"; done)"
			printf "\\n</pre></details>"
		} >"$weight_filename"
		printf "done\\n"
		exit 0
	fi
	printf "Drawing graph... "
	weight_filename="$(find_directory blog."$domain")/content/weight.md"
	# the raw CSV lives between <pre> and </pre> in the page body
	weight_rawdata="$(awk '/<pre>/{flag=1; next} /<\/pre>/{flag=0} flag' "$weight_filename" | sort -u)"
	weight_dateinit="$(awk '/date:/ {print $2}' "$weight_filename")"
	# plot only rows from the current year
	grep "^$(date +%Y)-" <<<"$weight_rawdata" >temp.dat
	gnuplot <<-EOF
		set grid
		set datafile separator comma
		set xlabel "Month"
		set xdata time
		set timefmt "%Y-%m-%d"
		set xtics format "%b"
		set ylabel "Weight (kg)"
		set key off
		set term svg font 'sans-serif,12'
		set sample 50
		set output "temp.svg"
		plot "temp.dat" using 1:2 smooth cspline with lines
	EOF
	printf "done\\nWriting page... "
	{
		printf -- "---\\ntitle: Weight\\nlayout: single\\ndate: %s\\nlastmod: %(%Y-%m-%dT%H:%M:00)T\\n---\\n\\n" "$weight_dateinit" -1
		# scour minifies the generated SVG and prints it for embedding
		printf "%s\\n\\n" "$(scour -i temp.svg --strip-xml-prolog --remove-descriptive-elements --create-groups --enable-id-stripping --enable-comment-stripping --shorten-ids --no-line-breaks)"
		printf "<details><summary>Raw data</summary>\\n<pre>\\n%s\\n</pre></details>" "$weight_rawdata"

	} >"$weight_filename"
	printf "done\\nCleaning up... "
	rm temp.{dat,svg}
	printf "done\\n"
}
    320 function remotes_dedupe {
    321 	dests=$(rclone listremotes | grep "gdrive" -c)
    322 	for i in $(seq "$dests"); do
    323 		remote=$(rclone listremotes | grep "gdrive.*$i")
    324 		echo Deduplicating "$remote"
    325 		rclone dedupe --dedupe-mode newest "$remote" --log-file "$(find_directory $directory_config)/logs/rclone-dupe-$(date +%F-%H%M).log"
    326 	done
    327 }
    328 function remotes_tokens {
    329 	echo "Refreshing rclone remote tokens"
    330 	for i in $(rclone listremotes | awk -v remote="$backup_prefix" '$0 ~ remote {print $0}'); do
    331 		if rclone lsd "$i" &>/dev/null; then
    332 			echo -e "$i \e[32msuccess\e[39m"
    333 		else
    334 			echo -e "$i \e[31mfailed\e[39m"
    335 		fi
    336 	done
    337 }
    338 function remotes_sync {
    339 	source=$(find_remote "gdrive")
    340 	dests=$(rclone listremotes | grep "gdrive" -c)
    341 	for i in $(seq 2 "$dests"); do
    342 		dest=$(rclone listremotes | grep "gdrive.*$i")
    343 		if rclone lsd "$dest" &>/dev/null; then
    344 			if [ -n "$2" ]; then
    345 				directory="$2"
    346 				printf "Syncing %s directory to %s...\\n" "$directory" "$dest"
    347 				rclone sync "$source/$directory" "$dest/$directory" --drive-server-side-across-configs --drive-stop-on-upload-limit --verbose --log-file "$(find_directory $directory_config)/logs/rclone-sync-$directory-$(date +%F-%H%M).log"
    348 			else
    349 				printf "Syncing %s to %s...\\n" "$source" "$dest"
    350 				rclone sync "$source" "$dest" --drive-server-side-across-configs --drive-stop-on-upload-limit --verbose --log-file "$(find_directory $directory_config)/logs/rclone-sync-$(date +%F-%H%M).log"
    351 			fi
    352 		fi
    353 	done
    354 }
    355 function parse_photos {
    356 	source=$(find_directory DCIM)
    357 	mount="pictures"
    358 	mount_remote mount "$mount"
    359 	destination=$(find_directory $mount)
    360 	# main sorting process
    361 	if [[ -d "$destination" ]]; then
    362 		phockup "$source" "$destination/personal/photos/" -m
    363 		find "$source" -maxdepth 3 -mount -type d -not -path "*/\.*" -empty -delete
    364 	else
    365 		printf "Import directory not found.\\n"
    366 		exit 0
    367 	fi
    368 	umount_remote "$mount"
    369 }
# Archive every docker config directory into a GPG-encrypted tarball,
# copy it to all backup remotes, then move it to the final remote
# (the move also removes the local copy).
function backup_docker {
	check_not_root
	password_manager sync
	password="$(password_manager pass 'backup archive password')"
	backup_final="$(find_remote backups)"
	cd "$(find_directory $directory_config)" || exit
	for i in */; do
		backup_file="$(basename "$i")_backup-$(date +%F-%H%M).tar.xz.gpg"
		echo -n Backing up "$i"...
		# stop a matching container first so its files are quiescent
		if docker ps -a | grep -q "$i"; then
			docker stop "$i" 1>/dev/null
			# tar as root (files are container-owned); symmetric gpg
			sudo tar -cJf - "$i" | gpg -c --batch --passphrase "$password" >"$backup_file"
			docker start "$i" 1>/dev/null
		else
			sudo tar -cJf - "$i" | gpg -c --batch --passphrase "$password" >"$backup_file"
		fi
		# hand the root-made archive back to the login user
		sudo chown "$username":"$username" "$backup_file"
		echo -e "$i \e[32mdone\e[39m"
	done
	# send to remotes, final operation is a move, removing the backup
	for i in *_backup-*."tar.xz.gpg"; do
		for j in $(rclone listremotes | awk -v remote="$backup_prefix" '$0 ~ remote {print $0}'); do
			echo -n Copying "$i" to "$j"...
			rclone copy "$i" "$j" && echo -e "\e[32mdone\e[39m" || echo -e "\e[31mfailed\e[39m"
		done
		echo -n Moving "$i" to "$backup_final"...
		rclone move "$i" "$backup_final" && echo -e "\e[32mdone\e[39m" || echo -e "\e[31mfailed\e[39m"
	done
}
    399 function clean_space {
    400 	space_initial="$(df / | awk 'FNR==2{ print $4}')"
    401 	mkdir -p "$(find_directory $directory_config)/logs"
    402 	log_file="$(find_directory $directory_config)/logs/clean-$(date +%F-%H%M).log"
    403 	check_root
    404 	# journals
    405 	journalctl --vacuum-size=75M >>"$log_file"
    406 	# docker
    407 	for i in volume image; do
    408 		docker "$i" prune -f >>"$log_file"
    409 	done
    410 	docker system prune -af >>"$log_file"
    411 	# package manager
    412 	if [[ $distro =~ "Debian" ]]; then
    413 		export DEBIAN_FRONTEND=noninteractive
    414 		apt-get clean >>"$log_file"
    415 	elif [[ $distro =~ "Arch" ]]; then
    416 		readarray -t orphans < <(pacman -Qtdq)
    417 		pacman -Rns "${orphans[@]}"
    418 		pacman -Sc --noconfirm
    419 	else
    420 		echo "Who knows what you're running"
    421 	fi
    422 	# temp directory
    423 	rm -rf /tmp/tmp.* >>"$log_file"
    424 	space_after="$(df / | awk 'FNR==2{ print $4}')"
    425 	printf "Bytes freed: %s\\n" "$(("$space_after" - "$space_initial"))"
    426 }
    427 function remote_sizes {
    428 	dests=$(rclone listremotes | grep "gdrive" -c)
    429 	for i in $(seq "$dests"); do
    430 		remote=$(rclone listremotes | grep "gdrive.*$i")
    431 		echo -n Calculating "$remote"...
    432 		rclone size "$remote" | xargs
    433 	done
    434 }
# Full host update: distro packages, standalone tools, docker images
# (one-shot watchtower run) and every git checkout under the config dir.
function system_update {
	check_root
	if [[ $distro =~ "Debian" ]]; then
		# Update Debian
		export DEBIAN_FRONTEND=noninteractive
		apt-get update
		apt-get dist-upgrade -y
		apt-get autoremove --purge -y
		apt-get clean
		# yt-dlp and rclone are installed outside apt; self-update them
		if [ -x "$(command -v yt-dlp)" ]; then
			yt-dlp -U
		fi
		if [ -x "$(command -v rclone)" ]; then
			curl --silent "https://rclone.org/install.sh" | bash
		fi
	elif [[ $distro =~ "Arch" ]]; then
		# Update Archlinux
		if [ -x "$(command -v yay)" ]; then
			# yay refuses to run as root; drop to the login user
			sudo -u "$username" yay --noconfirm
		else
			pacman -Syu --noconfirm
		fi
	else
		echo "Who knows what you're running"
	fi
	# Manually update docker containers
	if [ -x "$(command -v docker)" ]; then
		echo "Updating all Docker containers..."
		docker run -it --name watchtower --rm -v /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower --run-once
	fi
	# Update remaining applications
	# pull every git checkout under the config dir, 10 in parallel
	find "$(find_directory $directory_config)" -maxdepth 2 -name ".git" -type d | sed 's/\/.git//' | xargs -P10 -I{} git -C {} pull
	if [ -x "$(command -v we-get)" ]; then
		echo "Updating we-get..."
		pip3 install --upgrade git+https://github.com/rachmadaniHaryono/we-get
	fi
	if [ -x "$(command -v media-sort)" ]; then
		echo "Updating media-sort..."
		cd "/usr/local/bin" || return
		curl https://i.jpillora.com/media-sort | bash
	fi
}
# Rebuild the blog's music page from a liked.csv export (expected in
# the current directory), producing a markdown table of artist/title
# links sorted and deduplicated case-insensitively.
function process_music {
	post="$(find_directory blog."$domain")/content/music.md"
	# non-table lines (front matter etc.) are kept verbatim as header
	header="$(grep -v \| "$post")"
	echo -n "Checking for $(basename "$post")..."
	# NOTE(review): "$i" in the echoes below is never set in this
	# function — it only contributes a leading space to the output
	if test -f "$post"; then
		echo -e "$i \e[32mexists\e[39m"
	else
		echo -e "$i \e[31mdoes not exist\e[39m. Exiting"
		exit 1
	fi
	source=liked.csv
	echo -n "Checking for $source..."
	if test -f "$source"; then
		echo -e "$i \e[32mexists\e[39m"
	else
		echo -e "$i \e[31mdoes not exist\e[39m. Exiting"
		exit 1
	fi
	echo -n "Processing $source... "
	# 4th quoted CSV column — presumably artist names; unescape commas
	tail -n +2 <"$source" |
		awk -F'\",\"' '{print $4}' |
		sed 's/\\\,/,/g' >temp.artists.log
	# 2nd quoted column — track titles; strip remaster/live/feat
	# suffixes, then wrap in [] to become markdown link text
	tail -n +2 <"$source" |
		awk -F'\",\"' '{print $2}' |
		sed 's/ - .... - Remaster$//g' |
		sed 's/ - .... Remaster$//g' |
		sed 's/ - .... Remastered Edition$//g' |
		sed 's/ - .... re-mastered version$//g' |
		sed 's/ - .... Remastered Version$//g' |
		sed 's/ - feat.*$//g' |
		sed 's/ - Live$//g' |
		sed 's/ - Original Mix$//g' |
		sed 's/ - Remaster$//g' |
		sed 's/ - Remastered ....$//g' |
		sed 's/ - Remastered$//g' |
		sed 's/ - Remastered Version$//g' |
		sed 's/ (.... Remaster)$//g' |
		sed 's/ (feat.*)$//g' |
		sed 's/ (Live)//g' |
		sed 's/\[[^][]*\]//g' |
		awk '{print "["$0"]"}' >temp.tracks.log
	# first quoted field — track URL, wrapped in () to finish the link
	tail -n +2 <"$source" |
		awk -F'\"' '{print $2}' |
		awk '{print "("$0")"}' >temp.links.log
	echo -e "\e[32mdone\e[39m"
	# write page
	echo -n "Writing page... "
	{
		printf "%s\n\n" "$header"
		# glue the three columns into markdown table rows, dedupe
		# case-insensitively, then prepend the table header rows
		printf "%s" "$(paste temp.artists.log temp.tracks.log temp.links.log | sed 's/\t/ \| /g' | sed 's/^/\| /g' | sed 's/$/ \|/g' | sed 's/\] | (/\](/g')" | sort | uniq -i | sed -e '1i\| ------ \| ----- \|' | sed -e '1i\| Artist \| Title \|'
	} >"$post"
	echo -e "\e[32mdone\e[39m"
	lastmod "$post"
	echo -n "Deleting temporary files... "
	rm temp.{artists,tracks,links}.log
	echo -e "\e[32mdone\e[39m"
}
    534 
    535 function blog_quote_sort {
    536 	quote_file="$(find_directory blog."$domain")"/content/quotes.md
    537 	file_header="$(head -n 7 "$quote_file")"
    538 	file_body="$(tail -n +7 "$quote_file" | sort | uniq -i | sed G)"
    539 	echo -n "Processing $(basename "$quote_file")... "
    540 	shasum_original="$(sha512sum "$quote_file" | awk '{print $1}')"
    541 	{
    542 		printf "%s\n" "$file_header"
    543 		printf "%s" "$file_body"
    544 	} >"$quote_file"
    545 	shasum_modified="$(sha512sum "$quote_file" | awk '{print $1}')"
    546 	if [[ "$shasum_original" != "$shasum_modified" ]]; then
    547 		lastmod "$i" 1>/dev/null
    548 		echo -e "\e[32mmodified\e[39m"
    549 	else
    550 		echo -e "\e[33munmodified\e[39m"
    551 	fi
    552 }
    553 
    554 function sort_languages {
    555 	for i in "$(find_directory blog."$domain")"/content/languages/*; do
    556 		if [[ "$i" = *index.md ]]; then continue; fi # there's probably a better way of doing this, but I can't figure it out
    557 		echo -n "Processing $(basename "$i")... "
    558 		shasum_original="$(sha512sum "$i" | awk '{print $1}')"
    559 		file_header="$(head -n 8 "$i")"
    560 		file_body="$(tail -n +9 "$i" | sort | uniq -i)"
    561 		{
    562 			printf "%s\n" "$file_header"
    563 			printf "%s" "$file_body"
    564 		} >"$i"
    565 		shasum_modified="$(sha512sum "$i" | awk '{print $1}')"
    566 		if [[ "$shasum_original" != "$shasum_modified" ]]; then
    567 			lastmod "$i" 1>/dev/null
    568 			echo -e "\e[32mmodified\e[39m"
    569 		else
    570 			echo -e "\e[33munmodified\e[39m"
    571 		fi
    572 	done
    573 }
    574 
    575 function lastmod {
    576 	echo -n "Amending lastmod value... "
    577 	mod_timestamp="$(date +%FT%H:%M:00)"
    578 	sed -i "s/lastmod: .*/lastmod: $mod_timestamp/g" "$1"
    579 	echo -e "$i \e[32mdone\e[39m"
    580 }
    581 
    582 function stagit_generate {
    583 	# variables
    584 	source_directory="$(find_directory vault)/src"
    585 	source_stylesheet="$(find_directory vault)/src/dockerfiles/configs/stagit.css"
    586 	destination_directory="$(find_directory docker)/stagit"
    587 	repositories=$(find "$source_directory" -type d -name '.git' | sed 's|/\.git$||')
    588 	# stagit loop
    589 	for repo in $repositories; do
    590 		bare_name=$(basename "$repo")
    591 		output_directory="$destination_directory/$bare_name"
    592 		mkdir -p "$output_directory"
    593 		cd "$output_directory" || exit
    594 		echo "Generating $bare_name..."
    595 		stagit "$repo"
    596 		cp "$source_stylesheet" "style.css"
    597 	done
    598 	# index
    599 	cd "$destination_directory" || exit
    600 	stagit-index "${source_directory}/"*/ >index.html
    601 	cp "$source_stylesheet" "style.css"
    602 }
    603 
# Resolve environment facts and global paths used by every helper,
# then dispatch on the first command-line argument.
function main {
	distro="$(awk -F'"' '/^NAME/ {print $2}' /etc/os-release)"
	username="$(logname)"
	directory_config="docker"
	directory_home="/home/$username"
	directory_tank="/mnt/tank"
	backup_prefix="backup-"
	domain="minskio.co.uk"
	# absolute directory this script lives in
	directory_script="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
	case "$1" in
	backup) backup_docker ;;
	bookmarks) echo "Creating sorted bookmark file..." && grep -P "\t\t\t\<li\>" "$(find_directory startpage)/index.html" | sort -t\> -k3 >"$(find_directory startpage)/bookmarks.txt" ;;
	clean) clean_space ;;
	dedupe) remotes_dedupe ;;
	depends) check_depends ;;
	docker) docker_build ;;
	langs) sort_languages ;;
	logger) media_logger ;;
	magnet) parse_magnets ;;
	mount) mount_remote "$@" ;;
	music) process_music "$@" ;;
	permissions) check_root && chown "$username":"$username" "$directory_script/rclone.conf" ;;
	photos) parse_photos ;;
	quotes) blog_quote_sort ;;
	rank) blog_duolingo_rank ;;
	refresh) remotes_tokens ;;
	size) remote_sizes ;;
	sort) sort_media ;;
	stagit) stagit_generate ;;
	status) blog_status ;;
	streak) duolingo_streak ;;
	sync) remotes_sync "$@" ;;
	umount) umount_remote "$@" ;;
	update) system_update ;;
	weight) blog_weight "$@" ;;
	# no/unknown verb: self-document by listing the case labels above
	*) echo "$0" && awk '/^function main/,EOF' "$0" | awk '/case/{flag=1;next}/esac/{flag=0}flag' | awk -F"\t|)" '{print $2}' | tr -d "*" | sort | xargs ;;
	esac
}
    642 
    643 main "$@"