sfeed_download: sync latest version - randomcrap - random crap programs of varying quality
git clone git://git.codemadness.org/randomcrap
---
commit 2c74d5b3ae88411b316b99630f5864c3137ccce9
parent 0bf44ff4ffdb6a7b92fe9cb94780b88154f1650a
Author: Hiltjo Posthuma <hiltjo@codemadness.org>
Date: Thu, 6 Apr 2023 20:26:47 +0200
sfeed_download: sync latest version
Diffstat:
M sfeed/sfeed_download | 23 +++++++++++------------
1 file changed, 11 insertions(+), 12 deletions(-)
---
diff --git a/sfeed/sfeed_download b/sfeed/sfeed_download
@@ -1,13 +1,11 @@
#!/bin/sh
-# Downloader for URLs and enclosures in feed files.
+# sfeed_download: downloader for URLs and enclosures in sfeed(5) files.
# Dependencies: awk, curl, flock, xargs (-P), yt-dlp.
cachefile="${SFEED_CACHEFILE:-$HOME/.sfeed/downloaded_urls}"
jobs="${SFEED_JOBS:-4}"
lockfile="${HOME}/.sfeed/sfeed_download.lock"
-youtubedl="yt-dlp"
-
# log(feedname, s, status)
log() {
if [ "$1" != "-" ]; then
@@ -15,14 +13,14 @@ log() {
else
s="$2"
fi
- printf '[%s]: %s: %s\n' "$(date +'%H:%M:%S')" "${s}" "$3" >&2
+ printf '[%s]: %s: %s\n' "$(date +'%H:%M:%S')" "${s}" "$3"
}
# fetch(url, feedname)
fetch() {
case "$1" in
*youtube.com*)
- $youtubedl "$1";;
+ yt-dlp "$1";;
*.flac|*.ogg|*.m3u|*.m3u8|*.m4a|*.mkv|*.mp3|*.mp4|*.wav|*.webm)
# allow 2 redirects, hide User-Agent, connect timeout is 15 seconds.
curl -O -L --max-redirs 2 -H "User-Agent:" -f -s --connect-timeout 15 "$1";;
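
For reference, the curl flags used above for enclosure downloads are all standard curl options; a standalone, commented equivalent (with a placeholder URL, not one taken from a feed) looks like this:

#!/bin/sh
# Illustration of the same curl flags; the URL is a placeholder.
# -O                    save the file under its remote name in the current directory
# -L --max-redirs 2     follow redirects, but at most 2 of them
# -H "User-Agent:"      send no User-Agent header
# -f                    fail with a non-zero exit status on HTTP errors
# -s                    silent mode, no progress meter
# --connect-timeout 15  give up if no connection is made within 15 seconds
url="https://example.org/episode.mp3"
curl -O -L --max-redirs 2 -H "User-Agent:" -f -s --connect-timeout 15 "${url}"
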
@@ -41,14 +39,13 @@ downloader() {
if [ "${feedname}" != "-" ]; then
mkdir -p "${feedname}"
if ! cd "${feedname}"; then
- log "${feedname}" "${msg}: ${feedname}" "DIR FAIL"
- exit 1
+ log "${feedname}" "${msg}: ${feedname}" "DIR FAIL" >&2
+ return 1
fi
fi
log "${feedname}" "${msg}" "START"
- fetch "${url}" "${feedname}"
- if [ $? = 0 ]; then
+ if fetch "${url}" "${feedname}"; then
log "${feedname}" "${msg}" "OK"
# append it safely in parallel to the cachefile on a
@@ -57,21 +54,23 @@ downloader() {
printf '%s\n' "${url}" >> "${cachefile}"
) 9>"${lockfile}"
else
- log "${feedname}" "${msg}" "FAIL"
+ log "${feedname}" "${msg}" "FAIL" >&2
+ return 1
fi
+ return 0
}
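
The cachefile append in the success branch above runs in a subshell that holds a lock on file descriptor 9 (flock is listed among the dependencies), so parallel download jobs cannot interleave their writes. A minimal sketch of that pattern, with placeholder paths rather than the script's own defaults:

#!/bin/sh
# Sketch of the lock-protected append pattern; paths and URL are placeholders.
lockfile="/tmp/example_download.lock"
cachefile="/tmp/example_downloaded_urls"
url="https://example.org/item.mp3"
(
    flock -x 9                                # wait for and take the exclusive lock on fd 9
    printf '%s\n' "${url}" >> "${cachefile}"  # append exactly one line per finished URL
) 9>"${lockfile}"                             # fd 9 is opened on the lockfile for the subshell
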
if [ "${SFEED_DOWNLOAD_CHILD}" = "1" ]; then
# Downloader helper for parallel downloading.
# Receives arguments: $1 = URL, $2 = title, $3 = feed filename or "-".
- # It should write the URI to the cachefile if it is succesful.
+ # It should write the URI to the cachefile if it is successful.
downloader "$1" "$2" "$3"
exit $?
fi
# ...else parent mode:
-tmp=$(mktemp)
+tmp="$(mktemp)" || exit 1
trap "rm -f ${tmp}" EXIT
[ -f "${cachefile}" ] || touch "${cachefile}"