Check-in by ben on 2025-04-12 20:53:31

Declare private variables in api.awk. Remove unnecessary line
continuation characters.

INSERTED  DELETED
       1        1  src/api.awk
       5        5  src/download/index.dcgi.m4
       2        2  src/list/index.dcgi.m4
       2        2  src/lists/index.dcgi.m4
       5        5  src/search/index.dcgi.m4
       5        5  src/web.awk
      20       20  TOTAL over 6 changed files

Index: src/api.awk
==================================================================
--- src/api.awk
+++ src/api.awk
@@ -1,6 +1,6 @@
-function api_request(url, method, output, data) {
+function api_request(url, method, output, data, curlcfg, retval) {
     retval = ""
     curlcfg = gettemp()
     printf "--connect-timeout 10\n" > curlcfg
     printf "--max-redirs 0\n" >> curlcfg
     if (url ~ /^https:/) {
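For context on the api.awk change: awk has no local-variable declaration,
so the conventional way to make variables such as curlcfg and retval
private to a function is to append them to its formal parameter list;
callers simply never pass values for them. A minimal sketch of the idiom
(the function and variable names here are illustrative, not from this
repository):

    # i and n are "private": they exist only as unpassed parameters.
    # The extra spacing before them is the usual convention marking
    # them as locals rather than real arguments.
    function sum_sizes(arr,    i, n) {
        n = 0
        for (i in arr) {
            n += arr[i]
        }
        return n
    }
    BEGIN {
        sizes["a"] = 2
        sizes["b"] = 3
        print sum_sizes(sizes)    # prints 5
    }

Without the extra parameters, curlcfg and retval would be globals shared
by every caller of api_request().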
Index: src/download/index.dcgi.m4
==================================================================
--- src/download/index.dcgi.m4
+++ src/download/index.dcgi.m4
@@ -8,11 +8,11 @@
 include(src/config.awk)
 incl(src/api.awk)
 incl(src/cgi.awk)
 incl(src/util.awk)
 
-function main(cmd, dir, files, file_size, format, iaout, is_archive, \
+function main(cmd, dir, files, file_size, format, iaout, is_archive,
     is_proxy, item_server, label, mtime, name, source, url) {
     dir = parts[2]
     item_id = parts[3]
     if (dir == "download") {
@@ -59,13 +59,13 @@
             }
         }
         close(cmd)
         for (i = 1; i <= files; i++) {
-            label = sprintf("%s %s %s", \
-                shorten_left(name[i], 40), \
-                strftime("%Y-%m-%d %H:%M", mtime[i]), \
+            label = sprintf("%s %s %s",
+                shorten_left(name[i], 40),
+                strftime("%Y-%m-%d %H:%M", mtime[i]),
                 human_size(file_size[i]))
             url = sprintf("http://%s%s/%s", item_server, dir, name[i])
             if (is_proxy) {
                 if (file_size[i] > max_bin_size * size_mb) {
                     printf "[h|%s|URL:%s|%s|%s]\n", label, uri_encode(url),
@@ -78,11 +78,11 @@
                 printf "[h|%s|URL:%s|%s|%s]\n", label, uri_encode(url),
                     server, port
             }
             is_archive = detect_archive(url)
             if (is_archive) {
-                url = sprintf("http://%s/view_archive.php?archive=%s/%s", \
+                url = sprintf("http://%s/view_archive.php?archive=%s/%s",
                     item_server, dir, name[i])
                 printf "[h|%s (View Contents)|URL:%s|%s|%s]\n",
                     shorten_left(name[i], 40), uri_encode(url), server, port
             }

Index: src/list/index.dcgi.m4
==================================================================
--- src/list/index.dcgi.m4
+++ src/list/index.dcgi.m4
@@ -157,14 +157,14 @@
         title = $3
     } else if ($1 == ".response.body.hits.hits[]._score") {
         # the _score field happens to be toward the end of each item
         if (length(title) > 0) {
             if (length(creator) > 0) {
-                label = sprintf("[%s] %s by %s", mediatype[type], \
+                label = sprintf("[%s] %s by %s", mediatype[type],
                     gph_encode(shorten(title, 40)), shorten(creator, 18))
             } else {
-                label = sprintf("[%s] %s", mediatype[type], \
+                label = sprintf("[%s] %s", mediatype[type],
                     gph_encode(shorten(title, 58)))
             }
             printf "[1|%s|%s/details/%s|%s|%s]\n", label,
                 cgipath, id, server, port >>output
             count++

Index: src/lists/index.dcgi.m4
==================================================================
--- src/lists/index.dcgi.m4
+++ src/lists/index.dcgi.m4
@@ -46,11 +46,11 @@
     while ((cmd | getline) > 0) {
         if ($1 == ".value[]" && $2 == "o") {
             # add information for previous list
             if (!is_private && length(name) > 0 && item_count > 0) {
                 label = shorten_left(name, 50)
-                item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]", \
+                item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]",
                     item_count, label, cgipath, search, id, server, port)
                 record = label "\t" id "\t" item
                 count++
                 records[count] = record
             }
@@ -72,11 +72,11 @@
     close(cmd)
     # add information for previous list
    if (!is_private && length(name) > 0 && item_count > 0) {
         label = shorten_left(name, 50)
-        item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]", \
+        item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]",
             item_count, label, cgipath, search, id, server, port)
         record = label "\t" id "\t" item
         count++
         records[count] = record
     }

Index: src/search/index.dcgi.m4
==================================================================
--- src/search/index.dcgi.m4
+++ src/search/index.dcgi.m4
@@ -89,15 +89,15 @@
     # remove anyfield, a hobgoblin of consistency
     searchstr = search
     gsub(/anyfield:/, "", searchstr)
 
-    url = sprintf("%s/advancedsearch.php?q=%s&output=json&rows=%d&page=%d", \
+    url = sprintf("%s/advancedsearch.php?q=%s&output=json&rows=%d&page=%d",
         api_endpoint, searchstr, rows, page)
     if (length(order) > 0) {
         split(order, parts, " ")
-        url = url sprintf("&sort%%5B0%%5D=%s %s", order_names[parts[1]], \
+        url = url sprintf("&sort%%5B0%%5D=%s %s", order_names[parts[1]],
             parts[2])
     }
     for (i = 1; i <= fields; i++) {
         url = url sprintf("&fl%%5B%d%%5D=%s", i-1, field[i])
     }
@@ -130,11 +130,11 @@
     } else {
         pages = int(numfound / rows)
         if (numfound % rows != 0) {
             pages++
         }
-        printf "# %s search results, page %d of %d\n", numfound, \
+        printf "# %s search results, page %d of %d\n", numfound,
             page, pages >>output
         print "" >>output
     }
 
     # format search results as a gopher directory (menu)
@@ -163,14 +163,14 @@
     } else if ($1 == ".response.docs[].title" && $2 == "s") {
         # the title field happens to be toward the end of each item
         title = $3
         count++
         if (length(creator) > 0) {
-            label = sprintf("[%s] %s by %s", mediatype[type], \
+            label = sprintf("[%s] %s by %s", mediatype[type],
                 gph_encode(shorten(title, 40)), shorten(creator, 18))
         } else {
-            label = sprintf("[%s] %s", mediatype[type], \
+            label = sprintf("[%s] %s", mediatype[type],
                 gph_encode(shorten(title, 58)))
         }
         if (type == "collection") {
             printf "[1|%s|%s/search/%%09collection:(%s)|%s|%s]\n",
                 label, cgipath, id, server, port >>output
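For context on the removed backslashes: POSIX awk automatically continues
a statement across a newline that follows a comma (and likewise after an
opening brace, &&, or ||), so a trailing backslash on a line that already
ends in a comma is redundant. A minimal standalone sketch, with
illustrative strings:

    BEGIN {
        # The newline after each comma continues the statement;
        # no backslash is needed.
        msg = sprintf("%s %s %s",
            "split", "across", "lines")
        print msg
    }

Every backslash removed in this check-in sits at the end of a line ending
in a comma, which is why the continuations were unnecessary; a backslash
would still be required to break a line at other points.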
", html) print html >work close(work) - cmd = sprintf("%s -a -n 3 <%s | %s -ilr -w 60", cmd_strings, work, \ + cmd = sprintf("%s -a -n 3 <%s | %s -ilr -w 60", cmd_strings, work, cmd_webdump) marker = 999999 while ((cmd | getline) > 0) { gsub(/\t/, " ") gsub(/\\t/, " ") @@ -172,11 +172,11 @@ # Print the webdump references section, translating archive.org URLs to # pharos URLs -function print_ref_pharos(output, str, id, label, link, prefix, \ +function print_ref_pharos(output, str, id, label, link, prefix, relative, token) { if (match(str, /^ [0-9]+\. /)) { prefix = substr(str, 0, RLENGTH) link = substr(str, RLENGTH+1)