twitter_status.sh - randomcrap - random crap programs of varying quality
(HTM) git clone git://git.codemadness.org/randomcrap
(DIR) Log
(DIR) Files
(DIR) Refs
(DIR) README
(DIR) LICENSE
---
twitter_status.sh (5123B)
---
1 #!/bin/sh
2 # Dependencies: json2tsv, awk, sh.
3 # Credits/shout-outs to: leot for many ideas and testing.
4
5 # Twitter authentication bearer (seems to be static).
6 bearer="AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
7
8 # guest token.
9 token=""
10
# guesttoken(): request a guest token from the Twitter API and print it
# on stdout. Prints nothing when the request or the extraction fails.
guesttoken() {
	# 15 second timeout, no User-Agent, treat any redirect as failure.
	curl -s -f -m 15 -X POST -L --max-redirs 0 -H "User-Agent:" \
		-H "Authorization: Bearer ${bearer}" \
		'https://api.twitter.com/1.1/guest/activate.json' 2>/dev/null | \
		sed -nE 's@.*\{"guest_token":"([^"]*)"\}.*@\1@p'
}
20
# fetch(name, twittername, feedfile): fetch the most recent tweets
# (timeline) of the Twitter account named in $2 and write the JSON
# response to stdout.
# NOTE(review): only $2 is used here; $1 and $3 are currently unused.
fetch() {
	url="https://api.twitter.com/1.1/statuses/user_timeline.json"
	url="${url}?screen_name=$2&tweet_mode=extended&count=100&include_rts=1"

	# 15 second timeout, no User-Agent, treat any redirect as failure.
	curl -s -f -m 15 -L --max-redirs 0 -H "User-Agent:" \
		-H "Authorization: Bearer ${bearer}" \
		-H "x-guest-token: $token" \
		"${url}" 2>/dev/null
}
32
# getid(url): extract the numeric status ID from a tweet URL and print it.
# Prints nothing and returns non-zero when no valid ID is found, so the
# caller's empty-string check actually triggers on malformed URLs
# (previously a URL without "/status/" passed through unchanged as the ID).
getid() {
	url="$1"
	# require a "/status/" path component.
	case "$url" in
	*/status/*) ;;
	*) return 1 ;;
	esac
	id="${url##*/status/}"
	id="${id%%\?*}" # strip query string.
	id="${id%%/*}"  # strip trailing path components (e.g. /photo/1).
	# the ID must be purely numeric.
	case "$id" in
	''|*[!0-9]*) return 1 ;;
	esac
	printf '%s' "$id"
}
41
# convert(): read a Twitter conversation JSON document on stdin and write
# a readable thread to stdout: one Date/From/[Video]/Message record per
# tweet, replies indented by one tab per depth level (capped at maxdepth).
# Dependencies: json2tsv to flatten the JSON, awk for the conversion.
convert() {
	json2tsv | \
	LC_ALL=C awk -F '\t' '
	BEGIN {
		maxdepth = 2;
	}
	# walk the in_reply_to chain to determine how deeply a reply is nested.
	function checkdepth(id) {
		depth = 0;
		_id = id;
		while (tweets[_id, "in_reply_to_status_id_str"]) {
			_id = tweets[_id, "in_reply_to_status_id_str"];
			depth++;
		}
		return depth;
	}
	function indent(depth) {
		# avoid deep nesting for visual clarity.
		d = depth;
		if (d > maxdepth)
			d = maxdepth;
		s = "";
		for (i = 0; i < d; i++) {
			s = s "\t";
		}
		return s;
	}
	# replace all occurrences of the literal string search with repl.
	function replaceall(str, search, repl) {
		s = "";
		for (rest = str; idx = index(rest, search);) {
			s = s substr(rest, 1, idx - 1) repl;
			rest = substr(rest, idx + length(search));
		}
		return s rest;
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+$/ && $2 == "o" {
		id = substr($1, 23);
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.[a-z_]*$/ {
		# flush any pending short-url mapping before handling tweet fields.
		if (url && media_url_https)
			urls[url] = media_url_https; # preferred
		else if (url && expanded_url && !urls[url])
			urls[url] = expanded_url;

		match($1, /\.[a-z_]*$/);
		if (RSTART == 0)
			next;
		key = substr($1, RSTART + 1);
		tweets[id, key] = $3;
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.media\[\]$/ && $2 == "o" {
		if (url && media_url_https)
			urls[url] = media_url_https; # preferred
		media_url_https = url = "";
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.media\[\]\.url$/ && $2 == "s" {
		url = $3;
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.media\[\]\.media_url_https$/ && $2 == "s" {
		media_url_https = $3;
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.urls\[\]$/ && $2 == "o" {
		if (url && expanded_url && !urls[url])
			urls[url] = expanded_url;
		expanded_url = url = "";
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.urls\[\]\.url$/ && $2 == "s" {
		url = $3;
	}
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.entities\.urls\[\]\.expanded_url$/ && $2 == "s" {
		expanded_url = $3;
	}
	# video
	$1 ~ /^\.globalObjects\.tweets\.[0-9]+\.extended_entities\.media\[\]\.video_info\.variants\[\]\.url$/ && $2 == "s" {
		tweets[id, "video"] = $3;
	}
	$1 ~ /^\.globalObjects\.users\.[0-9]+$/ && $2 == "o" {
		userid = substr($1, 22);
	}
	$1 ~ /^\.globalObjects\.users\.[0-9]+\.[a-z_]*$/ && $2 == "s" {
		match($1, /\.[a-z_]*$/);
		if (RSTART == 0)
			next;
		key = substr($1, RSTART + 1);
		users[userid, key] = $3;
	}
	# timeline order.
	#$1 ~ /^\.timeline\.instructions\[\]\.addEntries\.entries\[\]\.entryId$/ && $2 == "s" {
	#	gsub("^[a-zA-Z]*-", "", $3);
	$1 ~ /^\.timeline\.instructions\[\]\.addEntries\.entries\[\]\..*\.content\.tweet\.id$/ && $2 == "s" {
		order[n++] = $3;
	}
	END {
		# flush the last pending short-url mapping.
		if (url && media_url_https)
			urls[url] = media_url_https; # preferred
		else if (url && expanded_url && !urls[url])
			urls[url] = expanded_url;
		for (c = 0; c < n; c++) {
			id = order[c];
			text = tweets[id, "full_text"];
			text = replaceall(text, "\\n", " "); # replace (escaped) newline with space.
			# decode HTML entities in the tweet text; decode the ampersand
			# last to avoid double-decoding (e.g. "&amp;lt;").
			text = replaceall(text, "&lt;", "<");
			text = replaceall(text, "&gt;", ">");
			text = replaceall(text, "&amp;", "&");
			if (!length(text))
				continue;
			# expand urls.
			for (k in urls) {
				text = replaceall(text, k, urls[k]);
			}

			userid = tweets[id, "user_id_str"];
			user_screenname = users[userid, "screen_name"];
			user_name = users[userid, "name"];

			prefix = indent(checkdepth(id));

			print prefix "Date: " tweets[id, "created_at"];
			print prefix "From: " user_name " (@" user_screenname ")";
			if (tweets[id, "video"])
				print prefix "Video: " tweets[id, "video"];

			print prefix "Message: " text;
			print "";
		}
	}'
}
167
# main: expects a tweet/conversation URL as the first argument; prints the
# whole conversation as readable text on stdout.
url="$1"
id="$(getid "$1")"
if [ -z "$id" ]; then
	echo "no status ID found in url" >&2
	exit 1
fi
url="https://api.twitter.com/2/timeline/conversation/${id}.json?tweet_mode=extended&count=100&include_rts=1"

# get guest token.
token=$(guesttoken)
if [ -z "${token}" ]; then
	echo "Failed to acquire guest token" >&2
	exit 1
fi

# fail on redirects, hide User-Agent, timeout is 15 seconds.
curl -L --max-redirs 0 -H "User-Agent:" -f -s -m 15 \
	-H "Authorization: Bearer ${bearer}" \
	-H "x-guest-token: $token" \
	"${url}" 2>/dev/null | convert