call raise_for_status() on all GET requests

io 2021-06-14 20:34:33 +00:00
parent a46d7fe95c
commit fe1474ffd0
1 changed file with 12 additions and 6 deletions
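For context: requests' raise_for_status() turns any 4xx/5xx response into a requests.HTTPError exception, instead of returning a response object whose body later code would misread. A minimal sketch of that behavior (the URL is illustrative only):

import requests

try:
    r = requests.get("https://example.com/missing", timeout=10)
    r.raise_for_status()  # raises requests.HTTPError on a 4xx/5xx status
except requests.HTTPError as e:
    print("request failed with status", e.response.status_code)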

main.py

@@ -78,6 +78,12 @@ def extract_toot(toot):
     return(toot)
+
+def get(*args, **kwargs):
+    r = requests.get(*args, **kwargs)
+    r.raise_for_status()
+    return r
+
 client = Mastodon(
     client_id=cfg['client']['id'],
     client_secret=cfg['client']['secret'],
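With this wrapper in place, every call site below gets the same behavior change: a failing GET raises at the request itself rather than handing back an error page whose .text or .json() would fail confusingly further down (for example, in the patterns["uri"] regex search). A sketch of what a call site now sees (the host name is illustrative):

try:
    r = get("https://example.social/.well-known/host-meta", timeout=10)
except requests.HTTPError as e:
    # e.g. a 429 while rate limited now surfaces here,
    # not as a regex or JSON failure later on
    print("server said", e.response.status_code)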
@@ -149,11 +155,11 @@ for f in following:
     try:
         # 1. download host-meta to find webfinger URL
-        r = requests.get("https://{}/.well-known/host-meta".format(instance), timeout=10)
+        r = get("https://{}/.well-known/host-meta".format(instance), timeout=10)
         # 2. use webfinger to find user's info page
         uri = patterns["uri"].search(r.text).group(1)
         uri = uri.format(uri="{}@{}".format(f.username, instance))
-        r = requests.get(uri, headers={"Accept": "application/json"}, timeout=10)
+        r = get(uri, headers={"Accept": "application/json"}, timeout=10)
         j = r.json()
         found = False
         for link in j['links']:
@@ -167,7 +173,7 @@ for f in following:
         # 3. download first page of outbox
         uri = "{}/outbox?page=true".format(uri)
-        r = requests.get(uri, timeout=15)
+        r = get(uri, timeout=15)
         j = r.json()
     except:
         print("oopsy woopsy!! we made a fucky wucky!!!\n(we're probably rate limited, please hang up and try again)")
@@ -188,7 +194,7 @@ for f in following:
         else:
             print("Using standard mode")
             uri = "{}&min_id={}".format(uri, last_toot)
-        r = requests.get(uri)
+        r = get(uri)
         j = r.json()
     print("Downloading and saving posts", end='', flush=True)
@@ -226,9 +232,9 @@ for f in following:
         # get the next/previous page
         try:
             if not pleroma:
-                r = requests.get(j['prev'], timeout=15)
+                r = get(j['prev'], timeout=15)
             else:
-                r = requests.get(j['next'], timeout=15)
+                r = get(j['next'], timeout=15)
         except requests.Timeout:
             print("HTTP timeout, site did not respond within 15 seconds")
         except KeyError:
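One consequence worth noting: this pagination try catches requests.Timeout and KeyError but not HTTP error statuses, so after this change a 4xx/5xx from get() will propagate out of this block. A sketch of how a caller could handle that case as well; the extra handler is illustrative, not part of this commit:

try:
    if not pleroma:
        r = get(j['prev'], timeout=15)
    else:
        r = get(j['next'], timeout=15)
except requests.Timeout:
    print("HTTP timeout, site did not respond within 15 seconds")
except requests.HTTPError as e:
    # illustrative addition: catch 4xx/5xx raised by the new wrapper while paging
    print("server returned status", e.response.status_code)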