diff --git a/deletionwatcher.py b/deletionwatcher.py
index 45bf2ee7f69..7d38bb7b157 100644
--- a/deletionwatcher.py
+++ b/deletionwatcher.py
@@ -167,7 +167,13 @@ def _check_batch(saved):
             uri = get_se_api_url_for_route("posts/{}".format(ids))
             params = get_se_api_default_params_questions_answers_posts_add_site(site)
             res = requests.get(uri, params=params, timeout=GlobalVars.default_requests_timeout)
-            json = res.json()
+            try:
+                json = res.json()
+            except ValueError:  # JSONDecodeError subclasses ValueError; the local 'json' shadows the module here
+                log('warning',
+                    'DeletionWatcher API request received invalid JSON in response (code {})'.format(res.status_code))
+                log('warning', res.text)
+                continue
 
             if 'backoff' in json:
                 DeletionWatcher.next_request_time = time.time() + json['backoff']
@@ -176,8 +182,7 @@ def _check_batch(saved):
                 log('warning',
                     'DeletionWatcher API request received no items in response (code {})'.format(res.status_code))
                 log('warning', res.text)
-                # This really should do a better job of recovery, as we could retry and/or go to the next site.
-                return
+                continue
 
             for post in json['items']:
                 compare_date = post["last_edit_date"] if "last_edit_date" in post else post["creation_date"]