import psycopg2
import urllib.request
import json
- import itertools
from argparse import ArgumentParser
from datetime import datetime, timezone
from uuid import UUID
parser = ArgumentParser()
parser.add_argument('service_name')
args = parser.parse_args()
- USER_AGENT='sidebot/0.1.0'
- MAX_PAGES=3
+ USER_AGENT = 'sidebot/0.1.0'
+ MAX_PAGES = 3
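# empty DSN: psycopg2/libpq takes connection settings from the PG* environment variables and defaults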
dbconn = psycopg2.connect('')
db = dbconn.cursor()
stopped_short = False
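# Reddit listings accept a "before" fullname ("LiveUpdate_<id>") and return only updates newer than it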
before_arg = "LiveUpdate_{}".format(last_update_id) if last_update_id else ""
for page_no in range(MAX_PAGES):
- res = urllib.request.urlopen(urllib.request.Request(
- 'https://www.reddit.com/live/{}.json?raw_json=1&limit=100&before={}'.format(
- event_id, before_arg
- ), headers={"User-Agent": USER_AGENT}))
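+ # fetch up to 100 updates from the live thread's JSON listing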
+ res = urllib.request.urlopen(
+     urllib.request.Request(
+         'https://www.reddit.com/live/{}.json?raw_json=1&limit=100&before={}'.format(event_id, before_arg),
+         headers={"User-Agent": USER_AGENT}))
if res.status != 200:
raise RuntimeError('HTTP {} {}'.format(res.status, res.reason))
page = json.load(res)['data']['children']
raise RuntimeError('HTTP {} {}'.format(res.status, res.reason))
data = json.load(res)['data']
resources = data['resources']
- nsfw_arg = str(int(data['nsfw']))
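# locate and parse the 'GET' section of the resources text; its slice and parser feed the patch list below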
(_, kget_slice, kget) = find_parse_section(resources, 'GET',
event_id=event_id, retain=keep_kget)
text = text[:slice_.start] + new_text + text[slice_.stop:]
return text
- new_resources = patch_text(resources, itertools.chain((
- (slice_, proc.text()) for (slice_, proc) in [
- (kget_slice, kget),
- (notd_slice, notd)
- ] if proc.dirty),
- ((notd_banner_slice, notd_banner) for d in range(int(not stopped_short and now_ts is not None)))
- ))
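+ # collect (slice, new text) pairs for the sections whose parsers report changes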
+ patches = [
+     (slice_, proc.text())
+     for (slice_, proc) in [(kget_slice, kget), (notd_slice, notd)]
+     if proc.dirty
+ ]
+
+ # patch banner only if we got the latest update
+ if not stopped_short and now_ts is not None:
+ patches.append((notd_banner_slice, notd_banner))
+
+ new_resources = patch_text(resources, patches)
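# skip the follow-up request when patching left the text unchanged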
if new_resources != resources:
req = urllib.request.Request(