Sidebot refactoring
author Jakob Cornell <jakob+gpg@jcornell.net>
Tue, 11 Oct 2022 23:35:48 +0000 (18:35 -0500)
committer Jakob Cornell <jakob+gpg@jcornell.net>
Tue, 11 Oct 2022 23:35:48 +0000 (18:35 -0500)
sidebot/sidebot/main.py

index 8ea08bdf87da39086c46fa0f68d1ae6b5466e035..d9fcf6f38e2a44d3d139d6eeb71f38652248400f 100644
--- a/sidebot/sidebot/main.py
+++ b/sidebot/sidebot/main.py
@@ -1,7 +1,6 @@
 import psycopg2
 import urllib.request
 import json
-import itertools
 from argparse import ArgumentParser
 from datetime import datetime, timezone
 from uuid import UUID
@@ -17,8 +16,8 @@ def main():
        parser.add_argument('service_name')
        args = parser.parse_args()
 
-       USER_AGENT='sidebot/0.1.0'
-       MAX_PAGES=3
+       USER_AGENT = 'sidebot/0.1.0'
+       MAX_PAGES = 3
 
        dbconn = psycopg2.connect('')
        db = dbconn.cursor()
@@ -42,10 +41,10 @@ def main():
        stopped_short = False
        before_arg = "LiveUpdate_{}".format(last_update_id) if last_update_id else ""
        for page_no in range(MAX_PAGES):
-               res = urllib.request.urlopen(urllib.request.Request(
-                               'https://www.reddit.com/live/{}.json?raw_json=1&limit=100&before={}'.format(
-                                       event_id, before_arg
-                               ), headers={"User-Agent": USER_AGENT}))
+               res = urllib.request.urlopen(
+                       urllib.request.Request(
+                               'https://www.reddit.com/live/{}.json?raw_json=1&limit=100&before={}'.format(event_id, before_arg),
+                               headers = {"User-Agent": USER_AGENT}))
                if res.status != 200:
                        raise RuntimeError('HTTP {} {}'.format(res.status, res.reason))
                page = json.load(res)['data']['children']
@@ -66,7 +65,6 @@ def main():
                raise RuntimeError('HTTP {} {}'.format(res.status, res.reason))
        data = json.load(res)['data']
        resources = data['resources']
-       nsfw_arg = str(int(data['nsfw']))
 
        (_, kget_slice, kget) = find_parse_section(resources, 'GET',
                        event_id=event_id, retain=keep_kget)
@@ -105,13 +103,17 @@ def main():
                        text = text[:slice_.start] + new_text + text[slice_.stop:]
                return text
 
-       new_resources = patch_text(resources, itertools.chain((
-                               (slice_, proc.text()) for (slice_, proc) in [
-                                       (kget_slice, kget),
-                                       (notd_slice, notd)
-                               ] if proc.dirty),
-                               ((notd_banner_slice, notd_banner) for d in range(int(not stopped_short and now_ts is not None)))
-                       ))
+       patches = [
+               (slice_, proc.text())
+               for (slice_, proc) in [(kget_slice, kget), (notd_slice, notd)]
+               if proc.dirty
+       ]
+
+       # patch banner only if we got the latest update
+       if not stopped_short and now_ts is not None:
+               patches.append((notd_banner_slice, notd_banner))
+
+       new_resources = patch_text(resources, patches)
 
        if new_resources != resources:
                req = urllib.request.Request(
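
For reference, a minimal, self-contained sketch of the splice pattern this commit moves to: build an explicit list of (slice, replacement) patches, then apply them with patch_text. Only the names patch_text, kget, notd, and the banner/stopped_short logic come from the diff above; the sample sidebar text, the slice values, and the reverse-sorted application order are assumptions invented so the sketch runs on its own.

	def patch_text(text, patches):
		# Splice each (slice, replacement) pair into the text. Applying from
		# the end of the string backward keeps earlier slice offsets valid
		# (an ordering assumption for this standalone sketch).
		for (slice_, new_text) in sorted(patches, key=lambda p: p[0].start, reverse=True):
			text = text[:slice_.start] + new_text + text[slice_.stop:]
		return text

	sidebar = "header [kget] middle [notd] footer"

	# Explicit patch list, mirroring the refactor: collect the unconditional
	# patches first, then append the optional one behind a plain `if`.
	patches = [
		(slice(7, 13), "KGET v2"),   # replaces "[kget]"
		(slice(21, 27), "NOTD v2"),  # replaces "[notd]"
	]

	got_latest_update = True  # stand-in for `not stopped_short and now_ts is not None`
	if got_latest_update:
		patches.append((slice(28, 34), "BANNER"))  # replaces "footer"

	print(patch_text(sidebar, patches))  # header KGET v2 middle NOTD v2 BANNER

Compared with the removed itertools.chain over a zero-or-one-element generator expression, the explicit list makes the "maybe patch the banner" case readable at a glance, which appears to be the point of the refactor.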