# roadmap
# - for PM, allowed threads, and modmail sync, create a lightweight version that runs frequently and picks up new events quickly, distinct from the more expensive "full sync" implemented here.
+from urllib.parse import urlencode, urlparse, urlunparse
import urllib.request
import urllib.parse
return result and result.group(1)
def allowed_threads():
- req = urllib.request.Request('https://oauth.reddit.com/r/livecounting/search?q=url%3Alive+site%3Areddit.com+self%3Ano&restrict_sr=on&include_over_18=on&sort=new&t=all&limit=100', method='GET')
- req.add_header('Authorization', 'Bearer {}'.format(access_token))
- req.add_header('User-Agent', 'autojoin/0.1.0')
- res = json.load(urllib.request.urlopen(req))
- flakes = (flake_from_url(thing['data']['url']) for thing in res['data']['children'] if thing['data']['is_self'] is False)
- return set((f for f in flakes if f))
+ flakes = []
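+    # Same search the old hard-coded URL expressed, rebuilt as a dict so the
+    # query string can be re-encoded with an "after" cursor on each page.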
+ params = {
+        "q": "url:live site:reddit.com self:no",
+ "restrict_sr": "on",
+ "include_over_18": "on",
+ "sort": "new",
+ "t": "all",
+ "limit": "100",
+ }
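+    # Page through the search listing: Reddit returns an "after" fullname
+    # cursor with each page; keep requesting until it comes back null.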
+ while True:
+ req = urllib.request.Request(
+ urlunparse(
+ urlparse("https://oauth.reddit.com/r/livecounting/search")
+ ._replace(query = urlencode(params))
+ ),
+ method = "GET",
+ headers = {
+ "Authorization": "Bearer {}".format(access_token),
+ "User-Agent": "autojoin/0.1.0",
+ }
+ )
+ with urllib.request.urlopen(req) as resp:
+ data = json.load(resp)
+ flakes.extend(
+ flake_from_url(thing["data"]["url"])
+ for thing in data["data"]["children"]
+ if not thing["data"]["is_self"]
+ )
+        if data["data"]["after"] is None:
+            return set(filter(None, flakes))
+        else:
+            params["after"] = data["data"]["after"]
cr.execute("BEGIN")
#cr.execute("DELETE FROM live_autojoin_allowed_event WHERE service_name = %s", (service_name,))