qotnews/apiserver/feeds/hackernews.py

97 lines
2.8 KiB
Python
Raw Normal View History

2019-08-24 05:07:16 +00:00
import logging
# Log everything (DEBUG and up) with timestamps so failed API hits and
# skipped/deleted stories are easy to trace in the server log.
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.DEBUG)
2019-08-24 05:07:16 +00:00
2019-12-01 22:18:41 +00:00
# When run directly as a script (see the scratchpad at the bottom of this
# file), make sibling modules such as `utils` importable from the CWD.
if __name__ == '__main__':
    import sys
    sys.path.insert(0,'.')
2019-08-23 08:23:48 +00:00
import requests
2019-12-01 22:18:41 +00:00
from utils import clean
2019-08-23 08:23:48 +00:00
def API_TOPSTORIES(ref=None):
    """Return the URL of the top-stories ID list.

    The argument is ignored; it exists only so every route builder shares
    the same `route(ref)` calling convention used by api().
    """
    return 'https://hacker-news.firebaseio.com/v0/topstories.json'

def API_ITEM(ref=None):
    """Return the Algolia API URL for a single item (story or comment)."""
    return 'https://hn.algolia.com/api/v1/items/{}'.format(ref)

def SITE_LINK(ref=None):
    """Return the public news.ycombinator.com link for an item id."""
    return 'https://news.ycombinator.com/item?id={}'.format(ref)

def SITE_AUTHOR_LINK(ref=None):
    """Return the public news.ycombinator.com profile link for a username."""
    return 'https://news.ycombinator.com/user?id={}'.format(ref)
def api(route, ref=None):
    """GET `route(ref)` and return the decoded JSON, or False on failure.

    Tries twice: a quick attempt with a 5s timeout, then one retry with a
    more generous 15s timeout. Any failure (bad status code, network error,
    JSON decode error) is logged; KeyboardInterrupt always propagates so
    the process can be stopped cleanly.
    """
    # Escalating timeouts: first attempt fast, retry patient.
    timeouts = (5, 15)
    for attempt, timeout in enumerate(timeouts):
        try:
            url = route(ref)
            r = requests.get(url, timeout=timeout)
            if r.status_code != 200:
                raise Exception('Bad response code ' + str(r.status_code))
            return r.json()
        except KeyboardInterrupt:
            raise
        except BaseException as e:
            # Broad on purpose: requests raises a variety of exception
            # types and a feed fetch must never take the server down.
            if attempt < len(timeouts) - 1:
                logging.error('Problem hitting hackernews API: {}, trying again'.format(str(e)))
            else:
                logging.error('Problem hitting hackernews API: {}'.format(str(e)))
    return False
def feed():
    """Return the current top-story IDs as a list of strings.

    Returns an empty list when the API call fails (api() returns False).
    """
    story_ids = api(API_TOPSTORIES)
    return [str(story_id) for story_id in (story_ids or [])]
def comment(i):
    """Convert one raw Algolia comment dict into the local comment format.

    Returns False for deleted/dead comments (items without an 'author'
    key) so callers can filter them out. Child comments are converted
    recursively, with dead children dropped.
    """
    if 'author' not in i:
        return False

    c = {}
    c['author'] = i.get('author', '')
    c['score'] = i.get('points', 0)
    c['date'] = i.get('created_at_i', 0)
    # 'text' can be present but null; coerce to '' before cleaning.
    c['text'] = clean(i.get('text', '') or '')
    # .get() guards against items missing the 'children' key entirely
    # (the old i['children'] raised KeyError on those).
    c['comments'] = [comment(j) for j in i.get('children') or []]
    c['comments'] = list(filter(bool, c['comments']))
    return c
def comment_count(i):
    """Recursively count this node plus all descendants with a truthy author.

    Dead/deleted nodes (falsy 'author') contribute 0 for themselves but
    their subtrees are still counted.
    """
    total = 1 if i['author'] else 0
    for child in i['comments']:
        total += comment_count(child)
    return total
def story(ref):
    """Fetch item `ref` from the Algolia HN API and normalize it.

    Returns a dict with author/score/date/title/link/url/comments fields,
    or False when the API call fails, the item was deleted, or it is not
    of type 'story' (e.g. a comment, job, or poll).
    """
    r = api(API_ITEM, ref)
    if not r:
        logging.info('Bad Hackernews API response.')
        return False

    if 'deleted' in r:
        logging.info('Story was deleted.')
        return False
    elif r.get('type', '') != 'story':
        logging.info('Type "{}" is not "story".'.format(r.get('type', '')))
        return False

    s = {}
    s['author'] = r.get('author', '')
    s['author_link'] = SITE_AUTHOR_LINK(r.get('author', ''))
    s['score'] = r.get('points', 0)
    s['date'] = r.get('created_at_i', 0)
    s['title'] = r.get('title', '')
    s['link'] = SITE_LINK(ref)
    s['url'] = r.get('url', '')
    # .get() guards against items missing the 'children' key entirely
    # (the old r['children'] raised KeyError on those).
    s['comments'] = [comment(i) for i in r.get('children') or []]
    s['comments'] = list(filter(bool, s['comments']))
    # comment_count counts the story itself as one 'alive' node; subtract it.
    s['num_comments'] = comment_count(s) - 1

    # Self-posts (Ask HN etc.) carry their body in 'text'.
    if r.get('text'):
        s['text'] = clean(r['text'])

    return s
# Scratchpad: run this module directly to exercise the parser by hand.
if __name__ == '__main__':
    top_stories = feed()
    print(top_stories)
    #print(story(20763961))
    #print(story(20802050))