Improve logging; fetch tweets via nitter.net

Tanner Collin 2022-03-05 23:48:46 +00:00
parent a693ea5342
commit a25457254f
7 changed files with 33 additions and 11 deletions

View File

@@ -34,6 +34,10 @@ def get_article(url):
         logging.info('Readerserver not configured, aborting.')
         return ''
 
+    if url.startswith('https://twitter.com'):
+        logging.info('Replacing twitter.com url with nitter.net')
+        url = url.replace('twitter.com', 'nitter.net')
+
     try:
         r = requests.post(settings.READER_URL, data=dict(url=url), timeout=20)
         if r.status_code != 200:
@@ -82,7 +86,7 @@ def update_story(story, is_manual=False):
         return False
 
     if story['date'] and not is_manual and story['date'] + TWO_DAYS < time.time():
-        logging.info('Story too old, removing')
+        logging.info('Story too old, removing. Date: {}'.format(story['date']))
         return False
 
     if story.get('url', '') and not story.get('text', ''):
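The twitter.com → nitter.net swap above is a plain string substitution behind a prefix check. A minimal standalone sketch of the same idea (the helper name to_nitter is hypothetical, not from this repo):

    import logging

    def to_nitter(url):
        # str.replace() swaps every occurrence of the substring, not just the
        # host, so the startswith() guard keeps the rewrite to twitter.com links.
        if url.startswith('https://twitter.com'):
            logging.info('Replacing twitter.com url with nitter.net')
            url = url.replace('twitter.com', 'nitter.net')
        return url

    print(to_nitter('https://twitter.com/user/status/123'))
    # https://nitter.net/user/status/123

Note the guard only matches URLs that begin with exactly https://twitter.com; http:// and mobile.twitter.com links pass through to the readerserver unchanged.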

View File

@@ -61,11 +61,15 @@ def comment_count(i):
 
 def story(ref):
     r = api(API_ITEM, ref)
-    if not r: return False
+    if not r:
+        logging.info('Bad Hackernews API response.')
+        return False
 
     if 'deleted' in r:
+        logging.info('Story was deleted.')
         return False
     elif r.get('type', '') != 'story':
+        logging.info('Type "{}" is not "story".'.format(r.get('type', '')))
         return False
 
     s = {}
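Each early return in the scrapers now states its reason before bailing out. A runnable sketch of the same guard-clause pattern (the fake_api stub is illustrative, not from this repo):

    import logging

    logging.basicConfig(level=logging.INFO)

    def fake_api(ref):
        # Stand-in for the real API call; returns None to trip the guard.
        return None

    def story(ref):
        r = fake_api(ref)
        if not r:
            # Logging before the early return records which guard rejected the ref.
            logging.info('Bad Hackernews API response.')
            return False
        return r

    story(8863)  # logs: INFO:root:Bad Hackernews API response.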

View File

@@ -81,7 +81,9 @@ def iter_comments(flat_comments):
 
 def story(ref):
     r = api(API_ITEM, ref)
-    if not r: return False
+    if not r:
+        logging.info('Bad Lobsters API response.')
+        return False
 
     s = {}
     try:

View File

@@ -27,7 +27,9 @@ def api(route):
 
 def story(ref):
     html = api(ref)
-    if not html: return False
+    if not html:
+        logging.info('Bad http GET response.')
+        return False
 
     soup = BeautifulSoup(html, features='html.parser')

View File

@@ -59,7 +59,9 @@ def comment(i):
 
 def story(ref):
     try:
         r = reddit.submission(ref)
-        if not r: return False
+        if not r:
+            logging.info('Bad Reddit API response.')
+            return False
 
         s = {}
         s['author'] = r.author.name if r.author else '[Deleted]'
@@ -74,6 +76,7 @@ def story(ref):
         s['num_comments'] = r.num_comments
 
         if s['score'] < 25 and s['num_comments'] < 10:
+            logging.info('Score ({}) or num comments ({}) below threshold.'.format(s['score'], s['num_comments']))
             return False
 
         if r.selftext:
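The threshold message above builds its string eagerly with str.format. The stdlib logging module also accepts deferred %-style arguments, interpolating only if the record is actually emitted; an equivalent sketch:

    import logging

    score, num_comments = 10, 3
    # The string is only built when INFO-level output is enabled.
    logging.info('Score (%s) or num comments (%s) below threshold.',
                 score, num_comments)

Either form logs the same text; the deferred form just avoids wasted work on suppressed levels.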

View File

@@ -71,11 +71,15 @@ def story(ref):
         html = api(SITE_LINK(group_lookup[ref], ref))
     else:
         html = api(API_ITEM(ref))
-    if not html: return False
+    if not html:
+        logging.info('Bad Tildes API response.')
+        return False
 
     soup = BeautifulSoup(html, features='html.parser')
     a = soup.find('article', class_='topic-full')
-    if a is None: return False
+    if a is None:
+        logging.info('Tildes <article> element not found.')
+        return False
 
     h = a.find('header')
     lu = h.find('a', class_='link-user')
@@ -83,6 +87,7 @@ def story(ref):
     error = a.find('div', class_='text-error')
     if error:
         if 'deleted' in error.string or 'removed' in error.string:
+            logging.info('Article was deleted or removed.')
             return False
 
     s = {}
@@ -103,6 +108,7 @@ def story(ref):
     s['num_comments'] = int(ch.h2.string.split(' ')[0]) if ch else 0
 
     if s['score'] < 8 and s['num_comments'] < 6:
+        logging.info('Score ({}) or num comments ({}) below threshold.'.format(s['score'], s['num_comments']))
         return False
 
     td = a.find('div', class_='topic-full-text')
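BeautifulSoup's find() returns None when nothing matches, so the a is None guard above is what keeps the later a.find('header') from raising AttributeError. A minimal demonstration:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<html><body></body></html>', features='html.parser')
    a = soup.find('article', class_='topic-full')
    print(a)  # None: the markup has no matching <article> element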

View File

@@ -169,9 +169,10 @@ def feed_thread():
                 continue
             try:
                 nid = new_id()
+                logging.info('Adding ref: {}, id: {}, source: {}'.format(ref, nid, source))
                 database.put_ref(ref, nid, source)
-                logging.info('Added ref ' + ref)
             except database.IntegrityError:
+                logging.info('Already have ID / ref, skipping.')
                 continue
 
         ref_list = database.get_reflist(FEED_LENGTH)
@@ -186,7 +187,7 @@ def feed_thread():
             except AttributeError:
                 story = dict(id=item['sid'], ref=item['ref'], source=item['source'])
 
-            logging.info('Updating story: ' + str(story['ref']) + ', index: ' + str(news_index))
+            logging.info('Updating {} story: {}, index: {}'.format(story['source'], story['ref'], news_index))
             valid = feed.update_story(story)
             if valid:
@@ -209,10 +210,10 @@ def feed_thread():
         logging.critical('feed_thread error: {} {}'.format(e.__class__.__name__, e))
         http_server.stop()
 
-print('Starting Feed thread...')
+logging.info('Starting Feed thread...')
 gevent.spawn(feed_thread)
 
-print('Starting HTTP thread...')
+logging.info('Starting HTTP thread...')
 try:
     http_server.serve_forever()
 except KeyboardInterrupt:
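Replacing print with logging.info only produces output if the root logger is configured, since its default level is WARNING. The repo presumably sets this up elsewhere; a minimal sketch of the configuration these calls rely on (the format string is an assumption):

    import logging

    logging.basicConfig(
        format='%(asctime)s %(levelname)s %(message)s',
        level=logging.INFO)

    logging.info('Starting Feed thread...')  # now visible on stderr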