diff --git a/apiserver/feed.py b/apiserver/feed.py
index a53874a..60db037 100644
--- a/apiserver/feed.py
+++ b/apiserver/feed.py
@@ -10,7 +10,6 @@ from bs4 import BeautifulSoup
 from feeds import hackernews, reddit, tildes, manual
 
 OUTLINE_API = 'https://api.outline.com/v3/parse_article'
-ARCHIVE_API = 'https://archive.fo/submit/'
 READ_API = 'http://127.0.0.1:33843'
 
 INVALID_DOMAINS = ['youtube.com', 'bloomberg.com', 'wsj.com']
@@ -72,8 +71,6 @@ def get_content_type(url):
 def update_story(story, is_manual=False):
     res = {}
 
-    logging.info('Updating story ' + str(story['ref']))
-
     if story['source'] == 'hackernews':
         res = hackernews.story(story['ref'])
     elif story['source'] == 'reddit':
diff --git a/apiserver/server.py b/apiserver/server.py
index 868c16b..cdf740f 100644
--- a/apiserver/server.py
+++ b/apiserver/server.py
@@ -175,6 +175,8 @@ def feed_thread():
                 except AttributeError:
                     story = dict(id=item['sid'], ref=item['ref'], source=item['source'])
 
+                logging.info('Updating story: ' + str(story['ref']) + ', index: ' + str(news_index))
+
                 valid = feed.update_story(story)
                 if valid:
                     database.put_story(story)
@@ -183,7 +185,7 @@ def feed_thread():
                     database.del_ref(item['ref'])
                     logging.info('Removed ref {}'.format(item['ref']))
             else:
-                logging.info('Skipping index')
+                logging.info('Skipping index: ' + str(news_index))
 
             gevent.sleep(6)