From 810e8c5ead9eb4b456b9f341356d22c76d8d7ea4 Mon Sep 17 00:00:00 2001 From: Tanner Collin Date: Tue, 15 Oct 2019 21:03:47 +0000 Subject: [PATCH] Archive WSJ articles first, catch KeyboardInterrupt --- apiserver/feed.py | 29 +++++++++++++++++------------ apiserver/feeds/hackernews.py | 2 ++ apiserver/feeds/reddit.py | 4 ++++ apiserver/feeds/tildes.py | 2 ++ apiserver/server.py | 2 ++ 5 files changed, 27 insertions(+), 12 deletions(-) diff --git a/apiserver/feed.py b/apiserver/feed.py index 7a0068f..2d7fba8 100644 --- a/apiserver/feed.py +++ b/apiserver/feed.py @@ -12,6 +12,7 @@ OUTLINE_API = 'https://outlineapi.com/article' ARCHIVE_API = 'https://archive.fo/submit/' READ_API = 'http://127.0.0.1:33843' +ARCHIVE_FIRST = ['bloomberg.com', 'wsj.com'] INVALID_FILES = ['.pdf', '.png', '.jpg', '.gif'] INVALID_DOMAINS = ['youtube.com'] @@ -19,13 +20,13 @@ def list(): feed = [] feed += [(x, 'hackernews') for x in hackernews.feed()[:10]] feed += [(x, 'reddit') for x in reddit.feed()[:10]] - feed += [(x, 'tildes') for x in tildes.feed()[:10]] + feed += [(x, 'tildes') for x in tildes.feed()[:5]] return feed def get_article(url): - if 'bloomberg.com' in url: + if any([domain in url for domain in ARCHIVE_FIRST]): try: - logging.info('Article from Bloomberg, archiving first...') + logging.info('Article from {}, archiving first...'.format(url)) data = {'submitid': '9tjtS1EYe5wy8AJiYgVfH9P97uHU1IHG4lO67hsQpHOC3KKJrhqVIoQG2U7Rg%2Fpr', 'url': url} r = requests.post(ARCHIVE_API, data=data, timeout=20, allow_redirects=False) if r.status_code == 200: @@ -35,6 +36,8 @@ def get_article(url): url = r.headers['location'] else: raise Exception('Bad response code ' + str(r.status_code)) + except KeyboardInterrupt: + raise except BaseException as e: logging.error('Problem archiving article: {}'.format(str(e))) return '' @@ -53,6 +56,8 @@ def get_article(url): if 'URL is not supported by Outline' in html: raise Exception('URL not supported by Outline') return html + except KeyboardInterrupt: + 
raise except BaseException as e: logging.error('Problem outlining article: {}'.format(str(e))) @@ -63,6 +68,8 @@ def get_article(url): if r.status_code != 200: raise Exception('Bad response code ' + str(r.status_code)) return r.text + except KeyboardInterrupt: + raise except BaseException as e: logging.error('Problem getting article: {}'.format(str(e))) return '' @@ -86,15 +93,13 @@ def update_story(story): return False if story.get('url', '') and not story.get('text', ''): - for ext in INVALID_FILES: - if story['url'].endswith(ext): - logging.info('URL invalid file type ({})'.format(ext)) - return False - - for domain in INVALID_DOMAINS: - if domain in story['url']: - logging.info('URL invalid domain ({})'.format(domain)) - return False + if any([story['url'].endswith(ext) for ext in INVALID_FILES]): + logging.info('URL invalid file type ({})'.format(story['url'])) + return False + + if any([domain in story['url'] for domain in INVALID_DOMAINS]): + logging.info('URL invalid domain ({})'.format(story['url'])) + return False logging.info('Getting article ' + story['url']) story['text'] = get_article(story['url']) diff --git a/apiserver/feeds/hackernews.py b/apiserver/feeds/hackernews.py index e9c9a31..2670886 100644 --- a/apiserver/feeds/hackernews.py +++ b/apiserver/feeds/hackernews.py @@ -17,6 +17,8 @@ def api(route, ref=None): if r.status_code != 200: raise Exception('Bad response code ' + str(r.status_code)) return r.json() + except KeyboardInterrupt: + raise except BaseException as e: logging.error('Problem hitting hackernews API: {}'.format(str(e))) return False diff --git a/apiserver/feeds/reddit.py b/apiserver/feeds/reddit.py index b7ee104..2bc3f7d 100644 --- a/apiserver/feeds/reddit.py +++ b/apiserver/feeds/reddit.py @@ -24,6 +24,8 @@ reddit = praw.Reddit('bot') def feed(): try: return [x.id for x in reddit.subreddit(SUBREDDITS).hot()] + except KeyboardInterrupt: + raise except PRAWException as e: logging.error('Problem hitting reddit API: {}'.format(str(e))) return [] @@ 
-70,6 +72,8 @@ def story(ref): return s + except KeyboardInterrupt: + raise except PRAWException as e: logging.error('Problem hitting reddit API: {}'.format(str(e))) return False diff --git a/apiserver/feeds/tildes.py b/apiserver/feeds/tildes.py index 5f5dd11..c3c8e56 100644 --- a/apiserver/feeds/tildes.py +++ b/apiserver/feeds/tildes.py @@ -25,6 +25,8 @@ def api(route): if r.status_code != 200: raise Exception('Bad response code ' + str(r.status_code)) return r.text + except KeyboardInterrupt: + raise except BaseException as e: logging.error('Problem hitting tildes website: {}'.format(str(e))) return False diff --git a/apiserver/server.py b/apiserver/server.py index 4669b48..72d90a7 100644 --- a/apiserver/server.py +++ b/apiserver/server.py @@ -152,6 +152,8 @@ try: news_index += 1 if news_index == CACHE_LENGTH: news_index = 0 +except KeyboardInterrupt: + logging.info('Exiting...') finally: with shelve.open(DATA_FILE) as db: logging.info('Writing caches to disk...')