forked from tanner/qotnews
		
	Archive WSJ articles first, catch KeyboardInterrupt
This commit is contained in:
		| @@ -12,6 +12,7 @@ OUTLINE_API = 'https://outlineapi.com/article' | ||||
| ARCHIVE_API = 'https://archive.fo/submit/' | ||||
| READ_API = 'http://127.0.0.1:33843' | ||||
|  | ||||
| ARCHIVE_FIRST = ['bloomberg.com', 'wsj.com'] | ||||
| INVALID_FILES = ['.pdf', '.png', '.jpg', '.gif'] | ||||
| INVALID_DOMAINS = ['youtube.com'] | ||||
|  | ||||
| @@ -19,13 +20,13 @@ def list(): | ||||
|     feed = [] | ||||
|     feed += [(x, 'hackernews') for x in hackernews.feed()[:10]] | ||||
|     feed += [(x, 'reddit') for x in reddit.feed()[:10]] | ||||
|     feed += [(x, 'tildes') for x in tildes.feed()[:10]] | ||||
|     feed += [(x, 'tildes') for x in tildes.feed()[:5]] | ||||
|     return feed | ||||
|  | ||||
| def get_article(url): | ||||
|     if 'bloomberg.com' in url: | ||||
|     if any([domain in url for domain in ARCHIVE_FIRST]): | ||||
|         try: | ||||
|             logging.info('Article from Bloomberg, archiving first...') | ||||
|             logging.info('Article from {}, archiving first...'.format(url)) | ||||
|             data = {'submitid': '9tjtS1EYe5wy8AJiYgVfH9P97uHU1IHG4lO67hsQpHOC3KKJrhqVIoQG2U7Rg%2Fpr', 'url': url} | ||||
|             r = requests.post(ARCHIVE_API, data=data, timeout=20, allow_redirects=False) | ||||
|             if r.status_code == 200: | ||||
| @@ -35,6 +36,8 @@ def get_article(url): | ||||
|                 url = r.headers['location'] | ||||
|             else: | ||||
|                 raise Exception('Bad response code ' + str(r.status_code)) | ||||
|         except KeyboardInterrupt: | ||||
|             raise | ||||
|         except BaseException as e: | ||||
|             logging.error('Problem archiving article: {}'.format(str(e))) | ||||
|             return '' | ||||
| @@ -53,6 +56,8 @@ def get_article(url): | ||||
|         if 'URL is not supported by Outline' in html: | ||||
|             raise Exception('URL not supported by Outline') | ||||
|         return html | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except BaseException as e: | ||||
|         logging.error('Problem outlining article: {}'.format(str(e))) | ||||
|  | ||||
| @@ -63,6 +68,8 @@ def get_article(url): | ||||
|         if r.status_code != 200: | ||||
|             raise Exception('Bad response code ' + str(r.status_code)) | ||||
|         return r.text | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except BaseException as e: | ||||
|         logging.error('Problem getting article: {}'.format(str(e))) | ||||
|         return '' | ||||
| @@ -86,15 +93,13 @@ def update_story(story): | ||||
|         return False | ||||
|  | ||||
|     if story.get('url', '') and not story.get('text', ''): | ||||
|         for ext in INVALID_FILES: | ||||
|             if story['url'].endswith(ext): | ||||
|                 logging.info('URL invalid file type ({})'.format(ext)) | ||||
|                 return False | ||||
|         matched_ext = next((ext for ext in INVALID_FILES if story['url'].endswith(ext)), None) | ||||
|         if matched_ext: | ||||
|             logging.info('URL invalid file type ({})'.format(matched_ext)) | ||||
|             return False | ||||
|  | ||||
|         for domain in INVALID_DOMAINS: | ||||
|             if domain in story['url']: | ||||
|                 logging.info('URL invalid domain ({})'.format(domain)) | ||||
|                 return False | ||||
|         matched_domain = next((domain for domain in INVALID_DOMAINS if domain in story['url']), None) | ||||
|         if matched_domain: | ||||
|             logging.info('URL invalid domain ({})'.format(matched_domain)) | ||||
|             return False | ||||
|  | ||||
|         logging.info('Getting article ' + story['url']) | ||||
|         story['text'] = get_article(story['url']) | ||||
|   | ||||
| @@ -17,6 +17,8 @@ def api(route, ref=None): | ||||
|         if r.status_code != 200: | ||||
|             raise Exception('Bad response code ' + str(r.status_code)) | ||||
|         return r.json() | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except BaseException as e: | ||||
|         logging.error('Problem hitting hackernews API: {}'.format(str(e))) | ||||
|         return False | ||||
|   | ||||
| @@ -24,6 +24,8 @@ reddit = praw.Reddit('bot') | ||||
| def feed(): | ||||
|     try: | ||||
|         return [x.id for x in reddit.subreddit(SUBREDDITS).hot()] | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except PRAWException as e: | ||||
|         logging.error('Problem hitting reddit API: {}'.format(str(e))) | ||||
|         return [] | ||||
| @@ -70,6 +72,8 @@ def story(ref): | ||||
|  | ||||
|         return s | ||||
|  | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except PRAWException as e: | ||||
|         logging.error('Problem hitting reddit API: {}'.format(str(e))) | ||||
|         return False | ||||
|   | ||||
| @@ -25,6 +25,8 @@ def api(route): | ||||
|         if r.status_code != 200: | ||||
|             raise Exception('Bad response code ' + str(r.status_code)) | ||||
|         return r.text | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except BaseException as e: | ||||
|         logging.error('Problem hitting tildes website: {}'.format(str(e))) | ||||
|         return False | ||||
|   | ||||
| @@ -152,6 +152,8 @@ try: | ||||
|         news_index += 1 | ||||
|         if news_index == CACHE_LENGTH: news_index = 0 | ||||
|  | ||||
| except KeyboardInterrupt: | ||||
|     logging.info('Exiting...') | ||||
| finally: | ||||
|     with shelve.open(DATA_FILE) as db: | ||||
|         logging.info('Writing caches to disk...') | ||||
|   | ||||
		Reference in New Issue
	
	Block a user