Ignore certain files and domains, remove refs

Tanner Collin 2019-09-24 08:22:06 +00:00
parent 0496fbba45
commit 0053147226
2 changed files with 41 additions and 19 deletions

View File

@@ -11,11 +11,14 @@ from feeds import hackernews, reddit, tildes
 OUTLINE_API = 'https://outlineapi.com/article'
 READ_API = 'http://127.0.0.1:33843'
 
+INVALID_FILES = ['.pdf', '.png', '.jpg', '.gif']
+INVALID_DOMAINS = ['youtube.com']
+
 def list():
     feed = []
     feed += [(x, 'hackernews') for x in hackernews.feed()[:10]]
-    feed += [(x, 'reddit') for x in reddit.feed()[:5]]
-    feed += [(x, 'tildes') for x in tildes.feed()[:5]]
+    feed += [(x, 'reddit') for x in reddit.feed()[:10]]
+    feed += [(x, 'tildes') for x in tildes.feed()[:10]]
     return feed
 
 def get_article(url):
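Note: the new INVALID_FILES / INVALID_DOMAINS constants drive the URL filtering added in the next hunk. A minimal standalone sketch of the same check (the url_is_valid helper is hypothetical, not part of this commit):

    INVALID_FILES = ['.pdf', '.png', '.jpg', '.gif']
    INVALID_DOMAINS = ['youtube.com']

    def url_is_valid(url):
        # reject direct links to file types the scraper can't parse
        if any(url.endswith(ext) for ext in INVALID_FILES):
            return False
        # reject domains that never yield readable article text
        if any(domain in url for domain in INVALID_DOMAINS):
            return False
        return True

    assert not url_is_valid('https://example.com/report.pdf')
    assert not url_is_valid('https://youtube.com/watch?v=abc')
    assert url_is_valid('https://example.com/blog/post')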
@@ -58,17 +61,29 @@ def update_story(story):
         res = reddit.story(story['ref'])
     elif story['source'] == 'tildes':
         res = tildes.story(story['ref'])
+    else:
+        return
 
     if res:
-        story.update(res)
+        story.update(res) # join dicts
+    else:
+        logging.info('Article not ready yet')
+        return False
 
     if story.get('url', '') and not story.get('text', ''):
-        if not story['url'].endswith('.pdf'):
-            logging.info('Getting article ' + story['url'])
-            story['text'] = get_article(story['url'])
-        else:
-            story['text'] = '<p>Unsupported article type.</p>'
+        for ext in INVALID_FILES:
+            if story['url'].endswith(ext):
+                logging.info('URL invalid file type ({})'.format(ext))
+                return False
+        for domain in INVALID_DOMAINS:
+            if domain in story['url']:
+                logging.info('URL invalid domain ({})'.format(domain))
+                return False
+        logging.info('Getting article ' + story['url'])
+        story['text'] = get_article(story['url'])
+        if not story['text']: return False
+    return True
 
 
 if __name__ == '__main__':
     test_news_cache = {}
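update_story now returns True/False instead of mutating unconditionally, so callers can discard stories that aren't ready or can't be scraped. A caller-side sketch, assuming a story dict shaped like the ones in this repo (the ref value is a placeholder):

    import feed

    story = {'source': 'hackernews', 'ref': 'some-ref'}
    if feed.update_story(story):
        print('fetched:', story.get('title'))
    else:
        print('skipping ref', story['ref'])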

View File

@@ -25,14 +25,13 @@ with shelve.open(DATA_FILE) as db:
     news_ref_to_id = db.get('news_ref_to_id', {})
     news_cache = db.get('news_cache', {})
 
-
 flask_app = Flask(__name__)
 cors = CORS(flask_app)
 
 @flask_app.route('/')
 def index():
     front_page = [news_cache[news_ref_to_id[ref]] for ref in news_list]
-    front_page = [copy.copy(x) for x in front_page if 'title' in x]
+    front_page = [copy.copy(x) for x in front_page if 'text' in x and x['text']][:100]
     for story in front_page:
         if 'comments' in story: story.pop('comments')
         if 'text' in story: story.pop('text')
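The index route now serves only stories whose text has actually been fetched, capped at 100, and it pops the heavy fields off shallow copies so the cached dicts keep their full content. A minimal illustration of that copy-then-pop pattern:

    import copy

    cache = {'id1': {'title': 'Example', 'text': 'full article text'}}
    trimmed = copy.copy(cache['id1'])  # shallow copy; the cache entry is untouched
    trimmed.pop('text')
    assert 'text' not in trimmed
    assert 'text' in cache['id1']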
@@ -61,6 +60,16 @@ def new_id():
         nid = gen_rand_id()
     return nid
 
+def remove_ref(old_ref, archive=False):
+    while old_ref in news_list:
+        news_list.remove(old_ref)
+    old_story = news_cache.pop(news_ref_to_id[old_ref])
+    old_id = news_ref_to_id.pop(old_ref)
+    logging.info('Removed ref {} id {}.'.format(old_ref, old_id))
+    if archive:
+        with shelve.open(DATA_FILE) as db:
+            db[old_id] = old_story
+
 try:
     while True:
         if news_index == 0:
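remove_ref centralizes eviction: it strips every occurrence of the ref from news_list, drops the cached story, and optionally archives it to the shelve file under its id. A sketch of reading an archived story back later (the filename and id are placeholders):

    import shelve

    with shelve.open('data.db') as db:  # DATA_FILE in this repo
        story = db.get('someid')        # hypothetical story id
        if story:
            print(story['title'])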
@@ -76,18 +85,16 @@ try:
         logging.info('Added {} new refs.'.format(len(new_items)))
 
         while len(news_list) > CACHE_LENGTH:
-            old_ref = news_list.pop()
-            old_story = news_cache.pop(news_ref_to_id[old_ref])
-            old_id = news_ref_to_id.pop(old_ref)
-            logging.info('Removed ref {} id {}.'.format(old_ref, old_id))
-            with shelve.open(DATA_FILE) as db:
-                db[old_id] = old_story
+            old_ref = news_list[-1]
+            remove_ref(old_ref, archive=True)
 
         if news_index < len(news_list):
             update_ref = news_list[news_index]
             update_id = news_ref_to_id[update_ref]
             news_story = news_cache[update_id]
-            feed.update_story(news_story)
+            valid = feed.update_story(news_story)
+            if not valid:
+                remove_ref(update_ref)
 
         time.sleep(3)
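With both call sites funneled through remove_ref, eviction also cleans up duplicate refs, and a story whose update fails is dropped instead of lingering in the cache. A toy run of the eviction loop's shape, with plain lists standing in for the real state:

    CACHE_LENGTH = 3
    news_list = ['e', 'd', 'c', 'b', 'a']  # newest first
    evicted = []

    while len(news_list) > CACHE_LENGTH:
        old_ref = news_list[-1]
        # remove_ref(old_ref, archive=True) in the real code
        while old_ref in news_list:
            news_list.remove(old_ref)
        evicted.append(old_ref)

    assert news_list == ['e', 'd', 'c']
    assert evicted == ['a', 'b']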