scraper settings, ordering and loop.
parent 6ea9844d00
commit 9bfc6fc6fa
@@ -54,27 +54,26 @@ def list():
     return feed
 
 def get_article(url):
-    try:
-        return declutter.get_html(url)
-    except KeyboardInterrupt:
-        raise
-    except:
-        pass
-
-    try:
-        return outline.get_html(url)
-    except KeyboardInterrupt:
-        raise
-    except:
-        pass
-
-    try:
-        return local.get_html(url)
-    except KeyboardInterrupt:
-        raise
-    except:
-        pass
-
+    scrapers = {
+        'declutter': declutter,
+        'outline': outline,
+        'local': local,
+    }
+    available = settings.SCRAPERS or ['local']
+    if 'local' not in available:
+        available += ['local']
+
+    for scraper in available:
+        if scraper not in scrapers.keys():
+            continue
+        try:
+            html = scrapers[scraper].get_html(url)
+            if html:
+                return html
+        except KeyboardInterrupt:
+            raise
+        except:
+            pass
     return ''
 
 def get_content_type(url):
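The rewritten get_article() walks the scrapers named in settings.SCRAPERS in order and falls through to the next one whenever a scraper raises or returns nothing, with 'local' always appended as the last resort. Below is a minimal standalone sketch of that fallback loop, assuming stub scraper objects in place of the real declutter, outline, and local modules (which each expose get_html(url) in this codebase):

# Sketch of the fallback loop above; the stub scrapers are stand-ins,
# not the real modules imported by the feed code.

class _StubScraper:
    def __init__(self, name, html=''):
        self.name = name
        self.html = html

    def get_html(self, url):
        # A real scraper would fetch and clean the page; the stub just
        # returns whatever canned HTML it was constructed with.
        return self.html

declutter = _StubScraper('declutter', '')            # returns nothing -> skipped
outline = _StubScraper('outline', '<p>article</p>')  # first non-empty result wins
local = _StubScraper('local', '<p>raw page</p>')     # never reached here

SCRAPERS = ['declutter', 'outline', 'local']  # same shape as settings.SCRAPERS

def get_article(url):
    scrapers = {'declutter': declutter, 'outline': outline, 'local': local}
    available = SCRAPERS or ['local']
    if 'local' not in available:
        available += ['local']        # guarantee a final fallback

    for scraper in available:
        if scraper not in scrapers:
            continue                  # ignore unknown names in the setting
        try:
            html = scrapers[scraper].get_html(url)
            if html:
                return html           # first scraper with output wins
        except KeyboardInterrupt:
            raise
        except:
            pass                      # any other failure: try the next scraper
    return ''

print(get_article('https://example.com'))  # -> '<p>article</p>' (from outline)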
@@ -23,6 +23,8 @@ NUM_SUBSTACK = 10
 # 'rnz national': { 'url': "https://www.rnz.co.nz/news/national", 'count': 10},
 # }
 
+SCRAPERS = ['declutter', 'outline', 'local']
+
 # Reddit account info
 # leave blank if not using Reddit
 REDDIT_CLIENT_ID = ''
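The list order here decides which scraper get_article() tries first, and dropping a name disables that scraper. As a hypothetical configuration (not part of this commit), a deployment that skips Declutter and relies on Outline with the built-in local fallback could use:

SCRAPERS = ['outline', 'local']
# Even if 'local' were omitted, get_article() appends it,
# so there is always a last-resort scraper.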