"""Client for a local article-extraction service.

Posts URLs to a details service running on 127.0.0.1:33843 and returns
the extracted article HTML.
"""

import logging

import requests

logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

# Endpoint of the local article-details (scraper) service.
READ_API = 'http://127.0.0.1:33843/details'
# Seconds to wait for the details service before giving up.
TIMEOUT = 20
|
def get_html(url):
    """Return the extracted HTML content for *url*.

    Asks the local details service via get_details(). Returns '' when
    the service could not produce details; otherwise returns the
    'content' field of the service's JSON response. Any exception from
    the lookup propagates to the caller unchanged.
    """
    # Lazy %-args so the message is only formatted when INFO is enabled.
    logging.info("Local Scraper: %s", url)
    # NOTE: the original wrapped this in `try: ... except: raise`, a
    # bare-except that only re-raised — a no-op, so it was removed.
    details = get_details(url)
    if not details:
        return ''
    return details['content']
|
def get_details(url):
    """POST *url* to the local details service and return the parsed JSON.

    Returns the decoded JSON body (a dict with at least a 'content' key,
    per the caller's usage) on success, or None when the request fails
    or the service answers with a non-200 status. KeyboardInterrupt is
    always re-raised so Ctrl-C still stops the process.
    """
    try:
        r = requests.post(READ_API, data=dict(url=url), timeout=TIMEOUT)
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        return r.json()
    except KeyboardInterrupt:
        raise
    except Exception as e:
        # Was `except BaseException`, which also swallowed SystemExit and
        # GeneratorExit; Exception is the broadest catch that is safe here.
        logging.error('Problem getting article: %s', e)
        return None