forked from tanner/qotnews
Compare commits
33 Commits
9f4ff4acf0
...
bfa4108a8e
Author | SHA1 | Date |
---|---|---|
Jason Schwarzenberger | bfa4108a8e | 4 years ago |
Jason Schwarzenberger | 0bd0d40a31 | 4 years ago |
Jason Schwarzenberger | 4e04595415 | 4 years ago |
Jason | 006db2960c | 4 years ago |
Jason Schwarzenberger | 1f063f0dac | 4 years ago |
Jason Schwarzenberger | 1658346aa9 | 4 years ago |
Jason Schwarzenberger | 2dbc702b40 | 4 years ago |
Jason Schwarzenberger | 1c4764e67d | 4 years ago |
Jason | ee49d2021e | 4 years ago |
Jason | c391c50ab1 | 4 years ago |
Jason Schwarzenberger | 095f0d549a | 4 years ago |
Jason Schwarzenberger | c21c71667e | 4 years ago |
Jason Schwarzenberger | c3a2c91a11 | 4 years ago |
Jason Schwarzenberger | 0f39446a61 | 4 years ago |
Jason Schwarzenberger | 351059aab1 | 4 years ago |
Jason Schwarzenberger | 4488e2c292 | 4 years ago |
Jason Schwarzenberger | afda5b635c | 4 years ago |
Jason Schwarzenberger | 0fc1a44d2b | 4 years ago |
Jason Schwarzenberger | 9fff1b9e46 | 4 years ago |
Jason Schwarzenberger | 16b59f6c67 | 4 years ago |
Jason Schwarzenberger | 939f4775a7 | 4 years ago |
Jason Schwarzenberger | 9bfc6fc6fa | 4 years ago |
Jason Schwarzenberger | 6ea9844d00 | 4 years ago |
Jason Schwarzenberger | 1318259d3d | 4 years ago |
Jason Schwarzenberger | 98a0c2257c | 4 years ago |
Jason Schwarzenberger | e6976db25d | 4 years ago |
Jason Schwarzenberger | 9edc8b7cca | 4 years ago |
Jason Schwarzenberger | 33e21e7f30 | 4 years ago |
Jason Schwarzenberger | 892a99eca6 | 4 years ago |
Jason Schwarzenberger | d718d05a04 | 4 years ago |
Jason Schwarzenberger | d1795eb1b8 | 4 years ago |
Tanner Collin | 9a279d44b1 | 4 years ago |
Tanner Collin | e506804666 | 4 years ago |
17 changed files with 377 additions and 217 deletions
@ -0,0 +1,41 @@ |
|||||||
|
import logging |
||||||
|
logging.basicConfig( |
||||||
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', |
||||||
|
level=logging.DEBUG) |
||||||
|
import requests |
||||||
|
|
||||||
|
# Remote "declutter" reader-mode service: article extraction and
# comment extraction endpoints, plus the per-request timeout (seconds).
DECLUTTER_API = 'https://declutter.1j.nz/details'
DECLUTTER_COMMENT_API = 'https://declutter.1j.nz/comments'
TIMEOUT = 30
||||||
|
|
||||||
|
|
||||||
|
def get_html(url):
    """Return the decluttered article content for `url`, or '' on failure."""
    logging.info(f"Declutter Scraper: {url}")
    result = get_details(url)
    return result['content'] if result else ''
||||||
|
|
||||||
|
def get_details(url):
    """POST `url` to the declutter service and return its parsed JSON
    payload, or None if the request fails for any reason.
    """
    try:
        response = requests.post(DECLUTTER_API, data={'url': url}, timeout=TIMEOUT)
        if response.status_code != 200:
            raise Exception('Bad response code ' + str(response.status_code))
        return response.json()
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except BaseException as e:
        logging.error('Problem decluttering article: {}'.format(str(e)))
        return None
||||||
|
|
||||||
|
def get_comments(url):
    """POST `url` to the declutter comments endpoint and return its parsed
    JSON payload, or None if the request fails for any reason.
    """
    try:
        response = requests.post(DECLUTTER_COMMENT_API, data={'url': url}, timeout=TIMEOUT)
        if response.status_code != 200:
            raise Exception('Bad response code ' + str(response.status_code))
        return response.json()
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except BaseException as e:
        logging.error('Problem getting comments for article: {}'.format(str(e)))
        return None
@ -0,0 +1,27 @@ |
|||||||
|
import logging |
||||||
|
logging.basicConfig( |
||||||
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', |
||||||
|
level=logging.DEBUG) |
||||||
|
import requests |
||||||
|
|
||||||
|
# Local readability sidecar (the Node scraper in this repo) and the
# per-request timeout (seconds).
READ_API = 'http://127.0.0.1:33843/details'
TIMEOUT = 20
||||||
|
|
||||||
|
def get_html(url):
    """Return the locally-scraped article content for `url`, or '' on failure."""
    logging.info(f"Local Scraper: {url}")
    result = get_details(url)
    return result['content'] if result else ''
||||||
|
|
||||||
|
def get_details(url):
    """POST `url` to the local readability service and return its parsed
    JSON payload, or None if the request fails for any reason.
    """
    try:
        response = requests.post(READ_API, data={'url': url}, timeout=TIMEOUT)
        if response.status_code != 200:
            raise Exception('Bad response code ' + str(response.status_code))
        return response.json()
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except BaseException as e:
        logging.error('Problem getting article: {}'.format(str(e)))
        return None
@ -0,0 +1,37 @@ |
|||||||
|
import logging |
||||||
|
logging.basicConfig( |
||||||
|
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', |
||||||
|
level=logging.DEBUG) |
||||||
|
import requests |
||||||
|
|
||||||
|
# outline.com article-parsing API; the Referer header is required by the
# service. TIMEOUT is the per-request timeout in seconds.
OUTLINE_REFERER = 'https://outline.com/'
OUTLINE_API = 'https://api.outline.com/v3/parse_article'
TIMEOUT = 20
||||||
|
|
||||||
|
def get_html(url):
    """Return the outlined article HTML for `url`, or '' on failure."""
    data = get_details(url)
    return data['html'] if data else ''
||||||
|
|
||||||
|
def get_details(url):
    """Fetch a reader-mode parse of `url` from the Outline API.

    Returns the article data dict from the API response, or None on any
    failure. A 429 response triggers a 30-second back-off before
    returning None.
    """
    # Bug fix: the original called time.sleep(30) without importing
    # `time`; the NameError was swallowed by the broad except below, so
    # the rate-limit back-off silently never ran. Import it locally to
    # keep this fix self-contained.
    import time

    try:
        logging.info(f"Outline Scraper: {url}")
        params = {'source_url': url}
        headers = {'Referer': OUTLINE_REFERER}
        r = requests.get(OUTLINE_API, params=params, headers=headers, timeout=TIMEOUT)
        if r.status_code == 429:
            logging.info('Rate limited by outline, sleeping 30s and skipping...')
            time.sleep(30)
            return None
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        data = r.json()['data']
        # Outline signals unsupported URLs inside the returned HTML
        # rather than via the status code.
        if 'URL is not supported by Outline' in data['html']:
            raise Exception('URL not supported by Outline')
        return data
    except KeyboardInterrupt:
        # Never swallow a user interrupt.
        raise
    except BaseException as e:
        logging.error('Problem outlining article: {}'.format(str(e)))
        return None
@ -0,0 +1,43 @@ |
|||||||
|
const request = require('request'); |
||||||
|
const JSDOM = require('jsdom').JSDOM; |
||||||
|
const { Readability } = require('readability'); |
||||||
|
|
||||||
|
// Build request options for `url`, spoofing Googlebot headers so that
// bot-gated/paywalled sites serve their full content.
const options = (url) => ({
  url,
  headers: {
    'User-Agent': 'Googlebot/2.1 (+http://www.google.com/bot.html)',
    'X-Forwarded-For': '66.249.66.1',
  },
});
||||||
|
|
||||||
|
// Parse the fetched page `body` with Mozilla Readability and return the
// extracted article object (null when nothing could be extracted). The
// `url` is needed by JSDOM to resolve relative links.
const extract = (url, body) => {
  const dom = new JSDOM(body, { url });
  return new Readability(dom.window.document).parse();
};
||||||
|
|
||||||
|
|
||||||
|
// Minimal HTML form served at '/' for manually submitting a URL to scrape.
module.exports.FORM = '<form method="POST" action="/" accept-charset="UTF-8"><input name="url"><button type="submit">SUBMIT</button></form>';
||||||
|
// Express handler: fetch the submitted URL and respond with just the
// extracted article HTML, or 404 / the upstream status on failure.
module.exports.scrape = (req, res) => request(options(req.body.url), (error, response, body) => {
  if (error || response.statusCode != 200) {
    console.log('Response error:', error ? error.toString() : response.statusCode);
    return res.sendStatus(response ? response.statusCode : 404);
  }
  // Bug fix: the original called extract(url, body) but `url` was never
  // in scope here, throwing a ReferenceError on every successful fetch.
  const article = extract(req.body.url, body);
  if (article && article.content) {
    return res.send(article.content);
  }
  return res.sendStatus(404);
});
||||||
|
|
||||||
|
// Express handler: fetch the submitted URL and respond with the full
// Readability article object (title, content, byline, ...), or 404 /
// the upstream status on failure.
module.exports.details = (req, res) => request(options(req.body.url), (error, response, body) => {
  if (error || response.statusCode != 200) {
    console.log('Response error:', error ? error.toString() : response.statusCode);
    return res.sendStatus(response ? response.statusCode : 404);
  }
  // Bug fix: the original called extract(url, body) but `url` was never
  // in scope here, throwing a ReferenceError on every successful fetch.
  const article = extract(req.body.url, body);
  if (article) {
    return res.send(article);
  }
  return res.sendStatus(404);
});
File diff suppressed because one or more lines are too long
Loading…
Reference in new issue