forked from tanner/qotnews
basically add declutter-like capabilities.
readerserver/scraper/browser/_browser.js (Normal file, 44 lines)
@@ -0,0 +1,44 @@
const { firefox } = require("playwright");
const { JSDOM } = require("jsdom");
const { Readability } = require("@mozilla/readability");

const { getUserAgent } = require('../../utils/user-agent');
// NOTE: blockedRegexes and matchUrlDomain are used below but never imported in
// this file; presumably they come from the bypass-paywalls-chrome submodule
// sources (an assumption -- the require is missing as committed).

module.exports.getDetails = async (url) => {
  const { userAgent, headers } = getUserAgent(url);

  const browser = await firefox.launch({ args: [], headless: true });
  const tab = await browser.newPage({
    extraHTTPHeaders: headers,
    userAgent,
    viewport: { width: 2000, height: 10000 },
  });

  try {
    // Abort any request whose URL matches a blocked-domain regex.
    await tab.route(/.*/, (route) => {
      const routeUrl = route.request().url();
      const blockedDomains = Object.keys(blockedRegexes);
      const domain = matchUrlDomain(blockedDomains, routeUrl);
      if (domain && routeUrl.match(blockedRegexes[domain])) {
        return route.abort();
      }
      return route.continue();
    });
    await tab.addInitScript({ path: "scraper/browser/scripts/bypass-paywalls-chrome/src/js/contentScript.js" });
    await tab.addInitScript({ path: "scraper/browser/scripts/cosmetic-filters.js" });
    await tab.addInitScript({ path: "scraper/browser/scripts/fix-relative-links.js" });
    await tab.goto(url, { timeout: 60000, waitUntil: "domcontentloaded" });
    await tab.waitForTimeout(2000);

    const body = await tab.content();
    const doc = new JSDOM(body, { url });
    const reader = new Readability(doc.window.document);
    const article = reader.parse();
    return article;
  } finally {
    // the original `catch (e) { throw e; }` was a no-op and has been dropped
    await tab.close();
    await browser.close();
  }
};
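Note: a minimal sketch of driving getDetails by hand, assuming Playwright's Firefox build is installed and the process is started from the readerserver directory so the relative addInitScript paths resolve (the target URL is a placeholder):

// hypothetical harness, not part of the commit
const { getDetails } = require('./scraper/browser/_browser');

(async () => {
  const article = await getDetails('https://example.com/story');
  if (article) {
    console.log(article.title);          // Readability's parsed title
    console.log(article.content.length); // size of the cleaned HTML
  }
})();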
readerserver/scraper/browser/_comments.js (Normal file, 34 lines)
@@ -0,0 +1,34 @@
const { JSDOM } = require("jsdom");
const { firefox } = require("playwright");
const { getUserAgent } = require('../../utils/user-agent');
const { disqusThread } = require('../../utils/disqus-thread');

const DISQUS_EMBED = 'https://disqus.com/embed/comments/';

module.exports.getComments = async (url) => {
  const { userAgent, headers } = getUserAgent(url);

  const browser = await firefox.launch({ args: [], headless: true });
  const tab = await browser.newPage({
    extraHTTPHeaders: headers,
    userAgent,
    viewport: { width: 2000, height: 10000 },
  });

  try {
    await tab.goto(url, { timeout: 60000, waitUntil: "domcontentloaded" });

    // Wait for the Disqus embed document, then parse the thread data out of it.
    const response = await tab.waitForResponse((response) => response.url().includes(DISQUS_EMBED));
    const text = await response.text();
    const dom = new JSDOM(text, { url: response.url() }); // fix: JSDOM takes an options object, not a bare URL string
    const script = dom.window.document.querySelector('#disqus-threadData');
    const data = JSON.parse(script.innerHTML);

    return disqusThread(data);
  } finally {
    await tab.close();
    await browser.close();
  }
};
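Note: if the target page never loads a Disqus embed, the waitForResponse call above blocks until Playwright's default 30-second timeout. A sketch of a stricter variant (the 10-second bound is an assumption, not from the commit):

// fail fast on pages without a Disqus thread
const response = await tab.waitForResponse(
  (response) => response.url().includes(DISQUS_EMBED),
  { timeout: 10000 }
);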
readerserver/scraper/browser/index.js (Normal file, 40 lines)
@@ -0,0 +1,40 @@
const { getDetails } = require('./_browser');
const { getComments } = require('./_comments');

module.exports.scrape = async (req, res) => {
  try {
    const article = await getDetails(req.body.url);
    if (!article || !article.content) {
      throw new Error('failed to get details.');
    }
    return res.send(article.content);
  } catch (e) {
    console.log(e); // added for parity with the other handlers, which log before responding
    return res.sendStatus(500);
  }
};

module.exports.details = async (req, res) => {
  try {
    const article = await getDetails(req.body.url);
    if (!article) {
      throw new Error('failed to get details.');
    }
    return res.send(article);
  } catch (e) {
    console.log(e);
    return res.sendStatus(500);
  }
};

module.exports.comments = async (req, res) => {
  try {
    const comments = await getComments(req.body.url);
    if (!comments) {
      throw new Error('failed to get comments.');
    }
    return res.send(comments);
  } catch (e) {
    console.log(e);
    return res.sendStatus(500);
  }
};
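Note: the diff does not show how these handlers are mounted. A minimal Express wiring sketch; the route paths, body parser, and port are assumptions, not taken from this commit:

const express = require('express');
const browser = require('./scraper/browser');

const app = express();
app.use(express.urlencoded({ extended: false })); // handlers read req.body.url

app.post('/browser/scrape', browser.scrape);     // cleaned article HTML
app.post('/browser/details', browser.details);   // full Readability result
app.post('/browser/comments', browser.comments); // parsed Disqus thread
app.listen(33843);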
Submodule readerserver/scraper/browser/scripts/bypass-paywalls-chrome added at 44f3d1b114
readerserver/scraper/browser/scripts/cosmetic-filters.js (Normal file, 96 lines)
@@ -0,0 +1,96 @@
(function () {
  removeHiddenElements();

  if (matchDomain("stuff.co.nz")) {
    removeSelectors([
      ".support-brief-container",
      '[class*="donation-in-"]',
      ".sics-component__sharebar",
      ".breaking-news-pointer",
      ".bigbyline-container",
      [
        ".sics-component__html-injector.sics-component__story__paragraph",
        "READ MORE:",
      ],
    ]);
  }
  if (matchDomain("nzherald.co.nz")) {
    removeSelectors([
      "[href$='#commenting-widget']",
      ".related-articles",
      ".article__print-button",
      ".share-bar",
      ".c-suggest-links.read-more-links",
      ".website-of-year",
      ".meta-data",
      ".article__kicker",
      ".author__image",
    ]);
  }
  if (matchDomain(["rnz.co.nz", "radionz.co.nz"])) {
    removeSelectors([".c-advert-app", ".c-sub-nav"]);
  }
  if (matchDomain(["newsroom.co.nz"])) {
    removeSelectors([".article_content__section", ".bio"]);
  }
  if (matchDomain(["newshub.co.nz"])) {
    removeSelectors([".c-ArticleHeading-authorPicture", ".relatedarticles"]);
  }
  if (matchDomain(["tvnz.co.nz"])) {
    removeSelectors([".signup-container container"]); // as committed; possibly intended ".signup-container .container"
  }

  function matchDomain(domains) {
    const hostname = window.location.hostname;
    if (typeof domains === "string") {
      domains = [domains];
    }
    return domains.some(
      (domain) => hostname === domain || hostname.endsWith("." + domain)
    );
  }

  function removeDOMElement(...elements) {
    for (const element of elements) {
      if (element) {
        element.remove();
      }
    }
  }

  // Returns the elements matching `selector` whose text content matches `text` (as a regex).
  function pageContains(selector, text) {
    const elements = document.querySelectorAll(selector);
    return Array.prototype.filter.call(elements, function (element) {
      return RegExp(text).test(element.textContent);
    });
  }

  // Strip elements that are display:none or visibility:hidden once the page settles.
  function removeHiddenElements() {
    window.setTimeout(function () {
      const selector = "*:not(script):not(head):not(meta):not(link):not(style)";
      Array.from(document.querySelectorAll(selector))
        .filter((element) => {
          const computed = getComputedStyle(element);
          const displayNone = computed["display"] === "none";
          const visibilityHidden = computed["visibility"] === "hidden";
          return displayNone || visibilityHidden;
        })
        .forEach((element) => element && element.remove());
    }, 1000);
  }

  // Accepts plain CSS selectors and [selector, text] pairs (see pageContains).
  function removeSelectors(selectors) {
    window.setTimeout(function () {
      const elements = selectors.flatMap((s) => {
        if (typeof s === "string") {
          return Array.from(document.querySelectorAll(s));
        }
        if (s && s.constructor.name === "Array") {
          return pageContains(...s);
        }
        return undefined;
      });
      removeDOMElement(...elements);
    }, 1000);
  }
})();
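Note: removeSelectors accepts two shapes, as the flatMap above shows: a plain string is handed to querySelectorAll, while a two-element array is a [selector, text] pair routed through pageContains, removing only elements whose text content matches the regex. An illustrative call (the selector names here are hypothetical):

removeSelectors([
  '.share-bar',                        // every element matching the selector
  ['.story__paragraph', 'READ MORE:'], // only paragraphs whose text matches /READ MORE:/
]);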
readerserver/scraper/browser/scripts/fix-relative-links.js (Normal file, 14 lines)
@@ -0,0 +1,14 @@
(function () {
  const { host, protocol } = window.location;
  const url = `${protocol}//${host}`;
  [
    ['[src^="/"]', 'src'],
    ['[href^="/"]', 'href']
  ].forEach(([selector, attribute]) => {
    Array.from(document.querySelectorAll(selector))
      // /^\/[^\/]/ keeps root-relative paths but skips protocol-relative "//host/..." URLs
      .filter(e => e.attributes[attribute] && /^\/[^\/]/.test(e.attributes[attribute].value))
      .forEach((e) => {
        e.attributes[attribute].value = `${url}${e.attributes[attribute].value}`;
      });
  });
})();
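Note: the effect on a page served from https://example.com (hypothetical markup):

// before: <img src="/logo.png">                    <a href="/about">
// after:  <img src="https://example.com/logo.png"> <a href="https://example.com/about">
// protocol-relative URLs ("//cdn.example.com/x.js") fail the /^\/[^\/]/ test and are left alone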
@@ -1,13 +1,8 @@
-const request = require('request');
-const JSDOM = require('jsdom').JSDOM;
-const { Readability } = require('readability');
+const fetch = require('node-fetch');
+const { JSDOM } = require('jsdom');
+const { Readability } = require('@mozilla/readability');
 
-const { headers } = require('../constants');
-
-const options = url => ({
-  url,
-  headers,
-});
+const { getUserAgent } = require('../utils/user-agent');
 
 const extract = (url, body) => {
   const doc = new JSDOM(body, { url: url });
@@ -15,27 +10,50 @@ const extract = (url, body) => {
   return reader.parse();
 };
 
+module.exports.scrape = async (req, res) => {
+  try {
+    const { userAgent, headers } = getUserAgent(req.body.url);
+    const response = await fetch(req.body.url, {
+      headers: {
+        ...headers,
+        'User-Agent': userAgent
+      }
+    });
+    if (!response.ok) {
+      return res.sendStatus(response.status); // fix: node-fetch exposes .status, not .statusCode
+    }
+    const html = await response.text();
+    const article = await extract(req.body.url, html);
+    if (article && article.content) {
+      return res.send(article.content);
+    }
+    return res.sendStatus(404);
+  } catch (e) {
+    console.error(e);
+    return res.sendStatus(500);
+  }
+};
+
-module.exports.scrape = (req, res) => request(options(req.body.url), (error, response, body) => {
-  if (error || response.statusCode != 200) {
-    console.log('Response error:', error ? error.toString() : response.statusCode);
-    return res.sendStatus(response ? response.statusCode : 404);
-  }
-  const article = extract(req.body.url, body);
-  if (article && article.content) {
-    return res.send(article.content);
-  }
-  return res.sendStatus(404);
-});
-
-module.exports.details = (req, res) => request(options(req.body.url), (error, response, body) => {
-  if (error || response.statusCode != 200) {
-    console.log('Response error:', error ? error.toString() : response.statusCode);
-    return res.sendStatus(response ? response.statusCode : 404);
-  }
-  const article = extract(req.body.url, body);
-  if (article) {
-    return res.send(article);
-  }
-  return res.sendStatus(404);
-});
+module.exports.details = async (req, res) => {
+  try {
+    const { userAgent, headers } = getUserAgent(req.body.url);
+    const response = await fetch(req.body.url, {
+      headers: {
+        ...headers,
+        'User-Agent': userAgent
+      }
+    });
+    if (!response.ok) {
+      return res.sendStatus(response.status);
+    }
+    const html = await response.text();
+    const article = await extract(req.body.url, html);
+    if (article) {
+      return res.send(article);
+    }
+    return res.sendStatus(404);
+  } catch (e) {
+    console.error(e);
+    return res.sendStatus(500);
+  }
+};
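Note: a sketch of exercising the rewritten details handler from a client; the endpoint path and port are assumptions, not shown in this diff:

const fetch = require('node-fetch');

(async () => {
  const res = await fetch('http://localhost:33843/details', {
    method: 'POST',
    body: new URLSearchParams({ url: 'https://example.com/article' }), // handler reads req.body.url
  });
  if (res.ok) console.log(await res.json()); // Express serializes the Readability object as JSON
})();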