Compare commits

161 commits (a6e1644ddf … f23bf628e0)
3 .gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "readerserver"]
    path = readerserver
    url = https://github.com/master5o1/declutter.git

20 README.md

@@ -20,7 +20,7 @@ $ sudo apt install yarn
Clone this repo:

```text
$ git clone https://gogs.tannercollin.com/tanner/qotnews.git
$ git clone --recurse-submodules https://git.1j.nz/jason/qotnews.git
$ cd qotnews
```

@@ -37,14 +37,14 @@ $ source env/bin/activate

Configure Praw for your Reddit account (optional):

* Go to https://www.reddit.com/prefs/apps
* Click "Create app"
* Name: whatever
* App type: script
* Description: blank
* About URL: blank
* Redirect URL: your GitHub profile
* Submit, copy the client ID and client secret into `settings.py` below
- Go to https://www.reddit.com/prefs/apps
- Click "Create app"
- Name: whatever
- App type: script
- Description: blank
- About URL: blank
- Redirect URL: your GitHub profile
- Submit, copy the client ID and client secret into `settings.py` below

```text
(env) $ vim settings.py.example

@@ -109,7 +109,7 @@ stdout_logfile_maxbytes=1MB
[program:qotnewsreader]
user=qotnews
directory=/home/qotnews/qotnews/readerserver
command=node main.js
command=node index.js
autostart=true
autorestart=true
stderr_logfile=/var/log/qotnewsreader.log

@@ -1,9 +1,9 @@
import json

from datetime import datetime, timedelta
from sqlalchemy import create_engine, Column, String, ForeignKey, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import IntegrityError
from sqlalchemy.types import JSON

engine = create_engine('sqlite:///data/qotnews.sqlite', connect_args={'timeout': 120})
Session = sessionmaker(bind=engine)

@@ -15,8 +15,8 @@ class Story(Base):

    sid = Column(String(16), primary_key=True)
    ref = Column(String(16), unique=True)
    meta_json = Column(String)
    full_json = Column(String)
    meta = Column(JSON)
    data = Column(JSON)
    title = Column(String)

class Reflist(Base):

@@ -24,6 +24,7 @@ class Reflist(Base):

    rid = Column(Integer, primary_key=True)
    ref = Column(String(16), unique=True)
    urlref = Column(String)
    sid = Column(String, ForeignKey('stories.sid'), unique=True)
    source = Column(String(16))

@@ -36,19 +37,21 @@ def get_story(sid):

def put_story(story):
    story = story.copy()
    full_json = json.dumps(story)
    data = {}
    data.update(story)

    story.pop('text', None)
    story.pop('comments', None)
    meta_json = json.dumps(story)
    meta = {}
    meta.update(story)
    meta.pop('text', None)
    meta.pop('comments', None)

    try:
        session = Session()
        s = Story(
            sid=story['id'],
            ref=story['ref'],
            full_json=full_json,
            meta_json=meta_json,
            data=data,
            meta=meta,
            title=story.get('title', None),
        )
        session.merge(s)

@@ -63,24 +66,41 @@ def get_story_by_ref(ref):
    session = Session()
    return session.query(Story).filter(Story.ref==ref).first()

def get_reflist(amount):
def get_stories_by_url(url):
    session = Session()
    q = session.query(Reflist).order_by(Reflist.rid.desc()).limit(amount)
    return [dict(ref=x.ref, sid=x.sid, source=x.source) for x in q.all()]
    return session.query(Story).\
        filter(Story.title != None).\
        filter(Story.meta['url'].as_string() == url).\
        order_by(Story.meta['date'].desc())

def get_stories(amount):
def get_ref_by_sid(sid):
    session = Session()
    q = session.query(Reflist, Story.meta_json).\
        order_by(Reflist.rid.desc()).\
    x = session.query(Reflist).\
        filter(Reflist.sid == sid).\
        first()
    return dict(ref=x.ref, sid=x.sid, source=x.source, urlref=x.urlref)

def get_reflist():
    session = Session()
    q = session.query(Reflist).order_by(Reflist.rid.desc())
    return [dict(ref=x.ref, sid=x.sid, source=x.source, urlref=x.urlref) for x in q.all()]

def get_stories(maxage=0, skip=0, limit=20):
    time = datetime.now().timestamp() - maxage
    session = Session()
    q = session.query(Reflist, Story.meta).\
        join(Story).\
        filter(Story.title != None).\
        limit(amount)
        filter(maxage == 0 or Story.meta['date'].as_integer() > time).\
        order_by(Story.meta['date'].desc()).\
        offset(skip).\
        limit(limit)
    return [x[1] for x in q]

def put_ref(ref, sid, source):
def put_ref(ref, sid, source, urlref):
    try:
        session = Session()
        r = Reflist(ref=ref, sid=sid, source=source)
        r = Reflist(ref=ref, sid=sid, source=source, urlref=urlref)
        session.add(r)
        session.commit()
    except:

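The reworked query helpers above replace the single `amount` argument with age and paging parameters and return plain `meta` dicts. A minimal usage sketch, assuming `database.init()` has created the tables; the URL and story id are illustrative:

```python
import database

database.init()

# newest 20 stories no older than three days
stories = database.get_stories(maxage=3*24*60*60, skip=0, limit=20)

# other stories that share the same article URL (illustrative URL)
related = database.get_stories_by_url('https://example.com/some-article')

# look up the reflist entry behind a story id (illustrative sid)
item = database.get_ref_by_sid('ABCDE')
```
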
@@ -6,64 +6,88 @@ logging.basicConfig(
import requests
import time
from bs4 import BeautifulSoup
import itertools

import settings
from feeds import hackernews, reddit, tildes, manual, lobsters

OUTLINE_API = 'https://api.outline.com/v3/parse_article'
READ_API = 'http://127.0.0.1:33843'
from feeds import hackernews, reddit, tildes, substack, manual, lobsters
from feeds.sitemap import Sitemap
from feeds.category import Category
from scrapers import outline
from scrapers.declutter import declutter, headless, simple

INVALID_DOMAINS = ['youtube.com', 'bloomberg.com', 'wsj.com', 'sec.gov']
TWO_DAYS = 60*60*24*2

def list():
    feed = []
substacks = {}
for key, value in settings.SUBSTACK.items():
    substacks[key] = substack.Publication(value['url'])
categories = {}
for key, value in settings.CATEGORY.items():
    categories[key] = Category(value)
sitemaps = {}
for key, value in settings.SITEMAP.items():
    sitemaps[key] = Sitemap(value)

def get_list():
    feeds = {}

    if settings.NUM_HACKERNEWS:
        feed += [(x, 'hackernews') for x in hackernews.feed()[:settings.NUM_HACKERNEWS]]
        feeds['hackernews'] = [(x, 'hackernews', x) for x in hackernews.feed()[:settings.NUM_HACKERNEWS]]

    if settings.NUM_LOBSTERS:
        feed += [(x, 'lobsters') for x in lobsters.feed()[:settings.NUM_LOBSTERS]]
        feed += [(x, 'lobsters', x) for x in lobsters.feed()[:settings.NUM_LOBSTERS]]

    if settings.NUM_REDDIT:
        feed += [(x, 'reddit') for x in reddit.feed()[:settings.NUM_REDDIT]]
        feeds['reddit'] = [(x, 'reddit', x) for x in reddit.feed()[:settings.NUM_REDDIT]]

    if settings.NUM_TILDES:
        feed += [(x, 'tildes') for x in tildes.feed()[:settings.NUM_TILDES]]
        feeds['tildes'] = [(x, 'tildes', x) for x in tildes.feed()[:settings.NUM_TILDES]]

    if settings.NUM_SUBSTACK:
        feeds['substack'] = [(x, 'substack', x) for x in substack.top.feed()[:settings.NUM_SUBSTACK]]

    for key, publication in substacks.items():
        count = settings.SUBSTACK[key]['count']
        feeds[key] = [(x, key, x) for x in publication.feed()[:count]]

    for key, sites in categories.items():
        count = settings.CATEGORY[key].get('count') or 0
        excludes = settings.CATEGORY[key].get('excludes')
        tz = settings.CATEGORY[key].get('tz')
        feeds[key] = [(x, key, u) for x, u in sites.feed(excludes)[:count]]

    for key, sites in sitemaps.items():
        count = settings.SITEMAP[key].get('count') or 0
        excludes = settings.SITEMAP[key].get('excludes')
        feeds[key] = [(x, key, u) for x, u in sites.feed(excludes)[:count]]

    values = feeds.values()
    feed = itertools.chain.from_iterable(itertools.zip_longest(*values, fillvalue=None))
    feed = list(filter(None, feed))
    return feed

def get_article(url):
    try:
        params = {'source_url': url}
        headers = {'Referer': 'https://outline.com/'}
        r = requests.get(OUTLINE_API, params=params, headers=headers, timeout=20)
        if r.status_code == 429:
            logging.info('Rate limited by outline, sleeping 30s and skipping...')
            time.sleep(30)
            return ''
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        html = r.json()['data']['html']
        if 'URL is not supported by Outline' in html:
            raise Exception('URL not supported by Outline')
        return html
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logging.error('Problem outlining article: {}'.format(str(e)))
    scrapers = {
        'headless': headless,
        'simple': simple,
        'outline': outline,
        'declutter': declutter,
    }
    available = settings.SCRAPERS or ['headless', 'simple']
    if 'simple' not in available:
        available += ['simple']

        logging.info('Trying our server instead...')

        try:
            r = requests.post(READ_API, data=dict(url=url), timeout=20)
            if r.status_code != 200:
                raise Exception('Bad response code ' + str(r.status_code))
            return r.text
        except KeyboardInterrupt:
            raise
        except BaseException as e:
            logging.error('Problem getting article: {}'.format(str(e)))
            return ''
    for scraper in available:
        if scraper not in scrapers.keys():
            continue
        try:
            details = scrapers[scraper].get_details(url)
            if details and details.get('content'):
                return details, scraper
        except KeyboardInterrupt:
            raise
        except:
            pass
    return None, None

def get_content_type(url):
    try:

@@ -81,7 +105,7 @@ def get_content_type(url):
    except:
        return ''

def update_story(story, is_manual=False):
def update_story(story, is_manual=False, urlref=None):
    res = {}

    if story['source'] == 'hackernews':

@@ -92,6 +116,14 @@ def update_story(story, is_manual=False):
        res = reddit.story(story['ref'])
    elif story['source'] == 'tildes':
        res = tildes.story(story['ref'])
    elif story['source'] == 'substack':
        res = substack.top.story(story['ref'])
    elif story['source'] in categories.keys():
        res = categories[story['source']].story(story['ref'], urlref)
    elif story['source'] in sitemaps.keys():
        res = sitemaps[story['source']].story(story['ref'], urlref)
    elif story['source'] in substacks.keys():
        res = substacks[story['source']].story(story['ref'])
    elif story['source'] == 'manual':
        res = manual.story(story['ref'])

@@ -101,11 +133,15 @@ def update_story(story, is_manual=False):
        logging.info('Story not ready yet')
        return False

    if story['date'] and not is_manual and story['date'] + TWO_DAYS < time.time():
    if story['date'] and not is_manual and story['date'] + settings.MAX_STORY_AGE < time.time():
        logging.info('Story too old, removing')
        return False

    if story.get('url', '') and not story.get('text', ''):
    has_url = story.get('url') or False
    has_text = story.get('text') or False
    #is_simple = story.get('scaper', '') == 'simple'

    if has_url and not has_text:
        if not get_content_type(story['url']).startswith('text/'):
            logging.info('URL invalid file type / content type:')
            logging.info(story['url'])

@@ -117,8 +153,20 @@ def update_story(story, is_manual=False):
            return False

        logging.info('Getting article ' + story['url'])
        story['text'] = get_article(story['url'])
        details, scraper = get_article(story['url'])
        if not details: return False
        story['scraper'] = scraper
        story['text'] = details.get('content', '')
        if not story['text']: return False
        story['last_update'] = time.time()
        story['excerpt'] = details.get('excerpt', '')
        story['scraper_link'] = details.get('scraper_link', '')
        meta = details.get('meta')
        if meta:
            og = meta.get('og')
            story['image'] = meta.get('image', '')
            if og:
                story['image'] = og.get('og:image', meta.get('image', ''))

    return True

72 apiserver/feeds/category.py Normal file

@@ -0,0 +1,72 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

if __name__ == '__main__':
    import sys
    sys.path.insert(0,'.')

from bs4 import BeautifulSoup

import settings
from utils import clean
from misc.api import xml
from misc.news import Base

def _filter_links(links, category_url, excludes=None):
    links = list(filter(None, [link if link.startswith(category_url) else None for link in links]))
    links = list(filter(None, [link if link != category_url else None for link in links]))
    links = list(set(links))
    if excludes:
        links = list(filter(None, [None if any(e in link for e in excludes) else link for link in links]))
    return links

def _get_category(category_url, excludes=None):
    base_url = '/'.join(category_url.split('/')[:3])
    markup = xml(lambda x: category_url)
    if not markup: return []
    soup = BeautifulSoup(markup, features='html.parser')
    links = soup.find_all('a', href=True)
    links = [link.get('href') for link in links]
    links = [f"{base_url}{link}" if link.startswith('/') else link for link in links]
    links = _filter_links(links, category_url, excludes)
    return links

class Category(Base):
    def __init__(self, config):
        self.config = config
        self.category_url = config.get('url')
        self.tz = config.get('tz')

    def feed(self, excludes=None):
        links = []
        if isinstance(self.category_url, str):
            links += _get_category(self.category_url, excludes)
        elif isinstance(self.category_url, list):
            for url in self.category_url:
                links += _get_category(url, excludes)
        links = list(set(links))
        return [(self.get_id(link), link) for link in links]


# scratchpad so I can quickly develop the parser
if __name__ == '__main__':
    print("Category: RadioNZ")
    site = Category({ 'url': "https://www.rnz.co.nz/news/" })
    excludes = [
        'rnz.co.nz/news/sport',
        'rnz.co.nz/weather',
        'rnz.co.nz/news/weather',
    ]
    posts = site.feed(excludes)
    print(posts[:5])
    print(site.story(posts[0][0], posts[0][1]))

    print("Category: Newsroom")
    site = Category({ 'url': "https://www.newsroom.co.nz/news/", 'tz': 'Pacific/Auckland'})
    posts = site.feed()
    print(posts[:5])
    print(site.story(posts[0][0], posts[0][1]))

@@ -40,7 +40,7 @@ def api(route, ref=None):
    return False

def feed():
    return [str(x) for x in api(API_TOPSTORIES) or []]
    return ['hn:'+str(x) for x in api(API_TOPSTORIES) or []]

def comment(i):
    if 'author' not in i:

@@ -60,6 +60,7 @@ def comment_count(i):
    return sum([comment_count(c) for c in i['comments']]) + alive

def story(ref):
    ref = ref.replace('hn:', '')
    r = api(API_ITEM, ref)
    if not r: return False

@@ -7,6 +7,8 @@ import requests
import time
from bs4 import BeautifulSoup

import settings

USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:77.0) Gecko/20100101 Firefox/77.0'

def api(route):

@@ -33,7 +35,7 @@ def story(ref):

    s = {}
    s['author'] = 'manual submission'
    s['author_link'] = 'https://news.t0.vc'
    s['author_link'] = 'https://{}'.format(settings.HOSTNAME)
    s['score'] = 0
    s['date'] = int(time.time())
    s['title'] = str(soup.title.string) if soup.title else ref

@@ -73,7 +73,7 @@ def story(ref):
    s['comments'] = list(filter(bool, s['comments']))
    s['num_comments'] = r.num_comments

    if s['score'] < 25 and s['num_comments'] < 10:
    if s['score'] < settings.REDDIT_SCORE_THRESHOLD and s['num_comments'] < settings.REDDIT_COMMENT_THRESHOLD:
        return False

    if r.selftext:

101 apiserver/feeds/sitemap.py Normal file

@@ -0,0 +1,101 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

if __name__ == '__main__':
    import sys
    sys.path.insert(0,'.')

from datetime import datetime
from bs4 import BeautifulSoup

import settings
from utils import clean
from misc.time import unix
from misc.api import xml
from misc.news import Base

def _get_sitemap_date(a):
    if a.find('lastmod'):
        return a.find('lastmod').text
    if a.find('news:publication_date'):
        return a.find('news:publication_date').text
    if a.find('ns2:publication_date'):
        return a.find('ns2:publication_date').text
    return ''

def _filter_links(links, excludes=None):
    too_old = datetime.now().timestamp() - settings.MAX_STORY_AGE
    links = list(filter(None, [a if _get_sitemap_date(a) else None for a in links]))
    links = list(filter(None, [a if unix(_get_sitemap_date(a)) > too_old else None for a in links]))
    links.sort(key=lambda a: unix(_get_sitemap_date(a)), reverse=True)

    links = [x.find('loc').text for x in links] or []
    links = list(set(links))
    if excludes:
        links = list(filter(None, [None if any(e in link for e in excludes) else link for link in links]))
    return links

def _get_sitemap(feed_url, excludes=None):
    markup = xml(lambda x: feed_url)
    if not markup: return []
    soup = BeautifulSoup(markup, features='lxml')
    links = []
    feed_urls = []
    if soup.find('sitemapindex'):
        sitemap = soup.find('sitemapindex').findAll('sitemap')
        feed_urls = list(filter(None, [a if a.find('loc') else None for a in sitemap]))
    if soup.find('urlset'):
        sitemap = soup.find('urlset').findAll('url')
        links = list(filter(None, [a if a.find('loc') else None for a in sitemap]))

    feed_urls = _filter_links(feed_urls, excludes)
    links = _filter_links(links, excludes)

    for url in feed_urls:
        links += _get_sitemap(url, excludes)
    return list(set(links))

class Sitemap(Base):
    def __init__(self, config):
        self.config = config
        self.sitemap_url = config.get('url')
        self.tz = config.get('tz')

    def feed(self, excludes=None):
        links = []
        if isinstance(self.sitemap_url, str):
            links += _get_sitemap(self.sitemap_url, excludes)
        elif isinstance(self.sitemap_url, list):
            for url in self.sitemap_url:
                links += _get_sitemap(url, excludes)
        links = list(set(links))
        return [(self.get_id(link), link) for link in links]

# scratchpad so I can quickly develop the parser
if __name__ == '__main__':
    print("Sitemap: The Spinoff")
    site = Sitemap({ 'url': "https://thespinoff.co.nz/sitemap.xml" })
    excludes = [
        'thespinoff.co.nz/sitemap-misc.xml',
        'thespinoff.co.nz/sitemap-authors.xml',
        'thespinoff.co.nz/sitemap-tax-category.xml',
    ]
    posts = site.feed(excludes)
    print(posts[:5])
    print(site.story(posts[0][0], posts[0][1]))

    print("Sitemap: Newshub")
    site = Sitemap({
        'url': [
            'https://www.newshub.co.nz/home/politics.gnewssitemap.xml',
            'https://www.newshub.co.nz/home/new-zealand.gnewssitemap.xml',
            'https://www.newshub.co.nz/home/world.gnewssitemap.xml',
            'https://www.newshub.co.nz/home/money.gnewssitemap.xml',
        ],
    })
    posts = site.feed()
    print(posts[:5])
    print(site.story(posts[0][0], posts[0][1]))

174 apiserver/feeds/substack.py Normal file

@@ -0,0 +1,174 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

if __name__ == '__main__':
    import sys
    sys.path.insert(0,'.')

import requests
from datetime import datetime

import settings
from misc.time import unix
from misc.metadata import get_icons
from misc.api import xml, json
from utils import clean

SUBSTACK_REFERER = 'https://substack.com'
SUBSTACK_API_TOP_POSTS = lambda x: "https://substack.com/api/v1/reader/top-posts"

def author_link(author_id, base_url):
    return f"{base_url}/people/{author_id}"
def api_comments(post_id, base_url):
    return f"{base_url}/api/v1/post/{post_id}/comments?all_comments=true&sort=best_first"
def api_stories(x, base_url):
    return f"{base_url}/api/v1/archive?sort=new&search=&offset=0&limit=100"

def comment(i):
    if 'body' not in i:
        return False

    c = {}
    c['date'] = unix(i.get('date'))
    c['author'] = i.get('name', '')
    c['score'] = i.get('reactions').get('❤')
    c['text'] = clean(i.get('body', '') or '')
    c['comments'] = [comment(j) for j in i['children']]
    c['comments'] = list(filter(bool, c['comments']))

    return c

class Publication:
    def __init__(self, domain):
        self.BASE_DOMAIN = domain

    def ref_prefix(self, ref):
        return f"{self.BASE_DOMAIN}/#id:{ref}"

    def strip_ref_prefix(self, ref):
        return ref.replace(f"{self.BASE_DOMAIN}/#id:", '')

    def feed(self):
        too_old = datetime.now().timestamp() - settings.MAX_STORY_AGE
        stories = json(lambda x: api_stories(x, self.BASE_DOMAIN), headers={'Referer': self.BASE_DOMAIN})
        if not stories: return []
        stories = list(filter(None, [i if i.get("audience") == "everyone" else None for i in stories]))
        stories = list(filter(None, [i if unix(i.get('post_date')) > too_old else None for i in stories]))
        stories.sort(key=lambda a: unix(a.get('post_date')), reverse=True)

        return [self.ref_prefix(str(i.get("id"))) for i in stories or []]

    def story(self, ref):
        ref = self.strip_ref_prefix(ref)
        stories = json(lambda x: api_stories(x, self.BASE_DOMAIN), headers={'Referer': self.BASE_DOMAIN})
        if not stories: return False
        stories = list(filter(None, [i if i.get("audience") == "everyone" else None for i in stories]))
        stories = list(filter(None, [i if str(i.get('id')) == ref else None for i in stories]))

        if len(stories) == 0:
            return False

        r = stories[0]
        if not r:
            return False

        s = {}
        s['author'] = ''
        s['author_link'] = ''

        s['date'] = unix(r.get('post_date'))
        s['score'] = r.get('reactions').get('❤')
        s['title'] = r.get('title', '')
        s['link'] = r.get('canonical_url', '')
        s['url'] = r.get('canonical_url', '')
        comments = json(lambda x: api_comments(x, self.BASE_DOMAIN), r.get('id'), headers={'Referer': self.BASE_DOMAIN})
        s['comments'] = [] if not comments else [comment(i) for i in comments.get('comments')]
        s['comments'] = list(filter(bool, s['comments']))
        s['num_comments'] = r.get('comment_count', 0)

        authors = list(filter(None, [self._bylines(byline) for byline in r.get('publishedBylines')]))
        if len(authors):
            s['author'] = authors[0].get('name')
            s['author_link'] = authors[0].get('link')

        markup = xml(lambda x: s['link'])
        if markup:
            icons = get_icons(markup, url=s['link'])
            if icons:
                s['icon'] = icons[0]

        return s

    def _bylines(self, b):
        if 'id' not in b:
            return None
        a = {}
        a['name'] = b.get('name')
        a['link'] = author_link(b.get('id'), self.BASE_DOMAIN)
        return a


class Top:
    def ref_prefix(self, base_url, ref):
        return f"{base_url}/#id:{ref}"

    def strip_ref_prefix(self, ref):
        if '/#id:' in ref:
            base_url, item = ref.split(f"/#id:")
            return item
        return ref

    def feed(self):
        too_old = datetime.now().timestamp() - settings.MAX_STORY_AGE
        stories = json(SUBSTACK_API_TOP_POSTS, headers={'Referer': SUBSTACK_REFERER})
        if not stories: return []
        stories = list(filter(None, [i if i.get("audience") == "everyone" else None for i in stories]))
        stories = list(filter(None, [i if unix(i.get('post_date')) > too_old else None for i in stories]))
        stories.sort(key=lambda a: unix(a.get('post_date')), reverse=True)
        stories = [self.ref_prefix(str(i.get("pub").get("base_url")), str(i.get("id"))) for i in stories]
        return stories

    def story(self, ref):
        ref = self.strip_ref_prefix(ref)
        stories = json(SUBSTACK_API_TOP_POSTS, headers={'Referer': SUBSTACK_REFERER})
        if not stories: return False
        stories = list(filter(None, [i if i.get("audience") == "everyone" else None for i in stories]))
        stories = list(filter(None, [i if str(i.get('id')) == ref else None for i in stories]))

        if len(stories) == 0:
            return False

        r = stories[0]
        if not r:
            return False

        s = {}
        pub = r.get('pub')
        base_url = pub.get('base_url')
        s['author'] = pub.get('author_name')
        s['author_link'] = author_link(pub.get('author_id'), base_url)

        s['date'] = unix(r.get('post_date'))
        s['score'] = r.get('score')
        s['title'] = r.get('title', '')
        s['link'] = r.get('canonical_url', '')
        s['url'] = r.get('canonical_url', '')
        comments = json(lambda x: api_comments(x, base_url), r.get('id'), headers={'Referer': base_url})
        s['comments'] = [] if not comments else [comment(i) for i in comments.get('comments')]
        s['comments'] = list(filter(bool, s['comments']))
        s['num_comments'] = r.get('comment_count', 0)

        return s

top = Top()

# scratchpad so I can quickly develop the parser
if __name__ == '__main__':
    top_posts = top.feed()
    print(top.story(top_posts[0]))

    webworm = Publication("https://www.webworm.co/")
    posts = webworm.feed()
    print(webworm.story(posts[0]))

40 apiserver/misc/api.py Normal file

@@ -0,0 +1,40 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

import requests

GOOGLEBOT_USER_AGENT = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
GOOGLEBOT_IP = '66.249.66.1'
TIMEOUT = 30

def xml(route, ref=None, headers=dict(), use_googlebot=True):
    try:
        if use_googlebot:
            headers['User-Agent'] = GOOGLEBOT_USER_AGENT
            headers['X-Forwarded-For'] = GOOGLEBOT_IP
        r = requests.get(route(ref), headers=headers, timeout=TIMEOUT)
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        return r.text
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logging.error('Problem hitting URL: {}'.format(str(e)))
        return False

def json(route, ref=None, headers=dict(), use_googlebot=True):
    try:
        if use_googlebot:
            headers['User-Agent'] = GOOGLEBOT_USER_AGENT
            headers['X-Forwarded-For'] = GOOGLEBOT_IP
        r = requests.get(route(ref), headers=headers, timeout=TIMEOUT)
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        return r.json()
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logging.error('Problem hitting URL: {}'.format(str(e)))
        return False

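Both helpers take a `route` callable (so a `ref` can be spliced into the URL) and spoof a Googlebot user agent by default. A small sketch of how they appear to be used by the feed modules in this change; the URLs are illustrative:

```python
from misc.api import xml, json

# plain GET returning text; the lambda ignores the unused ref
markup = xml(lambda x: 'https://example.com/sitemap.xml')

# GET returning parsed JSON, building the URL from the ref
data = json(lambda ref: 'https://example.com/api/items/{}'.format(ref), '123')

# both return False on any error, so check before using
if markup:
    print(len(markup))
```
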
14 apiserver/misc/icons.py Normal file

@@ -0,0 +1,14 @@
from bs4 import BeautifulSoup

def get_icons(markup):
    soup = BeautifulSoup(markup, features='html.parser')
    icon32 = soup.find_all('link', rel="icon", href=True, sizes="32x32")
    icon16 = soup.find_all('link', rel="icon", href=True, sizes="16x16")
    favicon = soup.find_all('link', rel="shortcut icon", href=True)
    others = soup.find_all('link', rel="icon", href=True)
    icons = icon32 + icon16 + favicon + others
    base_url = '/'.join(urlref.split('/')[:3])
    icons = list(set([i.get('href') for i in icons]))
    icons = [i if i.startswith('http') else base_url + i for i in icons]

    return icons

84 apiserver/misc/metadata.py Normal file

@@ -0,0 +1,84 @@

from bs4 import BeautifulSoup

def get_icons(markup, url):
    soup = BeautifulSoup(markup, features='html.parser')
    icon32 = soup.find_all('link', rel="icon", href=True, sizes="32x32")
    icon16 = soup.find_all('link', rel="icon", href=True, sizes="16x16")
    favicon = soup.find_all('link', rel="shortcut icon", href=True)
    others = soup.find_all('link', rel="icon", href=True)
    icons = icon32 + icon16 + favicon + others
    base_url = '/'.join(url.split('/')[:3])
    icons = list(set([i.get('href') for i in icons]))
    icons = [i if i.startswith('http') else base_url + i for i in icons]

    return icons

def parse_extruct(s, data):
    rdfa_keys = {
        'title': [
            'http://ogp.me/ns#title',
            'https://ogp.me/ns#title',
        ],
        'date': [
            'http://ogp.me/ns/article#modified_time',
            'https://ogp.me/ns/article#modified_time',
            'http://ogp.me/ns/article#published_time',
            'https://ogp.me/ns/article#published_time',
        ]
    }
    for rdfa in data['rdfa']:
        for key, props in rdfa.items():
            for attribute, properties in rdfa_keys.items():
                for prop in properties:
                    if prop in props:
                        for values in props[prop]:
                            s[attribute] = values['@value']

    for og in data['opengraph']:
        titles = list(filter(None, [value if 'og:title' in key else None for key, value in og['properties']]))
        modified = list(filter(None, [value if 'article:modified_time' in key else None for key, value in og['properties']]))
        published = list(filter(None, [value if 'article:published_time' in key else None for key, value in og['properties']]))
        if len(modified):
            s['date'] = modified[0]
        if len(published):
            s['date'] = published[0]
        if len(titles):
            s['title'] = titles[0]

    for md in data['microdata']:
        if md['type'] in ['https://schema.org/NewsArticle', 'http://schema.org/NewsArticle']:
            props = md['properties']
            s['title'] = props['headline']
            if props['dateModified']:
                s['date'] = props['dateModified']
            if props['datePublished']:
                s['date'] = props['datePublished']
            if 'author' in props and props['author']:
                if 'properties' in props['author']:
                    s['author'] = props['author']['properties']['name']
                elif isinstance(props['author'], list):
                    s['author'] = props['author'][0]['properties']['name']

    for ld in data['json-ld']:
        if '@type' in ld and ld['@type'] in ['Article', 'NewsArticle']:
            s['title'] = ld['headline']
            if ld['dateModified']:
                s['date'] = ld['dateModified']
            if ld['datePublished']:
                s['date'] = ld['datePublished']
            if 'author' in ld and ld['author']:
                if 'name' in ld['author']:
                    s['author'] = ld['author']['name']
                elif isinstance(ld['author'], list):
                    s['author'] = ld['author'][0]['name']
        if '@graph' in ld:
            for gld in ld['@graph']:
                if '@type' in gld and gld['@type'] in ['Article', 'NewsArticle']:
                    s['title'] = gld['headline']
                    if gld['dateModified']:
                        s['date'] = gld['dateModified']
                    if gld['datePublished']:
                        s['date'] = gld['datePublished']

    return s

94 apiserver/misc/news.py Normal file

@@ -0,0 +1,94 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)

import re
import requests
from bs4 import BeautifulSoup
from scrapers.declutter import declutter, headless
import extruct

import settings
from utils import clean
from misc.metadata import parse_extruct, get_icons
from misc.time import unix
from misc.api import xml
import misc.stuff as stuff

def clean_comment(comment):
    comment['text'] = clean(comment['text'])
    comment['comments'] = [clean_comments(c) for c in comment['comments']]
    return comment

def comment_count(i):
    alive = 1 if i['author'] else 0
    return sum([comment_count(c) for c in i['comments']]) + alive

class Base:
    def __init__(config):
        self.config = config
        self.url = config.get('url')
        self.tz = config.get('tz')

    def get_id(self, link):
        patterns = self.config.get('patterns')
        if not patterns:
            return link
        patterns = [re.compile(p) for p in patterns]
        patterns = list(filter(None, [p.match(link) for p in patterns]))
        patterns = list(set([':'.join(p.groups()) for p in patterns]))
        if not patterns:
            return link
        return patterns[0]

    def feed(self, excludes=None):
        return []

    def story(self, ref, urlref):
        if urlref is None:
            return False
        markup = xml(lambda x: urlref)
        if not markup:
            return False

        s = {}
        s['author_link'] = ''
        s['score'] = 0
        s['comments'] = []
        s['num_comments'] = 0
        s['link'] = urlref
        s['url'] = urlref
        s['date'] = 0
        s['title'] = ''

        icons = get_icons(markup, url=urlref)
        if icons:
            s['icon'] = icons[0]

        data = extruct.extract(markup)
        s = parse_extruct(s, data)
        if s['title']:
            s['title'] = clean(s['title'])
        if s['date']:
            s['date'] = unix(s['date'], tz=self.tz)

        if 'disqus' in markup:
            try:
                s['comments'] = declutter.get_comments(urlref)
                s['comments'] = [clean_comments(c) for c in s['comments']]
                s['comments'] = list(filter(bool, s['comments']))
                s['num_comments'] = comment_count(s['comments'])
            except KeyboardInterrupt:
                raise
            except:
                pass

        if urlref.startswith('https://www.stuff.co.nz'):
            s['comments'] = stuff.get_comments(urlref)
            s['comments'] = list(filter(bool, s['comments']))
            s['num_comments'] = len(s['comments'])

        if not s['date']:
            return False
        return s

65 apiserver/misc/stuff.py Normal file

@@ -0,0 +1,65 @@
import re
from bs4 import BeautifulSoup

if __name__ == '__main__':
    import sys
    sys.path.insert(0,'.')

from misc.time import unix
from misc.api import xml
from utils import clean

def _soup_get_text(soup):
    if not soup: return None
    if soup.text: return soup.text

    s = soup.find(text=lambda tag: isinstance(tag, bs4.CData))
    if s and s.string: return s.string.strip()
    return None

def _parse_comment(soup):
    c = {
        'author': '',
        'authorLink': '',
        'score': 0,
        'date': 0,
        'text': '',
        'comments': [],
    }

    if soup.find('link'):
        title = _soup_get_text(soup.find('link'))
        if title and 'By:' in title:
            c['author'] = title.strip('By:').strip()
    if soup.find('dc:creator'):
        c['author'] = _soup_get_text(soup.find('dc:creator'))
    if soup.find('link'):
        c['authorLink'] = _soup_get_text(soup.find('link'))
    if soup.find('description'):
        c['text'] = clean(_soup_get_text(soup.find('description')))
    if soup.find('pubdate'):
        c['date'] = unix(soup.find('pubdate').text)
    elif soup.find('pubDate'):
        c['date'] = unix(soup.find('pubDate').text)

    return c

def get_comments(url):
    regex = r"https:\/\/www\.stuff\.co\.nz\/(.*\/\d+)/[^\/]+"
    p = re.compile(regex).match(url)
    path = p.groups()[0]
    comment_url = f'https://comments.us1.gigya.com/comments/rss/6201101/Stuff/stuff/{path}'
    markup = xml(lambda x: comment_url)
    if not markup: return []
    soup = BeautifulSoup(markup, features='html.parser')
    comments = soup.find_all('item')
    if not comments: return []
    comments = [_parse_comment(c) for c in comments]
    return comments


# scratchpad so I can quickly develop the parser
if __name__ == '__main__':
    comments = get_comments('https://www.stuff.co.nz/life-style/homed/houses/123418468/dear-jacinda-we-need-to-talk-about-housing')
    print(len(comments))
    print(comments[:5])

24 apiserver/misc/time.py Normal file

@@ -0,0 +1,24 @@
import pytz
from datetime import timedelta
import dateutil.parser


TZINFOS = {
    'NZDT': pytz.timezone('Pacific/Auckland'),
    'NZST': pytz.timezone('Pacific/Auckland'),
}

TZINFOS = {
    'NZDT': 13*60*60,
    'NZST': 12*60*60,
}

def unix(date_str, tz=None, tzinfos=TZINFOS):
    try:
        dt = dateutil.parser.parse(date_str, tzinfos=tzinfos)
        if tz:
            dt = pytz.timezone(tz).localize(dt)
        return int(dt.timestamp())
    except:
        pass
    return 0

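A short sketch of how `unix()` behaves, with illustrative date strings; it returns an integer Unix timestamp, or 0 when parsing fails:

```python
from misc.time import unix

unix('2020-11-12T09:30:00+13:00')                # offset-aware ISO string
unix('Thu, 12 Nov 2020 09:30:00 NZDT')           # abbreviation resolved via TZINFOS
unix('2020-11-12 09:30', tz='Pacific/Auckland')  # naive string localised to a zone
unix('not a date')                               # parse failure, returns 0
```
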
@@ -4,6 +4,7 @@ certifi==2020.6.20
chardet==3.0.4
click==7.1.2
commonmark==0.9.1
extruct==0.10.0
Flask==1.1.2
Flask-Cors==3.0.8
gevent==20.6.2

@@ -11,11 +12,13 @@ greenlet==0.4.16
idna==2.10
itsdangerous==1.1.0
Jinja2==2.11.2
lxml==4.6.1
MarkupSafe==1.1.1
packaging==20.4
praw==6.4.0
prawcore==1.4.0
pyparsing==2.4.7
pytz==2020.4
requests==2.24.0
six==1.15.0
soupsieve==2.0.1

@@ -27,3 +30,4 @@ websocket-client==0.57.0
Werkzeug==1.0.1
zope.event==4.4
zope.interface==5.1.0
python-dateutil==2.8.1

64 apiserver/scrapers/declutter.py Normal file

@@ -0,0 +1,64 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)
import requests

from settings import HEADLESS_READER_PORT, SIMPLE_READER_PORT

class Simple:
    def __init__(self, host, name, internal=True, timeout=90):
        self.host = host
        self.name = name
        self.internal = internal
        self.timeout = timeout
        self.variant = 'simple'

    def as_readable(self, details):
        if not self.internal:
            details['scraper_link'] = self.host
        return details

    def get_html(self, url):
        details = self.get_details(url)
        if not details:
            return ''
        return details['content']

    def get_details(self, url):
        logging.info(f"{self.name} Scraper: {url}")
        details = self._json(f"{self.host}/{self.variant}/details", dict(url=url), "article")
        if not details: return None
        return self.as_readable(details)


    def _json(self, url, data, adjective):
        try:
            r = requests.post(url, data=data, timeout=self.timeout)
            if r.status_code != 200:
                raise Exception('Bad response code ' + str(r.status_code))
            return r.json()
        except KeyboardInterrupt:
            raise
        except BaseException as e:
            logging.error('{}: Problem scraping {}: {}'.format(self.name, adjective, str(e)))
            return None


class Headless(Simple):
    def __init__(self, host, name, internal=True, timeout=90):
        self.host = host
        self.name = name
        self.internal = internal
        self.timeout = timeout
        self.variant = 'headless'

    def get_comments(self, url):
        logging.info(f"{self.name} Scraper: {url}")
        comments = self._json(f"{self.host}/{self.variant}/comments", dict(url=url), "comments")
        if not comments: return None
        return comments

declutter = Headless('https://declutter.1j.nz', 'Declutter scraper', internal=False)
headless = Headless(f"http://127.0.0.1:{HEADLESS_READER_PORT or 33843}", 'Headless scraper')
simple = Simple(f"http://127.0.0.1:{SIMPLE_READER_PORT or 33843}", 'Simple scraper')

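All of the scraper objects expose the same `get_details(url)` interface (and `Headless` adds `get_comments()`), which is what lets `feed.get_article()` try them in the order given by `settings.SCRAPERS`. A minimal sketch with an illustrative URL; the `content` and `scraper_link` keys are what the rest of this change appears to expect from the reader server's response:

```python
from scrapers.declutter import headless, simple

url = 'https://example.com/article'         # illustrative
details = headless.get_details(url) or simple.get_details(url)
if details:
    print(details['content'][:200])
    print(details.get('scraper_link', ''))  # set when the scraper is not internal
```
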
64 apiserver/scrapers/outline.py Normal file

@@ -0,0 +1,64 @@
import logging
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG)
import requests

OUTLINE_REFERER = 'https://outline.com/'
OUTLINE_API = 'https://api.outline.com/v3/parse_article'
TIMEOUT = 20

def get_html(url):
    details = get_details(url)
    if not details:
        return ''
    return details['content']

def get_details(url):
    outline = _get_outline(url)
    if not outline:
        return None
    return as_readable(outline)

def as_readable(details):
    readable = {
        'title': details['title'],
        'byline': details['author'],
        'content': details['html'],
        'excerpt': _excerpt(details),
        'siteName': details['site_name'],
        'url': details['article_url'],
        'publisher': details['site_name'],
        'scraper_link': 'https://outline.com/' + details['short_code'],
        'meta': {}
    }
    readable['meta'].update(details['meta'])
    return readable

def _get_outline(url):
    try:
        logging.info(f"Outline Scraper: {url}")
        params = {'source_url': url}
        headers = {'Referer': OUTLINE_REFERER}
        r = requests.get(OUTLINE_API, params=params, headers=headers, timeout=TIMEOUT)
        if r.status_code == 429:
            logging.info('Rate limited by outline, skipping...')
            return None
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))
        data = r.json()['data']
        if 'URL is not supported by Outline' in data['html']:
            raise Exception('URL not supported by Outline')
        return data
    except KeyboardInterrupt:
        raise
    except BaseException as e:
        logging.error('Problem outlining article: {}'.format(str(e)))
        return None

def _excerpt(details):
    meta = details.get('meta')
    if not meta: return ''
    if meta.get('description'): return meta.get('description', '')
    if not meta.get('og'): return ''
    return meta.get('og').get('og:description', '')

@@ -35,14 +35,11 @@ def update_rankings():

def update_attributes():
    try:
        json = ['title', 'url', 'author', 'link', 'id']
        json = ['title', 'url', 'author', 'link', 'id', 'source']
        r = requests.post(MEILI_URL + 'indexes/qotnews/settings/searchable-attributes', json=json, timeout=2)
        if r.status_code != 202:
            raise Exception('Bad response code ' + str(r.status_code))
        return r.json()
        r = requests.delete(MEILI_URL + 'indexes/qotnews/settings/displayed-attributes', timeout=2)
        if r.status_code != 202:
            raise Exception('Bad response code ' + str(r.status_code))
        requests.delete(MEILI_URL + 'indexes/qotnews/settings/displayed-attributes', timeout=2)
        return r.json()
    except KeyboardInterrupt:
        raise

@@ -70,9 +67,9 @@ def put_story(story):
        logging.error('Problem putting MeiliSearch story: {}'.format(str(e)))
        return False

def search(q):
def search(q, skip=0, limit=250):
    try:
        params = dict(q=q, limit=250)
        params = dict(q=q, offset=skip, limit=limit)
        r = requests.get(MEILI_URL + 'indexes/qotnews/search', params=params, timeout=2)
        if r.status_code != 200:
            raise Exception('Bad response code ' + str(r.status_code))

@@ -13,8 +13,10 @@ import json
import threading
import traceback
import time
from datetime import datetime, timedelta
from urllib.parse import urlparse, parse_qs

import settings
import database
import search
import feed

@@ -27,9 +29,6 @@ from flask_cors import CORS
database.init()
search.init()

FEED_LENGTH = 75
news_index = 0

def new_id():
    nid = gen_rand_id()
    while database.get_story(nid):

@@ -42,17 +41,20 @@ cors = CORS(flask_app)

@flask_app.route('/api')
def api():
    stories = database.get_stories(FEED_LENGTH)
    # hacky nested json
    res = Response('{"stories":[' + ','.join(stories) + ']}')
    skip = request.args.get('skip', 0)
    limit = request.args.get('limit', 20)
    stories = database.get_stories(skip=skip, limit=limit)
    res = Response(json.dumps({"stories": stories}))
    res.headers['content-type'] = 'application/json'
    return res

@flask_app.route('/api/search', strict_slashes=False)
def apisearch():
    q = request.args.get('q', '')
    skip = request.args.get('skip', 0)
    limit = request.args.get('limit', 20)
    if len(q) >= 3:
        results = search.search(q)
        results = search.search(q, skip=skip, limit=limit)
    else:
        results = []
    return dict(results=results)

@@ -76,7 +78,7 @@ def submit():
    elif 'reddit.com' in parse.hostname and 'comments' in url:
        source = 'reddit'
        ref = parse.path.split('/')[4]
    elif 'news.t0.vc' in parse.hostname:
    elif settings.HOSTNAME in parse.hostname:
        raise Exception('Invalid article')
    else:
        source = 'manual'

@@ -105,8 +107,11 @@ def submit():
def story(sid):
    story = database.get_story(sid)
    if story:
        # hacky nested json
        res = Response('{"story":' + story.full_json + '}')
        related = []
        if story.meta['url']:
            related = database.get_stories_by_url(story.meta['url'])
            related = [r.meta for r in related]
        res = Response(json.dumps({"story": story.data, "related": related}))
        res.headers['content-type'] = 'application/json'
        return res
    else:

@@ -117,7 +122,7 @@ def story(sid):
def index():
    return render_template('index.html',
        title='Feed',
        url='news.t0.vc',
        url=settings.HOSTNAME,
        description='Hacker News, Reddit, Lobsters, and Tildes articles rendered in reader mode')

@flask_app.route('/<sid>', strict_slashes=False)

@@ -130,7 +135,7 @@ def static_story(sid):

    story = database.get_story(sid)
    if not story: return abort(404)
    story = json.loads(story.full_json)
    story = story.data

    score = story['score']
    num_comments = story['num_comments']

@@ -147,65 +152,96 @@ def static_story(sid):
        url=url,
        description=description)

http_server = WSGIServer(('', 33842), flask_app)
http_server = WSGIServer(('', settings.API_PORT or 33842), flask_app)

def _add_new_refs():
    added = []
    for ref, source, urlref in feed.get_list():
        if database.get_story_by_ref(ref):
            continue
        try:
            nid = new_id()
            database.put_ref(ref, nid, source, urlref)
            logging.info('Added ref ' + ref)
            added.append(ref)
        except database.IntegrityError:
            #logging.info('Unable to add ref ' + ref)
            continue
    return added

def _update_current_story(item):
    try:
        story = database.get_story(item['sid']).data
    except AttributeError:
        story = dict(id=item['sid'], ref=item['ref'], source=item['source'])

    logging.info('Updating story: {}'.format(str(story['ref'])))

    valid = feed.update_story(story, urlref=item['urlref'])
    if valid:
        try:
            database.put_story(story)
            search.put_story(story)
        except database.IntegrityError:
            logging.info('Unable to add story with ref ' + item['ref'])
    else:
        database.del_ref(item['ref'])
        logging.info('Removed ref {}'.format(item['ref']))


def feed_thread():
    global news_index

    new_refs = []
    update_refs = []
    last_check = datetime.now() - timedelta(minutes=20)
    try:
        while True:
            # onboard new stories
            if news_index == 0:
                for ref, source in feed.list():
                    if database.get_story_by_ref(ref):
                        continue
                    try:
                        nid = new_id()
                        database.put_ref(ref, nid, source)
                        logging.info('Added ref ' + ref)
                    except database.IntegrityError:
                        continue

            ref_list = database.get_reflist(FEED_LENGTH)
            time_since_check = datetime.now() - last_check
            if not len(new_refs) and time_since_check > timedelta(minutes=15):
                added = _add_new_refs()
                ref_list = database.get_reflist()
                new_refs = list(filter(None, [i if i['ref'] in added else None for i in ref_list]))
                update_queue = list(filter(None, [i if i['ref'] not in added else None for i in ref_list]))
                current_queue_refs = [i['ref'] for i in update_refs]
                update_queue = list(filter(None, [i if i['ref'] not in current_queue_refs else None for i in update_queue]))
                update_refs += update_queue
                logging.info('Added {} new refs'.format(len(added)))
                logging.info('Have {} refs in update queue'.format(len(current_queue_refs)))
                logging.info('Fetched {} refs for update queue'.format(len(update_queue)))
                last_check = datetime.now()
            gevent.sleep(1)

            # update new stories
            if len(new_refs):
                item = new_refs.pop(0)
                logging.info('Processing new story ref {}'.format(item['ref']))
                _update_current_story(item)
            gevent.sleep(1)

            # update current stories
            if news_index < len(ref_list):
                item = ref_list[news_index]
            if len(update_refs):
                item = update_refs.pop(0)
                logging.info('Processing existing story ref {}'.format(item['ref']))
                _update_current_story(item)
            gevent.sleep(1)

                try:
                    story_json = database.get_story(item['sid']).full_json
                    story = json.loads(story_json)
                except AttributeError:
                    story = dict(id=item['sid'], ref=item['ref'], source=item['source'])

                logging.info('Updating story: ' + str(story['ref']) + ', index: ' + str(news_index))

                valid = feed.update_story(story)
                if valid:
                    database.put_story(story)
                    search.put_story(story)
                else:
                    database.del_ref(item['ref'])
                    logging.info('Removed ref {}'.format(item['ref']))
            else:
                logging.info('Skipping index: ' + str(news_index))

            gevent.sleep(6)

            news_index += 1
            if news_index == FEED_LENGTH: news_index = 0
            gevent.sleep(1)

    except KeyboardInterrupt:
        logging.info('Ending feed thread...')
    except ValueError as e:
        logging.error('feed_thread error: {} {}'.format(e.__class__.__name__, e))
        http_server.stop()

    http_server.stop()
    gevent.kill(feed_thread_ref)


print('Starting Feed thread...')
gevent.spawn(feed_thread)
feed_thread_ref = gevent.spawn(feed_thread)

print('Starting HTTP thread...')
try:
    http_server.serve_forever()
except KeyboardInterrupt:
    gevent.kill(feed_thread_ref)
    logging.info('Exiting...')

@ -1,6 +1,14 @@
|
|||
# QotNews settings
|
||||
# edit this file and save it as settings.py
|
||||
|
||||
HOSTNAME = 'news.t0.vc'
|
||||
MAX_STORY_AGE = 3*24*60*60
|
||||
|
||||
SCRAPERS = ['headless', 'outline', 'declutter', 'simple']
|
||||
API_PORT = 33842
|
||||
SIMPLE_READER_PORT = 33843
|
||||
HEADLESS_READER_PORT = 33843
|
||||
|
||||
# Feed Lengths
|
||||
# Number of top items from each site to pull
|
||||
# set to 0 to disable that site
|
||||
|
@ -8,6 +16,46 @@ NUM_HACKERNEWS = 15
|
|||
NUM_LOBSTERS = 10
|
||||
NUM_REDDIT = 10
|
||||
NUM_TILDES = 5
|
||||
NUM_SUBSTACK = 10
|
||||
|
||||
SITEMAP = {}
|
||||
# SITEMAP['nzherald'] = {
|
||||
# 'url': "https://www.nzherald.co.nz/arcio/news-sitemap/",
|
||||
# 'count': 20,
|
||||
# 'patterns': [
|
||||
# r'^https:\/\/www\.(nzherald\.co\.nz)\/.*\/([^/]+)\/?$',
|
||||
# ],
|
||||
# 'excludes': [
|
||||
# 'driven.co.nz',
|
||||
# 'oneroof.co.nz',
|
||||
# 'nzherald.co.nz/sponsored-stories',
|
||||
# 'nzherald.co.nz/entertainment/',
|
||||
# 'nzherald.co.nz/lifestyle/',
|
||||
# 'nzherald.co.nz/travel/',
|
||||
# 'nzherald.co.nz/sport/',
|
||||
# 'nzherald.co.nz/promotions/',
|
||||
# 'nzherald.co.nzhttp',
|
||||
# 'herald-afternoon-quiz',
|
||||
# 'herald-morning-quiz'
|
||||
# ],
|
||||
# }
|
||||
|
||||
SUBSTACK = {}
|
||||
# SUBSTACK['webworm'] = { 'url': "https://www.webworm.co", 'count': 10},
|
||||
# SUBSTACK['the bulletin'] = { 'url': "https://thespinoff.substack.com", 'count': 10},
|
||||
|
||||
CATEGORY = {}
|
||||
# CATEGORY['radionz'] = {
|
||||
# 'url': "https://www.rnz.co.nz/news/",
|
||||
# 'count': 20,
|
||||
# 'patterns': [
|
||||
# r'https:\/\/www\.(rnz\.co\.nz)\/news\/[^\/]+\/(\d+)\/[^\/]+\/?'
|
||||
# ],
|
||||
# 'excludes': [
|
||||
# 'rnz.co.nz/news/sport',
|
||||
# 'rnz.co.nz/weather',
|
||||
# ],
|
||||
# }
|
||||
|
||||
# Reddit account info
|
||||
# leave blank if not using Reddit
|
||||
|
@@ -15,6 +63,10 @@ REDDIT_CLIENT_ID = ''
|
|||
REDDIT_CLIENT_SECRET = ''
|
||||
REDDIT_USER_AGENT = ''
|
||||
|
||||
# Minimum points or number of comments before including a thread:
|
||||
REDDIT_COMMENT_THRESHOLD = 10
|
||||
REDDIT_SCORE_THRESHOLD = 25
|
||||
|
||||
SUBREDDITS = [
|
||||
'Economics',
|
||||
'AcademicPhilosophy',
|
||||
|
|
apiserver/update-story.py (new file, 48 lines)
@@ -0,0 +1,48 @@
|
|||
import logging
|
||||
logging.basicConfig(
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
level=logging.INFO)
|
||||
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
|
||||
import database
|
||||
import feed
|
||||
import search
|
||||
|
||||
database.init()
|
||||
search.init()
|
||||
|
||||
def _update_current_story(story, item):
|
||||
logging.info('Updating story: {}'.format(str(story['ref'])))
|
||||
|
||||
if story.get('url', ''):
|
||||
story['text'] = ''
|
||||
|
||||
valid = feed.update_story(story, urlref=item['urlref'])
|
||||
if valid:
|
||||
database.put_story(story)
|
||||
search.put_story(story)
|
||||
else:
|
||||
database.del_ref(item['ref'])
|
||||
logging.info('Removed ref {}'.format(item['ref']))
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) == 2:
|
||||
sid = sys.argv[1]
|
||||
else:
|
||||
print('Usage: python update-story.py [story id]')
|
||||
exit(1)
|
||||
|
||||
item = database.get_ref_by_sid(sid)
|
||||
|
||||
if item:
|
||||
story = database.get_story(item['sid']).data
|
||||
if story:
|
||||
print('Updating story:')
|
||||
_update_current_story(story, item)
|
||||
else:
|
||||
print('Story not found. Exiting.')
|
||||
else:
|
||||
print('Story not found. Exiting.')
|
|
@@ -9,7 +9,7 @@ import string
|
|||
from bleach.sanitizer import Cleaner
|
||||
|
||||
def gen_rand_id():
|
||||
return ''.join(random.choice(string.ascii_uppercase) for _ in range(4))
|
||||
return ''.join(random.choice(string.ascii_uppercase) for _ in range(5))
|
||||
|
||||
def render_md(md):
|
||||
if md:
|
||||
|
|
readerserver (new submodule)
@@ -0,0 +1 @@
|
|||
Subproject commit 507ac40695f61c4d0160f38ee0a02539c141ecc8
|
readerserver/.gitignore (vendored, deleted, 92 lines)
@@ -1,92 +0,0 @@
|
|||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
|
||||
# next.js build output
|
||||
.next
|
||||
|
||||
# nuxt.js build output
|
||||
.nuxt
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# Editor
|
||||
*.swp
|
||||
*.swo
|
|
@@ -1,53 +0,0 @@
|
|||
const express = require('express');
|
||||
const app = express();
|
||||
const port = 33843;
|
||||
|
||||
const request = require('request');
|
||||
const JSDOM = require('jsdom').JSDOM;
|
||||
const { Readability } = require('readability');
|
||||
|
||||
app.use(express.urlencoded({ extended: true }));
|
||||
|
||||
app.get('/', (req, res) => {
|
||||
res.send('<form method="POST" accept-charset="UTF-8"><input name="url"><button type="submit">SUBMIT</button></form>');
|
||||
});
|
||||
|
||||
const requestCallback = (url, res) => (error, response, body) => {
|
||||
if (!error && response.statusCode == 200) {
|
||||
console.log('Response OK.');
|
||||
|
||||
const doc = new JSDOM(body, {url: url});
|
||||
const reader = new Readability(doc.window.document);
|
||||
const article = reader.parse();
|
||||
|
||||
if (article && article.content) {
|
||||
res.send(article.content);
|
||||
} else {
|
||||
res.sendStatus(404);
|
||||
}
|
||||
} else {
|
||||
console.log('Response error:', error ? error.toString() : response.statusCode);
|
||||
res.sendStatus(response ? response.statusCode : 404);
|
||||
}
|
||||
};
|
||||
|
||||
app.post('/', (req, res) => {
|
||||
const url = req.body.url;
|
||||
const requestOptions = {
|
||||
url: url,
|
||||
//headers: {'User-Agent': 'Googlebot/2.1 (+http://www.google.com/bot.html)'},
|
||||
//headers: {'User-Agent': 'Twitterbot/1.0'},
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:77.0) Gecko/20100101 Firefox/77.0',
|
||||
'X-Forwarded-For': '66.249.66.1',
|
||||
},
|
||||
};
|
||||
|
||||
console.log('Parse request for:', url);
|
||||
|
||||
request(requestOptions, requestCallback(url, res));
|
||||
});
|
||||
|
||||
app.listen(port, () => {
|
||||
console.log(`Example app listening on port ${port}!`);
|
||||
});
|
|
@@ -1,13 +0,0 @@
|
|||
{
|
||||
"name": "readerserver",
|
||||
"version": "1.0.0",
|
||||
"main": "main.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"dompurify": "^1.0.11",
|
||||
"express": "^4.17.1",
|
||||
"jsdom": "^15.1.1",
|
||||
"readability": "https://github.com/mozilla/readability",
|
||||
"request": "^2.88.0"
|
||||
}
|
||||
}
|
webapp/.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
|
|||
.DS_Store
|
||||
/node_modules/
|
||||
/src/node_modules/@sapper/
|
||||
yarn-error.log
|
||||
/__sapper__/
|
webapp/README.md (new file, 152 lines)
@@ -0,0 +1,152 @@
|
|||
# sapper-template
|
||||
|
||||
The default template for setting up a [Sapper](https://github.com/sveltejs/sapper) project. It can use either Rollup or webpack as the bundler.
|
||||
|
||||
|
||||
## Getting started
|
||||
|
||||
|
||||
### Using `degit`
|
||||
|
||||
To create a new Sapper project based on Rollup locally, run
|
||||
|
||||
```bash
|
||||
npx degit "sveltejs/sapper-template#rollup" my-app
|
||||
```
|
||||
|
||||
For a webpack-based project, instead run
|
||||
|
||||
```bash
|
||||
npx degit "sveltejs/sapper-template#webpack" my-app
|
||||
```
|
||||
|
||||
[`degit`](https://github.com/Rich-Harris/degit) is a scaffolding tool that lets you create a directory from a branch in a repository.
|
||||
|
||||
Replace `my-app` with the path where you wish to create the project.
|
||||
|
||||
|
||||
### Using GitHub templates
|
||||
|
||||
Alternatively, you can create the new project as a GitHub repository using GitHub's template feature.
|
||||
|
||||
Go to either [sapper-template-rollup](https://github.com/sveltejs/sapper-template-rollup) or [sapper-template-webpack](https://github.com/sveltejs/sapper-template-webpack) and click on "Use this template" to create a new project repository initialized by the template.
|
||||
|
||||
|
||||
### Running the project
|
||||
|
||||
Once you have created the project, install dependencies and run the project in development mode:
|
||||
|
||||
```bash
|
||||
cd my-app
|
||||
npm install # or yarn
|
||||
npm run dev
|
||||
```
|
||||
|
||||
This will start the development server on [localhost:3000](http://localhost:3000). Open it and click around.
|
||||
|
||||
You now have a fully functional Sapper project! To get started developing, consult [sapper.svelte.dev](https://sapper.svelte.dev).
|
||||
|
||||
### Using TypeScript
|
||||
|
||||
By default, the template uses plain JavaScript. If you wish to use TypeScript instead, you need some changes to the project:
|
||||
|
||||
* Add `typescript` as well as typings as dependencies in `package.json`
|
||||
* Configure the bundler to use [`svelte-preprocess`](https://github.com/sveltejs/svelte-preprocess) and transpile the TypeScript code.
|
||||
* Add a `tsconfig.json` file
|
||||
* Update the project code to TypeScript
|
||||
|
||||
The template comes with a script that will perform these changes for you by running
|
||||
|
||||
```bash
|
||||
node scripts/setupTypeScript.js
|
||||
```
|
||||
|
||||
`@sapper` dependencies are resolved through `src/node_modules/@sapper`, which is created during the build. You therefore need to run or build the project once to avoid warnings about missing dependencies.
|
||||
|
||||
The script does not support webpack at the moment.
|
||||
|
||||
## Directory structure
|
||||
|
||||
Sapper expects to find two directories in the root of your project — `src` and `static`.
|
||||
|
||||
|
||||
### src
|
||||
|
||||
The [src](src) directory contains the entry points for your app — `client.js`, `server.js` and (optionally) a `service-worker.js` — along with a `template.html` file and a `routes` directory.
|
||||
|
||||
|
||||
#### src/routes
|
||||
|
||||
This is the heart of your Sapper app. There are two kinds of routes — *pages*, and *server routes*.
|
||||
|
||||
**Pages** are Svelte components written in `.svelte` files. When a user first visits the application, they will be served a server-rendered version of the route in question, plus some JavaScript that 'hydrates' the page and initialises a client-side router. From that point forward, navigating to other pages is handled entirely on the client for a fast, app-like feel. (Sapper will preload and cache the code for these subsequent pages, so that navigation is instantaneous.)
|
||||
|
||||
**Server routes** are modules written in `.js` files that export functions corresponding to HTTP methods. Each function receives Express `request` and `response` objects as arguments, plus a `next` function. This is useful for creating a JSON API, for example.
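For example, a server route answering GET requests could be sketched like this (the `ping.json.js` name is hypothetical; the `*.json.js` route files added later in this diff follow the same pattern):

```js
// src/routes/ping.json.js — a minimal sketch of a Sapper server route.
// Exporting a function named `get` makes it handle GET /ping.json.
export async function get(req, res) {
  // `req` and `res` are Express-style request and response objects
  res.writeHead(200, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify({ ok: true, time: Date.now() }));
}
```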
|
||||
|
||||
There are three simple rules for naming the files that define your routes:
|
||||
|
||||
* A file called `src/routes/about.svelte` corresponds to the `/about` route. A file called `src/routes/blog/[slug].svelte` corresponds to the `/blog/:slug` route, in which case `params.slug` is available to the route
|
||||
* The file `src/routes/index.svelte` (or `src/routes/index.js`) corresponds to the root of your app. `src/routes/about/index.svelte` is treated the same as `src/routes/about.svelte`.
|
||||
* Files and directories with a leading underscore do *not* create routes. This allows you to colocate helper modules and components with the routes that depend on them — for example you could have a file called `src/routes/_helpers/datetime.js` and it would *not* create a `/_helpers/datetime` route.
|
||||
|
||||
|
||||
#### src/node_modules/images
|
||||
|
||||
Images added to `src/node_modules/images` can be imported into your code using `import 'images/<filename>'`. They will be given a dynamically generated filename containing a hash, allowing for efficient caching and serving the images on a CDN.
|
||||
|
||||
See [`index.svelte`](src/routes/index.svelte) for an example.
|
||||
|
||||
|
||||
#### src/node_modules/@sapper
|
||||
|
||||
This directory is managed by Sapper and generated when building. It contains all the code you import from `@sapper` modules.
|
||||
|
||||
|
||||
### static
|
||||
|
||||
The [static](static) directory contains static assets that should be served publicly. Files in this directory will be available directly under the root URL, e.g. an `image.jpg` will be available as `/image.jpg`.
|
||||
|
||||
The default [service-worker.js](src/service-worker.js) will preload and cache these files, by retrieving a list of `files` from the generated manifest:
|
||||
|
||||
```js
|
||||
import { files } from '@sapper/service-worker';
|
||||
```
|
||||
|
||||
If you have static files you do not want to cache, you should exclude them from this list after importing it (and before passing it to `cache.addAll`).
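A minimal sketch of that exclusion (the `stats.json` filename is hypothetical; the install handler mirrors the `service-worker.js` added later in this diff):

```js
import { timestamp, files, shell } from '@sapper/service-worker';

// assumption: stats.json should always be fetched fresh, so drop it from the cache list
const to_cache = shell.concat(files.filter(f => !f.endsWith('stats.json')));

self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(`cache${timestamp}`).then(cache => cache.addAll(to_cache))
  );
});
```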
|
||||
|
||||
Static files are served using [sirv](https://github.com/lukeed/sirv).
|
||||
|
||||
|
||||
## Bundler configuration
|
||||
|
||||
Sapper uses Rollup or webpack to provide code-splitting and dynamic imports, as well as compiling your Svelte components. With webpack, it also provides hot module reloading. As long as you don't do anything daft, you can edit the configuration files to add whatever plugins you'd like.
|
||||
|
||||
|
||||
## Production mode and deployment
|
||||
|
||||
To start a production version of your app, run `npm run build && npm start`. This will disable live reloading, and activate the appropriate bundler plugins.
|
||||
|
||||
You can deploy your application to any environment that supports Node 10 or above. As an example, to deploy to [Vercel Now](https://vercel.com) when using `sapper export`, run these commands:
|
||||
|
||||
```bash
|
||||
npm install -g vercel
|
||||
vercel
|
||||
```
|
||||
|
||||
If your app can't be exported to a static site, you can use the [now-sapper](https://github.com/thgh/now-sapper) builder. You can find instructions on how to do so in its [README](https://github.com/thgh/now-sapper#basic-usage).
|
||||
|
||||
|
||||
## Using external components
|
||||
|
||||
When using Svelte components installed from npm, such as [@sveltejs/svelte-virtual-list](https://github.com/sveltejs/svelte-virtual-list), Svelte needs the original component source (rather than any precompiled JavaScript that ships with the component). This allows the component to be rendered server-side, and also keeps your client-side app smaller.
|
||||
|
||||
Because of that, it's essential that the bundler doesn't treat the package as an *external dependency*. You can either modify the `external` option under `server` in [rollup.config.js](rollup.config.js) or the `externals` option in [webpack.config.js](webpack.config.js), or simply install the package to `devDependencies` rather than `dependencies`, which will cause it to get bundled (and therefore compiled) with your app:
|
||||
|
||||
```bash
|
||||
npm install -D @sveltejs/svelte-virtual-list
|
||||
```
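Alternatively, for the Rollup route, a sketch of the `external` tweak (assuming the default template, where the server bundle's `external` list is built from `package.json` dependencies):

```js
// rollup.config.js, server section — keep the component package out of `external`
// so Rollup bundles it and Svelte compiles its source. Shape assumes the default template.
external: Object.keys(pkg.dependencies)
  .concat(require('module').builtinModules)
  .filter(dep => dep !== '@sveltejs/svelte-virtual-list'),
```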
|
||||
|
||||
|
||||
## Bugs and feedback
|
||||
|
||||
Sapper is in early development, and may have the odd rough edge here and there. Please be vocal over on the [Sapper issue tracker](https://github.com/sveltejs/sapper/issues).
|
webapp/package.json (new file, 33 lines)
@@ -0,0 +1,33 @@
|
|||
{
|
||||
"name": "TODO",
|
||||
"description": "TODO",
|
||||
"version": "0.0.1",
|
||||
"scripts": {
|
||||
"dev": "sapper dev",
|
||||
"build": "sapper build",
|
||||
"export": "sapper export",
|
||||
"start": "node __sapper__/build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@polka/redirect": "^1.0.0-next.0",
|
||||
"body-parser": "^1.19.0",
|
||||
"compression": "^1.7.1",
|
||||
"date-fns": "^2.16.1",
|
||||
"dompurify": "^2.2.2",
|
||||
"form-data": "^3.0.0",
|
||||
"isomorphic-fetch": "^3.0.0",
|
||||
"jsdom": "^16.4.0",
|
||||
"lodash": "^4.17.20",
|
||||
"node-fetch": "^2.6.1",
|
||||
"polka": "next",
|
||||
"sirv": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"file-loader": "^6.0.0",
|
||||
"sapper": "^0.28.0",
|
||||
"svelte": "^3.17.3",
|
||||
"svelte-loader": "^2.9.0",
|
||||
"webpack": "^4.7.0",
|
||||
"webpack-modules": "^1.0.0"
|
||||
}
|
||||
}
|
webapp/scripts/setupTypeScript.js (new file, 307 lines)
@@ -0,0 +1,307 @@
|
|||
/**
|
||||
* Run this script to convert the project to TypeScript. This is only guaranteed to work
|
||||
* on the unmodified default template; if you have made code changes you will likely need
|
||||
* to touch up the generated project manually.
|
||||
*/
|
||||
|
||||
// @ts-check
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { argv } = require('process');
|
||||
|
||||
const projectRoot = argv[2] || path.join(__dirname, '..');
|
||||
|
||||
const isRollup = fs.existsSync(path.join(projectRoot, "rollup.config.js"));
|
||||
|
||||
function warn(message) {
|
||||
console.warn('Warning: ' + message);
|
||||
}
|
||||
|
||||
function replaceInFile(fileName, replacements) {
|
||||
if (fs.existsSync(fileName)) {
|
||||
let contents = fs.readFileSync(fileName, 'utf8');
|
||||
let hadUpdates = false;
|
||||
|
||||
replacements.forEach(([from, to]) => {
|
||||
const newContents = contents.replace(from, to);
|
||||
|
||||
const isAlreadyApplied = typeof to !== 'string' || contents.includes(to);
|
||||
|
||||
if (newContents !== contents) {
|
||||
contents = newContents;
|
||||
hadUpdates = true;
|
||||
} else if (!isAlreadyApplied) {
|
||||
warn(`Wanted to update "${from}" in ${fileName}, but did not find it.`);
|
||||
}
|
||||
});
|
||||
|
||||
if (hadUpdates) {
|
||||
fs.writeFileSync(fileName, contents);
|
||||
} else {
|
||||
console.log(`${fileName} had already been updated.`);
|
||||
}
|
||||
} else {
|
||||
warn(`Wanted to update ${fileName} but the file did not exist.`);
|
||||
}
|
||||
}
|
||||
|
||||
function createFile(fileName, contents) {
|
||||
if (fs.existsSync(fileName)) {
|
||||
warn(`Wanted to create ${fileName}, but it already existed. Leaving existing file.`);
|
||||
} else {
|
||||
fs.writeFileSync(fileName, contents);
|
||||
}
|
||||
}
|
||||
|
||||
function addDepsToPackageJson() {
|
||||
const pkgJSONPath = path.join(projectRoot, 'package.json');
|
||||
const packageJSON = JSON.parse(fs.readFileSync(pkgJSONPath, 'utf8'));
|
||||
packageJSON.devDependencies = Object.assign(packageJSON.devDependencies, {
|
||||
...(isRollup ? { '@rollup/plugin-typescript': '^6.0.0' } : { 'ts-loader': '^8.0.4' }),
|
||||
'@tsconfig/svelte': '^1.0.10',
|
||||
'@types/compression': '^1.7.0',
|
||||
'@types/node': '^14.11.1',
|
||||
'@types/polka': '^0.5.1',
|
||||
'svelte-check': '^1.0.46',
|
||||
'svelte-preprocess': '^4.3.0',
|
||||
tslib: '^2.0.1',
|
||||
typescript: '^4.0.3'
|
||||
});
|
||||
|
||||
// Add script for checking
|
||||
packageJSON.scripts = Object.assign(packageJSON.scripts, {
|
||||
validate: 'svelte-check --ignore src/node_modules/@sapper'
|
||||
});
|
||||
|
||||
// Write the package JSON
|
||||
fs.writeFileSync(pkgJSONPath, JSON.stringify(packageJSON, null, ' '));
|
||||
}
|
||||
|
||||
function changeJsExtensionToTs(dir) {
|
||||
const elements = fs.readdirSync(dir, { withFileTypes: true });
|
||||
|
||||
for (let i = 0; i < elements.length; i++) {
|
||||
if (elements[i].isDirectory()) {
|
||||
changeJsExtensionToTs(path.join(dir, elements[i].name));
|
||||
} else if (elements[i].name.match(/^[^_]((?!json).)*js$/)) {
|
||||
fs.renameSync(path.join(dir, elements[i].name), path.join(dir, elements[i].name).replace('.js', '.ts'));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updateSingleSvelteFile({ view, vars, contextModule }) {
|
||||
replaceInFile(path.join(projectRoot, 'src', `${view}.svelte`), [
|
||||
[/(?:<script)(( .*?)*?)>/gm, (m, attrs) => `<script${attrs}${!attrs.includes('lang="ts"') ? ' lang="ts"' : ''}>`],
|
||||
...(vars ? vars.map(({ name, type }) => [`export let ${name};`, `export let ${name}: ${type};`]) : []),
|
||||
...(contextModule ? contextModule.map(({ js, ts }) => [js, ts]) : [])
|
||||
]);
|
||||
}
|
||||
|
||||
// Switch the *.svelte file to use TS
|
||||
function updateSvelteFiles() {
|
||||
[
|
||||
{
|
||||
view: 'components/Nav',
|
||||
vars: [{ name: 'segment', type: 'string' }]
|
||||
},
|
||||
{
|
||||
view: 'routes/_layout',
|
||||
vars: [{ name: 'segment', type: 'string' }]
|
||||
},
|
||||
{
|
||||
view: 'routes/_error',
|
||||
vars: [
|
||||
{ name: 'status', type: 'number' },
|
||||
{ name: 'error', type: 'Error' }
|
||||
]
|
||||
},
|
||||
{
|
||||
view: 'routes/blog/index',
|
||||
vars: [{ name: 'posts', type: '{ slug: string; title: string, html: any }[]' }],
|
||||
contextModule: [
|
||||
{
|
||||
js: '.then(r => r.json())',
|
||||
ts: '.then((r: { json: () => any; }) => r.json())'
|
||||
},
|
||||
{
|
||||
js: '.then(posts => {',
|
||||
ts: '.then((posts: { slug: string; title: string, html: any }[]) => {'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
view: 'routes/blog/[slug]',
|
||||
vars: [{ name: 'post', type: '{ slug: string; title: string, html: any }' }]
|
||||
}
|
||||
].forEach(updateSingleSvelteFile);
|
||||
}
|
||||
|
||||
function updateRollupConfig() {
|
||||
// Edit rollup config
|
||||
replaceInFile(path.join(projectRoot, 'rollup.config.js'), [
|
||||
// Edit imports
|
||||
[
|
||||
/'rollup-plugin-terser';\n(?!import sveltePreprocess)/,
|
||||
`'rollup-plugin-terser';
|
||||
import sveltePreprocess from 'svelte-preprocess';
|
||||
import typescript from '@rollup/plugin-typescript';
|
||||
`
|
||||
],
|
||||
// Edit inputs
|
||||
[
|
||||
/(?<!THIS_IS_UNDEFINED[^\n]*\n\s*)onwarn\(warning\);/,
|
||||
`(warning.code === 'THIS_IS_UNDEFINED') ||\n\tonwarn(warning);`
|
||||
],
|
||||
[/input: config.client.input\(\)(?!\.replace)/, `input: config.client.input().replace(/\\.js$/, '.ts')`],
|
||||
[
|
||||
/input: config.server.input\(\)(?!\.replace)/,
|
||||
`input: { server: config.server.input().server.replace(/\\.js$/, ".ts") }`
|
||||
],
|
||||
[
|
||||
/input: config.serviceworker.input\(\)(?!\.replace)/,
|
||||
`input: config.serviceworker.input().replace(/\\.js$/, '.ts')`
|
||||
],
|
||||
// Add preprocess to the svelte config, this is tricky because there's no easy signifier.
|
||||
// Instead we look for 'hydratable: true,'
|
||||
[/hydratable: true(?!,\n\s*preprocess)/g, 'hydratable: true,\n\t\t\t\tpreprocess: sveltePreprocess()'],
|
||||
// Add TypeScript
|
||||
[/commonjs\(\)(?!,\n\s*typescript)/g, 'commonjs(),\n\t\t\ttypescript({ sourceMap: dev })']
|
||||
]);
|
||||
}
|
||||
|
||||
function updateWebpackConfig() {
|
||||
// Edit webpack config
|
||||
replaceInFile(path.join(projectRoot, 'webpack.config.js'), [
|
||||
// Edit imports
|
||||
[
|
||||
/require\('webpack-modules'\);\n(?!const sveltePreprocess)/,
|
||||
`require('webpack-modules');\nconst sveltePreprocess = require('svelte-preprocess');\n`
|
||||
],
|
||||
// Edit extensions
|
||||
[
|
||||
/\['\.mjs', '\.js', '\.json', '\.svelte', '\.html'\]/,
|
||||
`['.mjs', '.js', '.ts', '.json', '.svelte', '.html']`
|
||||
],
|
||||
// Edit entries
|
||||
[
|
||||
/entry: config\.client\.entry\(\)/,
|
||||
`entry: { main: config.client.entry().main.replace(/\\.js$/, '.ts') }`
|
||||
],
|
||||
[
|
||||
/entry: config\.server\.entry\(\)/,
|
||||
`entry: { server: config.server.entry().server.replace(/\\.js$/, '.ts') }`
|
||||
],
|
||||
[
|
||||
/entry: config\.serviceworker\.entry\(\)/,
|
||||
`entry: { 'service-worker': config.serviceworker.entry()['service-worker'].replace(/\\.js$/, '.ts') }`
|
||||
],
|
||||
// Add preprocess to the svelte config, this is tricky because there's no easy signifier.
|
||||
// Instead we look for 'hydratable: true,'
|
||||
[
|
||||
/hydratable: true(?!,\n\s*preprocess)/g,
|
||||
'hydratable: true,\n\t\t\t\t\t\t\tpreprocess: sveltePreprocess()'
|
||||
],
|
||||
// Add TypeScript rules for client and server
|
||||
[
|
||||
/module: {\n\s*rules: \[\n\s*(?!{\n\s*test: \/\\\.ts\$\/)/g,
|
||||
`module: {\n\t\t\trules: [\n\t\t\t\t{\n\t\t\t\t\ttest: /\\.ts$/,\n\t\t\t\t\tloader: 'ts-loader'\n\t\t\t\t},\n\t\t\t\t`
|
||||
],
|
||||
// Add TypeScript rules for serviceworker
|
||||
[
|
||||
/output: config\.serviceworker\.output\(\),\n\s*(?!module)/,
|
||||
`output: config.serviceworker.output(),\n\t\tmodule: {\n\t\t\trules: [\n\t\t\t\t{\n\t\t\t\t\ttest: /\\.ts$/,\n\t\t\t\t\tloader: 'ts-loader'\n\t\t\t\t}\n\t\t\t]\n\t\t},\n\t\t`
|
||||
],
|
||||
// Edit outputs
|
||||
[
|
||||
/output: config\.serviceworker\.output\(\),\n\s*(?!resolve)/,
|
||||
`output: config.serviceworker.output(),\n\t\tresolve: { extensions: ['.mjs', '.js', '.ts', '.json'] },\n\t\t`
|
||||
]
|
||||
]);
|
||||
}
|
||||
|
||||
function updateServiceWorker() {
|
||||
replaceInFile(path.join(projectRoot, 'src', 'service-worker.ts'), [
|
||||
[`shell.concat(files);`, `(shell as string[]).concat(files as string[]);`],
|
||||
[`self.skipWaiting();`, `((self as any) as ServiceWorkerGlobalScope).skipWaiting();`],
|
||||
[`self.clients.claim();`, `((self as any) as ServiceWorkerGlobalScope).clients.claim();`],
|
||||
[`fetchAndCache(request)`, `fetchAndCache(request: Request)`],
|
||||
[`self.addEventListener('activate', event =>`, `self.addEventListener('activate', (event: ExtendableEvent) =>`],
|
||||
[`self.addEventListener('install', event =>`, `self.addEventListener('install', (event: ExtendableEvent) =>`],
|
||||
[`addEventListener('fetch', event =>`, `addEventListener('fetch', (event: FetchEvent) =>`],
|
||||
]);
|
||||
}
|
||||
|
||||
function createTsConfig() {
|
||||
const tsconfig = `{
|
||||
"extends": "@tsconfig/svelte/tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"lib": ["DOM", "ES2017", "WebWorker"]
|
||||
},
|
||||
"include": ["src/**/*", "src/node_modules/**/*"],
|
||||
"exclude": ["node_modules/*", "__sapper__/*", "static/*"]
|
||||
}`;
|
||||
|
||||
createFile(path.join(projectRoot, 'tsconfig.json'), tsconfig);
|
||||
}
|
||||
|
||||
// Adds the extension recommendation
|
||||
function configureVsCode() {
|
||||
const dir = path.join(projectRoot, '.vscode');
|
||||
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir);
|
||||
}
|
||||
|
||||
createFile(path.join(projectRoot, '.vscode', 'extensions.json'), `{"recommendations": ["svelte.svelte-vscode"]}`);
|
||||
}
|
||||
|
||||
function deleteThisScript() {
|
||||
fs.unlinkSync(path.join(__filename));
|
||||
|
||||
// Check for Mac's DS_store file, and if it's the only one left remove it
|
||||
const remainingFiles = fs.readdirSync(path.join(__dirname));
|
||||
if (remainingFiles.length === 1 && remainingFiles[0] === '.DS_store') {
|
||||
fs.unlinkSync(path.join(__dirname, '.DS_store'));
|
||||
}
|
||||
|
||||
// Check if the scripts folder is empty
|
||||
if (fs.readdirSync(path.join(__dirname)).length === 0) {
|
||||
// Remove the scripts folder
|
||||
fs.rmdirSync(path.join(__dirname));
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Adding TypeScript with ${isRollup ? "Rollup" : "webpack" }...`);
|
||||
|
||||
addDepsToPackageJson();
|
||||
|
||||
changeJsExtensionToTs(path.join(projectRoot, 'src'));
|
||||
|
||||
updateSvelteFiles();
|
||||
|
||||
if (isRollup) {
|
||||
updateRollupConfig();
|
||||
} else {
|
||||
updateWebpackConfig();
|
||||
}
|
||||
|
||||
updateServiceWorker();
|
||||
|
||||
createTsConfig();
|
||||
|
||||
configureVsCode();
|
||||
|
||||
// Delete this script, but not during testing
|
||||
if (!argv[2]) {
|
||||
deleteThisScript();
|
||||
}
|
||||
|
||||
console.log('Converted to TypeScript.');
|
||||
|
||||
if (fs.existsSync(path.join(projectRoot, 'node_modules'))) {
|
||||
console.log(`
|
||||
Next:
|
||||
1. run 'npm install' again to install TypeScript dependencies
|
||||
2. run 'npm run build' for the @sapper imports in your project to work
|
||||
`);
|
||||
}
|
webapp/src/ambient.d.ts (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
|
|||
/**
|
||||
* These declarations tell TypeScript that we allow import of images, e.g.
|
||||
* ```
|
||||
<script lang='ts'>
|
||||
import successkid from 'images/successkid.jpg';
|
||||
</script>
|
||||
|
||||
<img src="{successkid}">
|
||||
```
|
||||
*/
|
||||
declare module "*.gif" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
||||
|
||||
declare module "*.jpg" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
||||
|
||||
declare module "*.jpeg" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
||||
|
||||
declare module "*.png" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
||||
|
||||
declare module "*.svg" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
||||
|
||||
declare module "*.webp" {
|
||||
const value: string;
|
||||
export = value;
|
||||
}
|
webapp/src/client.js (new file, 5 lines)
@@ -0,0 +1,5 @@
|
|||
import * as sapper from '@sapper/app';
|
||||
|
||||
sapper.start({
|
||||
target: document.querySelector('#sapper')
|
||||
});
|
webapp/src/components/Article.svelte (new file, 81 lines)
@@ -0,0 +1,81 @@
|
|||
<script>
|
||||
import StoryInfo from "../components/StoryInfo.svelte";
|
||||
import StoryMeta from "../components/StoryMeta.svelte";
|
||||
|
||||
export let story;
|
||||
</script>
|
||||
|
||||
<style>
|
||||
@import url(/fonts/Fonts.css);
|
||||
.article :global(h1),
|
||||
.article :global(h2),
|
||||
.article :global(h3),
|
||||
.article :global(h4),
|
||||
.article :global(h5),
|
||||
.article :global(h6) {
|
||||
margin: 0 0 0.5em 0;
|
||||
font-weight: 400;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.article :global(h1) {
|
||||
font-size: 2rem;
|
||||
}
|
||||
|
||||
@media only screen and (min-device-width: 320px) and (max-device-width: 480px) {
|
||||
.article :global(h1) {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
}
|
||||
.article-title {
|
||||
text-align: left;
|
||||
}
|
||||
.article-header {
|
||||
padding: 0 0 1rem;
|
||||
}
|
||||
.article-body {
|
||||
max-width: 45rem;
|
||||
margin: 0 auto;
|
||||
font: 1.2rem/1.5 "Apparatus SIL", sans-serif;
|
||||
text-rendering: optimizeLegibility;
|
||||
}
|
||||
.article-body :global(figure) {
|
||||
margin: 0;
|
||||
}
|
||||
.article-body :global(figcaption p),
|
||||
.article-body :global(figcaption) {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.article-body :global(figcaption) {
|
||||
font-style: italic;
|
||||
margin: 0 1rem;
|
||||
font-size: 0.9em;
|
||||
text-align: justify;
|
||||
}
|
||||
.article-body :global(figure),
|
||||
.article-body :global(video),
|
||||
.article-body :global(img) {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
</style>
|
||||
|
||||
<article class="article">
|
||||
<header class="article-header">
|
||||
<h1 class="article-title">
|
||||
{@html story.title}
|
||||
</h1>
|
||||
|
||||
<section class="article-info">
|
||||
<StoryInfo {story} />
|
||||
</section>
|
||||
<aside class="article-info">
|
||||
<StoryMeta {story} />
|
||||
</aside>
|
||||
</header>
|
||||
|
||||
<section class="article-body">
|
||||
{@html story.text}
|
||||
</section>
|
||||
</article>
|
webapp/src/components/Comment.svelte (new file, 106 lines)
@@ -0,0 +1,106 @@
|
|||
<script>
|
||||
import Time from "../components/Time.svelte";
|
||||
|
||||
export let story;
|
||||
export let comment;
|
||||
export let showComments = true;
|
||||
|
||||
let author = (comment.author || "").replace(" ", "");
|
||||
let id = `${author}-${comment.date}`;
|
||||
|
||||
function toggleComments() {
|
||||
showComments = !showComments;
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.comment {
|
||||
margin: 0.5rem 0;
|
||||
}
|
||||
.comment:not(:first-of-type) {
|
||||
margin: 0.5rem 0;
|
||||
border-top: solid 1px #ddd;
|
||||
padding: 0.5rem 0 0;
|
||||
}
|
||||
.comment-info {
|
||||
color: #222;
|
||||
}
|
||||
.comment-author {
|
||||
font-weight: 600;
|
||||
padding: 0 0.4em 0.2em;
|
||||
border-radius: 0.5em;
|
||||
background: #f1f1f1;
|
||||
color: #000;
|
||||
}
|
||||
.comment-author.is-op {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
.comment-text {
|
||||
padding: 0 0.5rem;
|
||||
color: #000;
|
||||
}
|
||||
.comment-text.is-collapsed {
|
||||
height: 3rem;
|
||||
overflow: hidden;
|
||||
color: #888;
|
||||
}
|
||||
.comment-children {
|
||||
margin-left: 0.5rem;
|
||||
padding-left: 0.5rem;
|
||||
border-left: solid 1px #000;
|
||||
}
|
||||
.toggle-children {
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0 0.25rem;
|
||||
color: inherit;
|
||||
cursor: pointer;
|
||||
}
|
||||
.time-link {
|
||||
text-decoration: none;
|
||||
}
|
||||
.time-link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
.is-lighter {
|
||||
color: #888;
|
||||
}
|
||||
</style>
|
||||
|
||||
<article class="comment" id="comment-{id}">
|
||||
<header class="comment-info">
|
||||
<span
|
||||
class={comment.author === story.author ? 'comment-author is-op' : 'comment-author'}>{comment.author || '[Deleted]'}</span>
|
||||
<a class="time-link" href="{story.id}#comment-{id}">
|
||||
<Time date={comment.date} />
|
||||
</a>
|
||||
{#if comment.comments.length}
|
||||
<button
|
||||
class="toggle-children"
|
||||
on:click={toggleComments}>{#if showComments}
|
||||
[–]
|
||||
{:else}[+]{/if}</button>
|
||||
{/if}
|
||||
</header>
|
||||
|
||||
<section class={showComments ? 'comment-text' : 'comment-text is-collapsed'}>
|
||||
{@html comment.text}
|
||||
</section>
|
||||
|
||||
{#if !showComments}
|
||||
<div class="comment-children">
|
||||
<button
|
||||
class="toggle-children is-lighter"
|
||||
on:click={toggleComments}>[expand]</button>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if showComments && comment.comments.length}
|
||||
<footer class="comment-children">
|
||||
{#each comment.comments as child}
|
||||
<svelte:self {story} comment={child} />
|
||||
{/each}
|
||||
</footer>
|
||||
{/if}
|
||||
</article>
|
webapp/src/components/Html.svelte (new file, 16 lines)
@@ -0,0 +1,16 @@
|
|||
<script>
|
||||
import DOMPurify from "dompurify";
|
||||
import { onMount } from "svelte";
|
||||
|
||||
export let html;
|
||||
export let text;
|
||||
let purify;
|
||||
|
||||
onMount(() => {
|
||||
purify = (html) => DOMPurify.sanitize(html);
|
||||
});
|
||||
</script>
|
||||
|
||||
{#if purify}
|
||||
{@html purify(html)}
|
||||
{:else if text}{text}{/if}
|
webapp/src/components/Nav.svelte (new file, 156 lines)
@@ -0,0 +1,156 @@
|
|||
<script>
|
||||
import debounce from "lodash/debounce";
|
||||
import { goto, prefetch, stores } from "@sapper/app";
|
||||
export let segment;
|
||||
|
||||
const { page } = stores();
|
||||
|
||||
let search;
|
||||
let isSearching;
|
||||
|
||||
let __handleSearch = debounce(_handleSearch, 300, {
|
||||
trailing: true,
|
||||
leading: false,
|
||||
});
|
||||
let handleSearch = (e) => {
|
||||
isSearching = true;
|
||||
__handleSearch(e);
|
||||
};
|
||||
|
||||
page.subscribe((page) => {
|
||||
setTimeout(() => {
|
||||
if (segment === "search") {
|
||||
search && search.focus();
|
||||
}
|
||||
}, 0);
|
||||
});
|
||||
|
||||
async function _handleSearch(event) {
|
||||
const url = `/search?q=${event.target.value}`;
|
||||
await prefetch(url);
|
||||
await goto(url);
|
||||
isSearching = false;
|
||||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
[aria-current] {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
[aria-current]::after {
|
||||
position: absolute;
|
||||
content: "";
|
||||
width: calc(100% - 1em);
|
||||
height: 2px;
|
||||
background-color: rgb(255, 62, 0);
|
||||
display: block;
|
||||
bottom: -1px;
|
||||
}
|
||||
|
||||
.navigation {
|
||||
border-bottom: 1px solid rgba(255, 62, 0, 0.1);
|
||||
font-weight: 300;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.navigation-container {
|
||||
margin: 0 auto;
|
||||
padding: 0;
|
||||
max-width: 64rem;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
/* @media (max-device-width: 480px) {
|
||||
.navigation-container {
|
||||
justify-content: space-evenly;
|
||||
}
|
||||
} */
|
||||
.navigation-container > * {
|
||||
vertical-align: middle;
|
||||
}
|
||||
.navigation-list {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
.navigation-item {
|
||||
list-style: none;
|
||||
}
|
||||
.navigation-link {
|
||||
text-decoration: none;
|
||||
padding: 1em 0.5em;
|
||||
display: block;
|
||||
}
|
||||
.navigation-input {
|
||||
line-height: 2;
|
||||
vertical-align: middle;
|
||||
width: 30rem;
|
||||
max-width: 45vw;
|
||||
font-size: 1.1rem;
|
||||
padding: 0.25em 0.5em;
|
||||
margin: 0.25em 0.5em;
|
||||
border-radius: 5px;
|
||||
border: solid 1px #aaa;
|
||||
}
|
||||
input:focus {
|
||||
box-shadow: 0 0 0.25rem rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
|
||||
.is-searching {
|
||||
padding-right: 0.5rem;
|
||||
background-image: url(/svg-loaders/black/grid.svg);
|
||||
background-size: 1.2em 1.2em;
|
||||
background-position: right 0.5em center;
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
</style>
|
||||
|
||||
<svelte:head>
|
||||
<link rel="preload" href="/svg-loaders/black/grid.svg" as="image" />
|
||||
</svelte:head>
|
||||
|
||||
<nav class="navigation">
|
||||
<div class="navigation-container">
|
||||
<ul class="navigation-list" role="menu">
|
||||
<li class="navigation-item">
|
||||
<a
|
||||
class="navigation-link"
|
||||
aria-current={segment === undefined ? 'page' : undefined}
|
||||
rel="prefetch"
|
||||
href=".">
|
||||
{#if [undefined, 'submit'].includes(segment)}
|
||||
Qot. news
|
||||
{:else}← News feed{/if}
|
||||
</a>
|
||||
</li>
|
||||
{#if [undefined, 'submit'].includes(segment)}
|
||||
<li class="navigation-item">
|
||||
<a
|
||||
class="navigation-link"
|
||||
aria-current={segment === 'submit' ? 'page' : undefined}
|
||||
rel="prefetch"
|
||||
href="/submit">
|
||||
Submit
|
||||
</a>
|
||||
</li>
|
||||
{/if}
|
||||
</ul>
|
||||
<form action="/search" method="GET" rel="prefetch" role="search">
|
||||
<input
|
||||
class="navigation-input {(isSearching && 'is-searching') || ''}"
|
||||
id="search"
|
||||
bind:this={search}
|
||||
type="text"
|
||||
name="q"
|
||||
value={$page.query.q || ''}
|
||||
placeholder="Search..."
|
||||
on:keypress={handleSearch} />
|
||||
</form>
|
||||
</div>
|
||||
</nav>
|
webapp/src/components/Pagination.svelte (new file, 62 lines)
@@ -0,0 +1,62 @@
|
|||
<script>
|
||||
import { stores } from "@sapper/app";
|
||||
export let href;
|
||||
export let search;
|
||||
export let count;
|
||||
|
||||
const { page } = stores();
|
||||
|
||||
let skip = 0;
|
||||
let limit = 20;
|
||||
let prevLink = "";
|
||||
let nextLink = "";
|
||||
|
||||
page.subscribe((p) => {
|
||||
count = Number(count);
|
||||
skip = Number(p.query.skip) || 0;
|
||||
limit = Number(p.query.limit) || 20;
|
||||
|
||||
let previous = new URLSearchParams(search || "");
|
||||
let next = new URLSearchParams(search || "");
|
||||
|
||||
previous.append("skip", skip - Math.min(skip, limit));
|
||||
previous.append("limit", limit);
|
||||
|
||||
next.append("skip", skip + limit);
|
||||
next.append("limit", limit);
|
||||
|
||||
prevLink = href + "?" + previous.toString();
|
||||
nextLink = href + "?" + next.toString();
|
||||
});
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.pagination {
|
||||
margin: 3rem 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.pagination-link {
|
||||
font-size: 1.5rem;
|
||||
text-decoration: none;
|
||||
}
|
||||
.pagination-link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
.pagination-link.is-next {
|
||||
margin-left: auto;
|
||||
}
|
||||
</style>
|
||||
|
||||
<div class="pagination">
|
||||
{#if skip > 0}
|
||||
<a class="pagination-link is-prev" href={prevLink} rel="prefetch">←
|
||||
Previous</a>
|
||||
{/if}
|
||||
{#if count >= limit}
|
||||
<a class="pagination-link is-next" href={nextLink} rel="prefetch">Next
|
||||
→</a>
|
||||
{/if}
|
||||
</div>
|
webapp/src/components/StoryInfo.svelte (new file, 18 lines)
@@ -0,0 +1,18 @@
|
|||
<script>
|
||||
import Time from "../components/Time.svelte";
|
||||
export let story;
|
||||
</script>
|
||||
|
||||
<Time date={story.date} />
|
||||
{#if story.author && story.author_link}
|
||||
by
|
||||
<a class="author" href={story.author_link}>{story.author}</a>
|
||||
{:else if story.author}by <span class="author">{story.author}</span>{/if}
|
||||
on
|
||||
<a class="source" href={story.link || story.url}>{story.source}</a>
|
||||
{#if story.score}• {story.score} points{/if}
|
||||
{#if Number(story.num_comments)}
|
||||
•
|
||||
<a rel="prefetch" href="/{story.id}#comments">{story.num_comments}
|
||||
comments</a>
|
||||
{/if}
|
webapp/src/components/StoryList.svelte (new file, 57 lines)
@@ -0,0 +1,57 @@
|
|||
<script>
|
||||
import { getLogoUrl } from "../utils/logos.js";
|
||||
import StoryInfo from "../components/StoryInfo.svelte";
|
||||
export let stories;
|
||||
|
||||
const host = (url) => new URL(url).hostname.replace(/^www\./, "");
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.story-item {
|
||||
margin: 0.5rem 0 0;
|
||||
padding-left: 1.2em;
|
||||
}
|
||||
.story-icon,
|
||||
.story-title {
|
||||
font-size: 1.2rem;
|
||||
}
|
||||
.story-icon {
|
||||
margin-left: -1.2rem;
|
||||
}
|
||||
.story-source::before {
|
||||
content: "(";
|
||||
}
|
||||
.story-source::after {
|
||||
content: ")";
|
||||
}
|
||||
|
||||
.story-item :global(a) {
|
||||
text-decoration: none;
|
||||
}
|
||||
.story-item :global(a:hover) {
|
||||
text-decoration: underline;
|
||||
}
|
||||
</style>
|
||||
|
||||
{#each stories as story}
|
||||
<article class="story-item">
|
||||
<header class="story-header">
|
||||
<img
|
||||
src={getLogoUrl(story)}
|
||||
alt="logo"
|
||||
class="story-icon"
|
||||
style="height: 1rem; width: 1rem;" />
|
||||
<a class="story-title" rel="prefetch" href="/{story.id}">
|
||||
{@html story.title}
|
||||
</a>
|
||||
<a
|
||||
class="story-source"
|
||||
href={story.url || story.link}>{host(story.url || story.link)}</a>
|
||||
</header>
|
||||
<aside class="story-info">
|
||||
<StoryInfo {story} />
|
||||
</aside>
|
||||
</article>
|
||||
{/each}
|
||||
|
||||
<slot />
|
webapp/src/components/StoryMeta.svelte (new file, 30 lines)
@@ -0,0 +1,30 @@
|
|||
<script>
|
||||
export let story;
|
||||
|
||||
let host = new URL(story.url || story.link).hostname.replace(/^www\./, "");
|
||||
</script>
|
||||
|
||||
<style>
|
||||
ul {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
li {
|
||||
display: inline-block;
|
||||
list-style-type: circle;
|
||||
}
|
||||
li:not(:first-of-type)::before {
|
||||
content: " | ";
|
||||
}
|
||||
</style>
|
||||
|
||||
<ul>
|
||||
{#if story.url}
|
||||
<li>source: <a class="article-source" href={story.url}>{host}</a></li>
|
||||
{/if}
|
||||
{#if story.scraper && story.scraper_link}
|
||||
<li>scraper: <a href={story.scraper_link}>{story.scraper}</a></li>
|
||||
{:else if story.scraper}
|
||||
<li>scraper: {story.scraper}</li>
|
||||
{/if}
|
||||
</ul>
|
webapp/src/components/Time.svelte (new file, 11 lines)
@@ -0,0 +1,11 @@
|
|||
<script>
|
||||
import fromUnixTime from "date-fns/fromUnixTime";
|
||||
import formatDistanceToNow from "date-fns/formatDistanceToNow";
|
||||
export let date;
|
||||
let d = fromUnixTime(date);
|
||||
let datetime = d.toISOString();
|
||||
let title = d.toLocaleString();
|
||||
let dateString = formatDistanceToNow(d, { addSuffix: true });
|
||||
</script>
|
||||
|
||||
<time {datetime} {title}>{dateString}</time>
|
webapp/src/node_modules/images/successkid.jpg (generated, vendored, new binary file, 77 KiB)
webapp/src/routes/[id].json.js (new file, 17 lines)
@@ -0,0 +1,17 @@
|
|||
import fetch from 'isomorphic-fetch';
|
||||
|
||||
import { purify, purifyArray } from './_purify';
|
||||
|
||||
const API_URL = process.env.API_URL || 'http://localhost:33842';
|
||||
|
||||
export async function get(req, res) {
|
||||
const response = await fetch(`${API_URL}/api/${req.params.id}`);
|
||||
res.writeHead(response.status, { 'Content-Type': response.headers.get('Content-Type') });
|
||||
if (!response.ok) {
|
||||
return res.end(await response.text());
|
||||
}
|
||||
const data = await response.json();
|
||||
data.story = purify(data.story);
|
||||
data.related = purifyArray(data.related);
|
||||
res.end(JSON.stringify(data));
|
||||
}
|
webapp/src/routes/[id].svelte (new file, 84 lines)
@@ -0,0 +1,84 @@
|
|||
<script context="module">
|
||||
export async function preload({ params }) {
|
||||
const res = await this.fetch(`${params.id}.json`);
|
||||
const data = await res.json();
|
||||
|
||||
if (res.status === 200) {
|
||||
return { story: data.story, related: data.related };
|
||||
} else {
|
||||
this.error(res.status, data.message);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<script>
|
||||
import fromUnixTime from "date-fns/fromUnixTime";
|
||||
import Comment from "../components/Comment.svelte";
|
||||
import Article from "../components/Article.svelte";
|
||||
export let story;
|
||||
export let related;
|
||||
|
||||
let others = related.filter(
|
||||
(r) => r.id !== story.id && Number(r.num_comments)
|
||||
);
|
||||
let hasComments = related.some((r) => Number(r.num_comments));
|
||||
</script>
|
||||
|
||||
<style>
|
||||
.spacer {
|
||||
margin: 3rem 0;
|
||||
}
|
||||
.single {
|
||||
max-width: 56rem;
|
||||
margin: 0 auto;
|
||||
}
|
||||
</style>
|
||||
|
||||
<svelte:head>
|
||||
<title>{story.title}</title>
|
||||
<meta property="og:title" content={story.title} />
|
||||
<meta property="og:type" content="article" />
|
||||
<meta
|
||||
property="article:published_time"
|
||||
content={fromUnixTime(story.date).toISOString()} />
|
||||
<meta property="article:author" content={story.author || story.source} />
|
||||
<meta property="og:description" content={story.excerpt || story.title} />
|
||||
{#if story.image}
|
||||
<meta property="og:image" content={story.image} />
|
||||
{/if}
|
||||
</svelte:head>
|
||||
|
||||
<section class="single">
|
||||
<Article {story} />
|
||||
|
||||
{#if hasComments}
|
||||
<hr class="spacer" />
|
||||
|
||||
<section id="comments">
|
||||
<header>
|
||||
<h2>Comments</h2>
|
||||
|
||||
{#if others.length}
|
||||
<h3>
|
||||
Other discussions:
|
||||
{#each others as r}
|
||||
{#if r.num_comments}
|
||||
<a href="/{r.id}#comments" rel="prefetch">
|
||||
{r.source}
|
||||
({r.num_comments})
|
||||
</a>
|
||||
{/if}
|
||||
{/each}
|
||||
</h3>
|
||||
{/if}
|
||||
</header>
|
||||
{#if story.comments.length}
|
||||
<div class="comments">
|
||||
{#each story.comments as comment}
|
||||
<Comment {story} {comment} />
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
</section>
|
||||
{/if}
|
||||
</section>
|
webapp/src/routes/_error.svelte (new file, 40 lines)
@@ -0,0 +1,40 @@
|
|||
<script>
|
||||
export let status;
|
||||
export let error;
|
||||
|
||||
const dev = process.env.NODE_ENV === 'development';
|
||||
</script>
|
||||
|
||||
<style>
|
||||
h1, p {
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 2.8em;
|
||||
font-weight: 700;
|
||||
margin: 0 0 0.5em 0;
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 1em auto;
|
||||
}
|
||||
|
||||
@media (min-width: 480px) {
|
||||
h1 {
|
||||
font-size: 4em;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<svelte:head>
|
||||
<title>{status}</title>
|
||||
</svelte:head>
|
||||
|
||||
<h1>{status}</h1>
|
||||
|
||||
<p>{error.message}</p>
|
||||
|
||||
{#if dev && error.stack}
|
||||
<pre>{error.stack}</pre>
|
||||
{/if}
|
webapp/src/routes/_layout.svelte (new file, 21 lines)
@@ -0,0 +1,21 @@
|
|||
<script>
|
||||
import Nav from "../components/Nav.svelte";
|
||||
export let segment;
|
||||
</script>
|
||||
|
||||
<style>
|
||||
main {
|
||||
position: relative;
|
||||
max-width: 64rem;
|
||||
background-color: white;
|
||||
padding: 0.5rem;
|
||||
margin: 0 auto;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
</style>
|
||||
|
||||
<Nav {segment} />
|
||||
|
||||
<main>
|
||||
<slot {segment} />
|
||||
</main>
|
webapp/src/routes/_purify.js (new file, 25 lines)
@@ -0,0 +1,25 @@
|
|||
import createDOMPurify from 'dompurify';
|
||||
import { JSDOM } from 'jsdom';
|
||||
|
||||
export const purify = (story, DOMPurify) => {
|
||||
if (!DOMPurify) {
|
||||
DOMPurify = createDOMPurify(new JSDOM('').window);
|
||||
}
|
||||
if (story.title) {
|
||||
story.title = DOMPurify.sanitize(story.title);
|
||||
}
|
||||
if (story.text) {
|
||||
story.text = DOMPurify.sanitize(story.text);
|
||||
}
|
||||
return story;
|
||||
};
|
||||
|
||||
export const purifyArray = (array, DOMPurify) => {
|
||||
if (array instanceof Array) {
|
||||
if (!DOMPurify) {
|
||||
DOMPurify = createDOMPurify(new JSDOM('').window);
|
||||
}
|
||||
return array.map(story => purify(story, DOMPurify));
|
||||
}
|
||||
return array;
|
||||
};
|
webapp/src/routes/index.json.js (new file, 20 lines)
@@ -0,0 +1,20 @@
|
|||
import fetch from 'isomorphic-fetch';
|
||||
|
||||
import { purifyArray } from './_purify';
|
||||
|
||||
const API_URL = process.env.API_URL || 'http://localhost:33842';
|
||||
|
||||
export async function get(req, res) {
|
||||
const { skip, limit } = {
|
||||
skip: req.query.skip || 0,
|
||||
limit: req.query.limit || 20,
|
||||
};
|
||||
const response = await fetch(`${API_URL}/api?skip=${skip}&limit=${limit}`);
|
||||
res.writeHead(response.status, { 'Content-Type': response.headers.get('Content-Type') });
|
||||
if (!response.ok) {
|
||||
return res.end(await response.text());
|
||||
}
|
||||
const data = await response.json();
|
||||
data.stories = purifyArray(data.stories);
|
||||
res.end(JSON.stringify(data));
|
||||
}
|
webapp/src/routes/index.svelte (new file, 33 lines)
@@ -0,0 +1,33 @@
|
|||
<script context="module">
|
||||
export async function preload(page) {
|
||||
const { skip, limit } = {
|
||||
skip: page.query.skip || 0,
|
||||
limit: page.query.limit || 20,
|
||||
};
|
||||
const res = await this.fetch(`index.json?skip=${skip}&limit=${limit}`);
|
||||
const data = await res.json();
|
||||
|
||||
if (res.status === 200) {
|
||||
return { stories: data.stories, skip, limit };
|
||||
} else {
|
||||
this.error(res.status, data.message);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<script>
|
||||
import StoryList from "../components/StoryList.svelte";
|
||||
import Pagination from "../components/Pagination.svelte";
|
||||
|
||||
export let stories;
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>QotNews</title>
|
||||
<meta property="og:title" content="QotNews" />
|
||||
<meta property="og:type" content="website" />
|
||||
</svelte:head>
|
||||
|
||||
<StoryList {stories}>
|
||||
<Pagination href="/" count={stories.length} />
|
||||
</StoryList>
|
webapp/src/routes/search.json.js (new file, 20 lines)
@@ -0,0 +1,20 @@
|
|||
import fetch from 'isomorphic-fetch';
|
||||
|
||||
import { purifyArray } from './_purify';
|
||||
|
||||
const API_URL = process.env.API_URL || 'http://localhost:33842';
|
||||
|
||||
export async function get(req, res) {
|
||||
const { skip, limit } = {
|
||||
skip: req.query.skip || 0,
|
||||
limit: req.query.limit || 20,
|
||||
};
|
||||
const response = await fetch(`${API_URL}/api/search?q=${req.query.q}&skip=${skip}&limit=${limit}`);
|
||||
res.writeHead(response.status, { 'Content-Type': response.headers.get('Content-Type') });
|
||||
if (!response.ok) {
|
||||
return res.end(await response.text());
|
||||
}
|
||||
const data = await response.json();
|
||||
data.results = purifyArray(data.results);
|
||||
res.end(JSON.stringify(data));
|
||||
}
|
webapp/src/routes/search.svelte (new file, 42 lines)
@@ -0,0 +1,42 @@
|
|||
<script context="module">
|
||||
export async function preload(page) {
|
||||
const { skip, limit, q } = {
|
||||
skip: page.query.skip || 0,
|
||||
limit: page.query.limit || 20,
|
||||
q: page.query.q || "",
|
||||
};
|
||||
const res = await this.fetch(
|
||||
`search.json?q=${q}&skip=${skip}&limit=${limit}`
|
||||
);
|
||||
const data = await res.json();
|
||||
|
||||
if (res.status === 200) {
|
||||
return { stories: data.results, skip, limit };
|
||||
} else {
|
||||
this.error(res.status, data.message);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<script>
|
||||
import { stores } from "@sapper/app";
|
||||
import StoryList from "../components/StoryList.svelte";
|
||||
import Pagination from "../components/Pagination.svelte";
|
||||
|
||||
export let stories;
|
||||
|
||||
const { page } = stores();
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>QotNews</title>
|
||||
<meta property="og:title" content="QotNews" />
|
||||
<meta property="og:type" content="website" />
|
||||
</svelte:head>
|
||||
|
||||
<StoryList {stories}>
|
||||
<Pagination
|
||||
href="/search"
|
||||
search="q={$page.query.q}"
|
||||
count={stories.length} />
|
||||
</StoryList>
|
webapp/src/routes/submit.json.js (new file, 17 lines)
@@ -0,0 +1,17 @@
|
|||
import FormData from 'form-data';
|
||||
import fetch from 'isomorphic-fetch';
|
||||
import redirect from '@polka/redirect';
|
||||
|
||||
const API_URL = process.env.API_URL || 'http://localhost:33842';
|
||||
|
||||
export async function post(req, res) {
|
||||
const body = new FormData();
|
||||
body.append('url', req.body.url);
|
||||
const response = await fetch(`${API_URL}/api/submit`, { method: "POST", body });
|
||||
if (req.body.redirect) {
|
||||
const { nid } = await response.json();
|
||||
return redirect(res, 302, `/${nid}`);
|
||||
}
|
||||
res.writeHead(response.status, { 'Content-Type': response.headers.get('Content-Type') });
|
||||
res.end(await response.text());
|
||||
}
|
webapp/src/routes/submit.svelte (new file, 147 lines)
@@ -0,0 +1,147 @@
|
|||
<script>
|
||||
import { onMount } from "svelte";
|
||||
import { goto, prefetch } from "@sapper/app";
|
||||
|
||||
let input;
|
||||
let handleSubmit;
|
||||
let hasError;
|
||||
let isLoading;
|
||||
|
||||
onMount(() => {
|
||||
setTimeout(() => {
|
||||
input && input.focus();
|
||||
}, 0);
|
||||
handleSubmit = async () => {
|
||||
isLoading = true;
|
||||
hasError = false;
|
||||
const url = input.value;
|
||||
const response = await fetch(`submit.json`, {
|
||||
headers: { "Content-Type": "application/json" },
|
||||
method: "POST",
|
||||
body: JSON.stringify({ url }),
|
||||
});
|
||||
if (!response.ok) {
|
||||
hasError = true;
|
||||
isLoading = false;
|
||||
return;
|
||||
}
|
||||
const { nid } = await response.json();
|
||||
await prefetch(`/${nid}`);
|
||||
await goto(`/${nid}`);
|
||||
};
|
||||
});
|
||||
</script>
|
||||
|
||||
<style>
|
||||
section {
|
||||
max-width: 45rem;
|
||||
margin: 5rem auto 0;
|
||||
}
|
||||
form {
|
||||
text-align: center;
|
||||
width: 95%;
|
||||
border: solid 1px #aaa;
|
||||
margin: 3.5rem auto;
|
||||
border-radius: 5px;
|
||||
overflow: hidden;
|
||||
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
form:focus-within {
|
||||
box-shadow: 0 0 0.25rem rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
|
||||
input {
|
||||
width: 85%;
|
||||
box-sizing: border-box;
|
||||
padding: 0.5rem;
|
||||
margin: 0;
|
||||
font-size: 1.25rem;
|
||||
line-height: 1.5;
|
||||
border: none;
|
||||
border-radius: 0;
|
||||
background: #fff;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
form:has(input:focus) {
|
||||
box-shadow: inset 0 0 0.2rem rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
button {
|
||||
width: 15%;
|
||||
box-sizing: border-box;
|
||||
padding: 0.5rem;
|
||||
margin: 0;
|
||||
font-size: 1.25rem;
|
||||
line-height: 1.5;
|
||||
border: none;
|
||||
border-left: solid 1px #aaa;
|
||||
border-radius: 0;
|
||||
background: #f1f1f1;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.loading,
|
||||
.is-loading form,
|
||||
.is-loading .error {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.is-loading .loading {
|
||||
display: block;
|
||||
margin: 3.5rem auto 0;
|
||||
}
|
||||
|
||||
.error {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.has-error .error {
|
||||
box-sizing: border-box;
|
||||
height: 3rem;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
color: darkred;
|
||||
display: block;
|
||||
}
|
||||
.has-error form {
|
||||
margin-top: 5rem;
|
||||
}
|
||||
</style>
|
||||
|
||||
<svelte:head>
|
||||
<title>QotNews</title>
|
||||
<meta property="og:title" content="QotNews" />
|
||||
<meta property="og:type" content="website" />
|
||||
<link rel="preload" href="/loading.svg" as="image" />
|
||||
</svelte:head>
|
||||
|
||||
<section class="{isLoading ? 'is-loading' : ''} {hasError ? 'has-error' : ''}">
|
||||
<img
|
||||
class="loading"
|
||||
src="/loading.svg"
|
||||
alt="loading..."
|
||||
width="200"
|
||||
height="200" />
|
||||
|
||||
<form
|
||||
action="submit.json"
|
||||
method="POST"
|
||||
on:submit|preventDefault={handleSubmit}
|
||||
autocomplete="off">
|
||||
<input
|
||||
type="text"
|
||||
name="url"
|
||||
placeholder="Enter article link"
|
||||
pattern="^https?:\/\/(www\.)?.*"
|
||||
value=""
|
||||
bind:this={input}
|
||||
required />
|
||||
<button value="true" name="redirect" type="submit">Go</button>
|
||||
</form>
|
||||
|
||||
<p class="error">Something went wrong.</p>
|
||||
</section>
|
20
webapp/src/server.js
Normal file
@ -0,0 +1,20 @@
import sirv from 'sirv';
import polka from 'polka';
import compression from 'compression';
import * as sapper from '@sapper/server';
import { json, urlencoded } from 'body-parser';

const { PORT, NODE_ENV } = process.env;
const dev = NODE_ENV === 'development';

polka()
  .use(
    json(),
    urlencoded(),
    compression({ threshold: 0 }),
    sirv('static', { dev }),
    sapper.middleware(),
  )
  .listen(PORT, err => {
    if (err) console.log('error', err);
  });
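Both body parsers are registered because the submit page reaches this server in two ways: the in-page `fetch` posts JSON, and the plain `<form>` fallback posts URL-encoded fields when JavaScript is unavailable. An illustration of the resulting `req.body` shapes (inferred from the routes above, not taken from the repo):

```text
// Illustration only.
// fetch() path (Content-Type: application/json), parsed by json():
//   req.body = { url: 'https://example.com/article' }
// no-JS form post (application/x-www-form-urlencoded), parsed by urlencoded():
//   req.body = { url: 'https://example.com/article', redirect: 'true' }
```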
86
webapp/src/service-worker.js
Normal file
@ -0,0 +1,86 @@
import { timestamp, files, shell } from '@sapper/service-worker';

const ASSETS = `cache${timestamp}`;

// `shell` is an array of all the files generated by the bundler,
// `files` is an array of everything in the `static` directory
const to_cache = shell.concat(files);
const staticAssets = new Set(to_cache);

self.addEventListener('install', event => {
  event.waitUntil(
    caches
      .open(ASSETS)
      .then(cache => cache.addAll(to_cache))
      .then(() => {
        self.skipWaiting();
      })
  );
});

self.addEventListener('activate', event => {
  event.waitUntil(
    caches.keys().then(async keys => {
      // delete old caches
      for (const key of keys) {
        if (key !== ASSETS) await caches.delete(key);
      }

      self.clients.claim();
    })
  );
});


/**
 * Fetch the asset from the network and store it in the cache.
 * Fall back to the cache if the user is offline.
 */
async function fetchAndCache(request) {
  const cache = await caches.open(`offline${timestamp}`)

  try {
    const response = await fetch(request);
    cache.put(request, response.clone());
    return response;
  } catch (err) {
    const response = await cache.match(request);
    if (response) return response;

    throw err;
  }
}

self.addEventListener('fetch', event => {
  if (event.request.method !== 'GET' || event.request.headers.has('range')) return;

  const url = new URL(event.request.url);

  // don't try to handle e.g. data: URIs
  const isHttp = url.protocol.startsWith('http');
  const isDevServerRequest = url.hostname === self.location.hostname && url.port !== self.location.port;
  const isStaticAsset = url.host === self.location.host && staticAssets.has(url.pathname);
  const skipBecauseUncached = event.request.cache === 'only-if-cached' && !isStaticAsset;

  if (isHttp && !isDevServerRequest && !skipBecauseUncached) {
    event.respondWith(
      (async () => {
        // always serve static files and bundler-generated assets from cache.
        // if your application has other URLs with data that will never change,
        // set this variable to true for them and they will only be fetched once.
        const cachedAsset = isStaticAsset && await caches.match(event.request);

        // for pages, you might want to serve a shell `service-worker-index.html` file,
        // which Sapper has generated for you. It's not right for every
        // app, but if it's right for yours then uncomment this section
        /*
        if (!cachedAsset && url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) {
          return caches.match('/service-worker-index.html');
        }
        */

        return cachedAsset || fetchAndCache(event.request);
      })()
    );
  }
});
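Registration of this worker is not part of the files shown here; Sapper's generated client normally takes care of it. For reference, a hand-rolled registration would look roughly like this sketch:

```text
// Sketch only - Sapper's client bundle normally registers the worker for you.
if ('serviceWorker' in navigator) {
  window.addEventListener('load', () => {
    navigator.serviceWorker
      .register('/service-worker.js')
      .then(reg => console.log('service worker registered, scope:', reg.scope))
      .catch(err => console.error('service worker registration failed', err));
  });
}
```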
21
webapp/src/template.html
Normal file
@ -0,0 +1,21 @@
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width,initial-scale=1.0">
  <meta name="theme-color" content="#333333">

  %sapper.base%

  <link rel="stylesheet" href="global.css">
  <link rel="manifest" href="manifest.json" crossorigin="use-credentials">
  <link rel="icon" type="image/png" href="favicon.png">

  %sapper.scripts%
  %sapper.styles%
  %sapper.head%
</head>
<body>
  <div id="sapper">%sapper.html%</div>
</body>
</html>
11
webapp/src/utils/logos.js
Normal file

BIN
webapp/static/favicon.png
Normal file (6.5 KiB)

BIN
webapp/static/fonts/AppSILB.ttf
Normal file

BIN
webapp/static/fonts/AppSILBI.ttf
Normal file

BIN
webapp/static/fonts/AppSILI.ttf
Normal file

BIN
webapp/static/fonts/AppSILR.ttf
Normal file

28
webapp/static/fonts/Fonts.css
Normal file
@ -0,0 +1,28 @@
@font-face {
  font-family: 'Apparatus SIL';
  src: url('AppSILR.ttf') format('truetype');
}

@font-face {
  font-family: 'Apparatus SIL';
  font-style: italic;
  src: url('AppSILI.ttf') format('truetype');
}

@font-face {
  font-family: 'Apparatus SIL';
  font-weight: bold;
  src: url('AppSILB.ttf') format('truetype');
}

@font-face {
  font-family: 'Apparatus SIL';
  font-weight: bold;
  font-style: italic;
  src: url('AppSILBI.ttf') format('truetype');
}

@font-face {
  font-family: 'Icomoon';
  src: url('icomoon.ttf') format('truetype');
}

BIN
webapp/static/fonts/icomoon.ttf
Normal file

29
webapp/static/global.css
Normal file
@ -0,0 +1,29 @@
body {
  margin: 0;
  font-family: Roboto, -apple-system, BlinkMacSystemFont, Segoe UI, Oxygen,
    Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;
  font-size: 16px;
  line-height: 1.5;
  color: #333;

  margin-bottom: 50vh;
}

a {
  color: inherit;
}

pre,
code {
  font-family: menlo, inconsolata, monospace;
  font-size: calc(1em - 2px);
  color: #555;
  background-color: #f0f0f0;
  padding: 0.2em 0.4em;
  border-radius: 2px;
}

pre {
  max-width: 100%;
  overflow: auto;
}
9
webapp/static/loading.svg
Normal file
@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg class="lds-double-ring" width="200px" height="200px" style="background:rgba(0, 0, 0, 0) none repeat scroll 0% 0%" preserveAspectRatio="xMidYMid" viewBox="0 0 100 100" xmlns="http://www.w3.org/2000/svg">
  <circle cx="50" cy="50" r="45" fill="none" stroke="#000" stroke-dasharray="70.68583470577035 70.68583470577035" stroke-linecap="round" stroke-width="3" ng-attr-r="{{config.radius}}" ng-attr-stroke="{{config.c1}}" ng-attr-stroke-dasharray="{{config.dasharray}}" ng-attr-stroke-width="{{config.width}}">
    <animateTransform attributeName="transform" begin="0s" calcMode="linear" dur="3.6s" keyTimes="0;1" repeatCount="indefinite" type="rotate" values="0 50 50;360 50 50"/>
  </circle>
  <circle cx="50" cy="50" r="41" fill="none" stroke="#000" stroke-dasharray="64.40264939859075 64.40264939859075" stroke-dashoffset="64.403" stroke-linecap="round" stroke-width="3" ng-attr-r="{{config.radius2}}" ng-attr-stroke="{{config.c2}}" ng-attr-stroke-dasharray="{{config.dasharray2}}" ng-attr-stroke-dashoffset="{{config.dashoffset2}}" ng-attr-stroke-width="{{config.width}}">
    <animateTransform attributeName="transform" begin="0s" calcMode="linear" dur="3.6s" keyTimes="0;1" repeatCount="indefinite" type="rotate" values="0 50 50;-360 50 50"/>
  </circle>
</svg>

BIN
webapp/static/logo-192.png
Normal file (4.6 KiB)

BIN
webapp/static/logo-512.png
Normal file (14 KiB)

20
webapp/static/manifest.json
Normal file
@ -0,0 +1,20 @@
{
  "background_color": "#ffffff",
  "theme_color": "#333333",
  "name": "Qot. news",
  "short_name": "Qot. news",
  "display": "minimal-ui",
  "start_url": "/",
  "icons": [
    {
      "src": "logo-192.png",
      "sizes": "192x192",
      "type": "image/png"
    },
    {
      "src": "logo-512.png",
      "sizes": "512x512",
      "type": "image/png"
    }
  ]
}
21
webapp/static/svg-loaders/LICENSE.md
Normal file
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Sam Herbert

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
29
webapp/static/svg-loaders/audio.svg
Normal file
|
@ -0,0 +1,29 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="55" height="80" viewBox="0 0 55 80" xmlns="http://www.w3.org/2000/svg" fill="#FFF">
|
||||
<g transform="matrix(1 0 0 -1 0 80)">
|
||||
<rect width="10" height="20" rx="3">
|
||||
<animate attributeName="height"
|
||||
begin="0s" dur="4.3s"
|
||||
values="20;45;57;80;64;32;66;45;64;23;66;13;64;56;34;34;2;23;76;79;20" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="15" width="10" height="80" rx="3">
|
||||
<animate attributeName="height"
|
||||
begin="0s" dur="2s"
|
||||
values="80;55;33;5;75;23;73;33;12;14;60;80" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="30" width="10" height="50" rx="3">
|
||||
<animate attributeName="height"
|
||||
begin="0s" dur="1.4s"
|
||||
values="50;34;78;23;56;23;34;76;80;54;21;50" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="45" width="10" height="30" rx="3">
|
||||
<animate attributeName="height"
|
||||
begin="0s" dur="2s"
|
||||
values="30;45;13;80;56;72;45;76;34;23;67;30" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
47
webapp/static/svg-loaders/ball-triangle.svg
Normal file
|
@ -0,0 +1,47 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<!-- Todo: add easing -->
|
||||
<svg width="57" height="57" viewBox="0 0 57 57" xmlns="http://www.w3.org/2000/svg" stroke="#fff">
|
||||
<g fill="none" fill-rule="evenodd">
|
||||
<g transform="translate(1 1)" stroke-width="2">
|
||||
<circle cx="5" cy="50" r="5">
|
||||
<animate attributeName="cy"
|
||||
begin="0s" dur="2.2s"
|
||||
values="50;5;50;50"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="cx"
|
||||
begin="0s" dur="2.2s"
|
||||
values="5;27;49;5"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="27" cy="5" r="5">
|
||||
<animate attributeName="cy"
|
||||
begin="0s" dur="2.2s"
|
||||
from="5" to="5"
|
||||
values="5;50;50;5"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="cx"
|
||||
begin="0s" dur="2.2s"
|
||||
from="27" to="27"
|
||||
values="27;49;5;27"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="49" cy="50" r="5">
|
||||
<animate attributeName="cy"
|
||||
begin="0s" dur="2.2s"
|
||||
values="50;50;5;50"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="cx"
|
||||
from="49" to="49"
|
||||
begin="0s" dur="2.2s"
|
||||
values="49;5;27;49"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.9 KiB |
52
webapp/static/svg-loaders/bars.svg
Normal file
|
@ -0,0 +1,52 @@
|
|||
<svg width="135" height="140" viewBox="0 0 135 140" xmlns="http://www.w3.org/2000/svg" fill="#fff">
|
||||
<rect y="10" width="15" height="120" rx="6">
|
||||
<animate attributeName="height"
|
||||
begin="0.5s" dur="1s"
|
||||
values="120;110;100;90;80;70;60;50;40;140;120" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="y"
|
||||
begin="0.5s" dur="1s"
|
||||
values="10;15;20;25;30;35;40;45;50;0;10" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="30" y="10" width="15" height="120" rx="6">
|
||||
<animate attributeName="height"
|
||||
begin="0.25s" dur="1s"
|
||||
values="120;110;100;90;80;70;60;50;40;140;120" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="y"
|
||||
begin="0.25s" dur="1s"
|
||||
values="10;15;20;25;30;35;40;45;50;0;10" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="60" width="15" height="140" rx="6">
|
||||
<animate attributeName="height"
|
||||
begin="0s" dur="1s"
|
||||
values="120;110;100;90;80;70;60;50;40;140;120" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="y"
|
||||
begin="0s" dur="1s"
|
||||
values="10;15;20;25;30;35;40;45;50;0;10" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="90" y="10" width="15" height="120" rx="6">
|
||||
<animate attributeName="height"
|
||||
begin="0.25s" dur="1s"
|
||||
values="120;110;100;90;80;70;60;50;40;140;120" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="y"
|
||||
begin="0.25s" dur="1s"
|
||||
values="10;15;20;25;30;35;40;45;50;0;10" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
<rect x="120" y="10" width="15" height="120" rx="6">
|
||||
<animate attributeName="height"
|
||||
begin="0.5s" dur="1s"
|
||||
values="120;110;100;90;80;70;60;50;40;140;120" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="y"
|
||||
begin="0.5s" dur="1s"
|
||||
values="10;15;20;25;30;35;40;45;50;0;10" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</rect>
|
||||
</svg>
|
After Width: | Height: | Size: 2.3 KiB |
56
webapp/static/svg-loaders/black/grid.svg
Normal file
|
@ -0,0 +1,56 @@
|
|||
<svg width="105" height="105" viewBox="0 0 105 105" xmlns="http://www.w3.org/2000/svg" fill="#000">
|
||||
<circle cx="12.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="12.5" cy="52.5" r="12.5" fill-opacity=".5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="100ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="300ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="52.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="600ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="800ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="52.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="400ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="12.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="700ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="500ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="200ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</svg>
|
After Width: | Height: | Size: 2.0 KiB |
20
webapp/static/svg-loaders/circles.svg
Normal file
|
@ -0,0 +1,20 @@
|
|||
<svg width="135" height="135" viewBox="0 0 135 135" xmlns="http://www.w3.org/2000/svg" fill="#fff">
|
||||
<path d="M67.447 58c5.523 0 10-4.477 10-10s-4.477-10-10-10-10 4.477-10 10 4.477 10 10 10zm9.448 9.447c0 5.523 4.477 10 10 10 5.522 0 10-4.477 10-10s-4.478-10-10-10c-5.523 0-10 4.477-10 10zm-9.448 9.448c-5.523 0-10 4.477-10 10 0 5.522 4.477 10 10 10s10-4.478 10-10c0-5.523-4.477-10-10-10zM58 67.447c0-5.523-4.477-10-10-10s-10 4.477-10 10 4.477 10 10 10 10-4.477 10-10z">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="0 67 67"
|
||||
to="-360 67 67"
|
||||
dur="2.5s"
|
||||
repeatCount="indefinite"/>
|
||||
</path>
|
||||
<path d="M28.19 40.31c6.627 0 12-5.374 12-12 0-6.628-5.373-12-12-12-6.628 0-12 5.372-12 12 0 6.626 5.372 12 12 12zm30.72-19.825c4.686 4.687 12.284 4.687 16.97 0 4.686-4.686 4.686-12.284 0-16.97-4.686-4.687-12.284-4.687-16.97 0-4.687 4.686-4.687 12.284 0 16.97zm35.74 7.705c0 6.627 5.37 12 12 12 6.626 0 12-5.373 12-12 0-6.628-5.374-12-12-12-6.63 0-12 5.372-12 12zm19.822 30.72c-4.686 4.686-4.686 12.284 0 16.97 4.687 4.686 12.285 4.686 16.97 0 4.687-4.686 4.687-12.284 0-16.97-4.685-4.687-12.283-4.687-16.97 0zm-7.704 35.74c-6.627 0-12 5.37-12 12 0 6.626 5.373 12 12 12s12-5.374 12-12c0-6.63-5.373-12-12-12zm-30.72 19.822c-4.686-4.686-12.284-4.686-16.97 0-4.686 4.687-4.686 12.285 0 16.97 4.686 4.687 12.284 4.687 16.97 0 4.687-4.685 4.687-12.283 0-16.97zm-35.74-7.704c0-6.627-5.372-12-12-12-6.626 0-12 5.373-12 12s5.374 12 12 12c6.628 0 12-5.373 12-12zm-19.823-30.72c4.687-4.686 4.687-12.284 0-16.97-4.686-4.686-12.284-4.686-16.97 0-4.687 4.686-4.687 12.284 0 16.97 4.686 4.687 12.284 4.687 16.97 0z">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="0 67 67"
|
||||
to="360 67 67"
|
||||
dur="8s"
|
||||
repeatCount="indefinite"/>
|
||||
</path>
|
||||
</svg>
|
After Width: | Height: | Size: 1.9 KiB |
56
webapp/static/svg-loaders/grid.svg
Normal file
|
@ -0,0 +1,56 @@
|
|||
<svg width="105" height="105" viewBox="0 0 105 105" xmlns="http://www.w3.org/2000/svg" fill="#fff">
|
||||
<circle cx="12.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="12.5" cy="52.5" r="12.5" fill-opacity=".5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="100ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="300ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="52.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="600ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="12.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="800ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="52.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="400ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="12.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="700ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="52.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="500ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="92.5" cy="92.5" r="12.5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="200ms" dur="1s"
|
||||
values="1;.2;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</svg>
|
After Width: | Height: | Size: 2.0 KiB |
18
webapp/static/svg-loaders/hearts.svg
Normal file
|
@ -0,0 +1,18 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="140" height="64" viewBox="0 0 140 64" xmlns="http://www.w3.org/2000/svg" fill="#fff">
|
||||
<path d="M30.262 57.02L7.195 40.723c-5.84-3.976-7.56-12.06-3.842-18.063 3.715-6 11.467-7.65 17.306-3.68l4.52 3.76 2.6-5.274c3.717-6.002 11.47-7.65 17.305-3.68 5.84 3.97 7.56 12.054 3.842 18.062L34.49 56.118c-.897 1.512-2.793 1.915-4.228.9z" fill-opacity=".5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.4s"
|
||||
values="0.5;1;0.5"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</path>
|
||||
<path d="M105.512 56.12l-14.44-24.272c-3.716-6.008-1.996-14.093 3.843-18.062 5.835-3.97 13.588-2.322 17.306 3.68l2.6 5.274 4.52-3.76c5.84-3.97 13.592-2.32 17.307 3.68 3.718 6.003 1.998 14.088-3.842 18.064L109.74 57.02c-1.434 1.014-3.33.61-4.228-.9z" fill-opacity=".5">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0.7s" dur="1.4s"
|
||||
values="0.5;1;0.5"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</path>
|
||||
<path d="M67.408 57.834l-23.01-24.98c-5.864-6.15-5.864-16.108 0-22.248 5.86-6.14 15.37-6.14 21.234 0L70 16.168l4.368-5.562c5.863-6.14 15.375-6.14 21.235 0 5.863 6.14 5.863 16.098 0 22.247l-23.007 24.98c-1.43 1.556-3.757 1.556-5.188 0z" />
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
17
webapp/static/svg-loaders/oval.svg
Normal file
|
@ -0,0 +1,17 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="38" height="38" viewBox="0 0 38 38" xmlns="http://www.w3.org/2000/svg" stroke="#fff">
|
||||
<g fill="none" fill-rule="evenodd">
|
||||
<g transform="translate(1 1)" stroke-width="2">
|
||||
<circle stroke-opacity=".5" cx="18" cy="18" r="18"/>
|
||||
<path d="M36 18c0-9.94-8.06-18-18-18">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="0 18 18"
|
||||
to="360 18 18"
|
||||
dur="1s"
|
||||
repeatCount="indefinite"/>
|
||||
</path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 694 B |
37
webapp/static/svg-loaders/puff.svg
Normal file
|
@ -0,0 +1,37 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="44" height="44" viewBox="0 0 44 44" xmlns="http://www.w3.org/2000/svg" stroke="#fff">
|
||||
<g fill="none" fill-rule="evenodd" stroke-width="2">
|
||||
<circle cx="22" cy="22" r="1">
|
||||
<animate attributeName="r"
|
||||
begin="0s" dur="1.8s"
|
||||
values="1; 20"
|
||||
calcMode="spline"
|
||||
keyTimes="0; 1"
|
||||
keySplines="0.165, 0.84, 0.44, 1"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-opacity"
|
||||
begin="0s" dur="1.8s"
|
||||
values="1; 0"
|
||||
calcMode="spline"
|
||||
keyTimes="0; 1"
|
||||
keySplines="0.3, 0.61, 0.355, 1"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="22" cy="22" r="1">
|
||||
<animate attributeName="r"
|
||||
begin="-0.9s" dur="1.8s"
|
||||
values="1; 20"
|
||||
calcMode="spline"
|
||||
keyTimes="0; 1"
|
||||
keySplines="0.165, 0.84, 0.44, 1"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-opacity"
|
||||
begin="-0.9s" dur="1.8s"
|
||||
values="1; 0"
|
||||
calcMode="spline"
|
||||
keyTimes="0; 1"
|
||||
keySplines="0.3, 0.61, 0.355, 1"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.4 KiB |
42
webapp/static/svg-loaders/rings.svg
Normal file
|
@ -0,0 +1,42 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="45" height="45" viewBox="0 0 45 45" xmlns="http://www.w3.org/2000/svg" stroke="#fff">
|
||||
<g fill="none" fill-rule="evenodd" transform="translate(1 1)" stroke-width="2">
|
||||
<circle cx="22" cy="22" r="6" stroke-opacity="0">
|
||||
<animate attributeName="r"
|
||||
begin="1.5s" dur="3s"
|
||||
values="6;22"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-opacity"
|
||||
begin="1.5s" dur="3s"
|
||||
values="1;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-width"
|
||||
begin="1.5s" dur="3s"
|
||||
values="2;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="22" cy="22" r="6" stroke-opacity="0">
|
||||
<animate attributeName="r"
|
||||
begin="3s" dur="3s"
|
||||
values="6;22"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-opacity"
|
||||
begin="3s" dur="3s"
|
||||
values="1;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="stroke-width"
|
||||
begin="3s" dur="3s"
|
||||
values="2;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="22" cy="22" r="8">
|
||||
<animate attributeName="r"
|
||||
begin="0s" dur="1.5s"
|
||||
values="6;1;2;3;4;5;6"
|
||||
calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.7 KiB |
55
webapp/static/svg-loaders/spinning-circles.svg
Normal file
|
@ -0,0 +1,55 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="58" height="58" viewBox="0 0 58 58" xmlns="http://www.w3.org/2000/svg">
|
||||
<g fill="none" fill-rule="evenodd">
|
||||
<g transform="translate(2 1)" stroke="#FFF" stroke-width="1.5">
|
||||
<circle cx="42.601" cy="11.462" r="5" fill-opacity="1" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="1;0;0;0;0;0;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="49.063" cy="27.063" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;1;0;0;0;0;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="42.601" cy="42.663" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;1;0;0;0;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="27" cy="49.125" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;0;1;0;0;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="11.399" cy="42.663" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;0;0;1;0;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="4.938" cy="27.063" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;0;0;0;1;0;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="11.399" cy="11.462" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;0;0;0;0;1;0" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="27" cy="5" r="5" fill-opacity="0" fill="#fff">
|
||||
<animate attributeName="fill-opacity"
|
||||
begin="0s" dur="1.3s"
|
||||
values="0;0;0;0;0;0;0;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 2.7 KiB |
32
webapp/static/svg-loaders/tail-spin.svg
Normal file
|
@ -0,0 +1,32 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="38" height="38" viewBox="0 0 38 38" xmlns="http://www.w3.org/2000/svg">
|
||||
<defs>
|
||||
<linearGradient x1="8.042%" y1="0%" x2="65.682%" y2="23.865%" id="a">
|
||||
<stop stop-color="#fff" stop-opacity="0" offset="0%"/>
|
||||
<stop stop-color="#fff" stop-opacity=".631" offset="63.146%"/>
|
||||
<stop stop-color="#fff" offset="100%"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g fill="none" fill-rule="evenodd">
|
||||
<g transform="translate(1 1)">
|
||||
<path d="M36 18c0-9.94-8.06-18-18-18" id="Oval-2" stroke="url(#a)" stroke-width="2">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="0 18 18"
|
||||
to="360 18 18"
|
||||
dur="0.9s"
|
||||
repeatCount="indefinite" />
|
||||
</path>
|
||||
<circle fill="#fff" cx="36" cy="18" r="1">
|
||||
<animateTransform
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="0 18 18"
|
||||
to="360 18 18"
|
||||
dur="0.9s"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 1.3 KiB |
33
webapp/static/svg-loaders/three-dots.svg
Normal file
|
@ -0,0 +1,33 @@
|
|||
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
|
||||
<svg width="120" height="30" viewBox="0 0 120 30" xmlns="http://www.w3.org/2000/svg" fill="#fff">
|
||||
<circle cx="15" cy="15" r="15">
|
||||
<animate attributeName="r" from="15" to="15"
|
||||
begin="0s" dur="0.8s"
|
||||
values="15;9;15" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="fill-opacity" from="1" to="1"
|
||||
begin="0s" dur="0.8s"
|
||||
values="1;.5;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="60" cy="15" r="9" fill-opacity="0.3">
|
||||
<animate attributeName="r" from="9" to="9"
|
||||
begin="0s" dur="0.8s"
|
||||
values="9;15;9" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="fill-opacity" from="0.5" to="0.5"
|
||||
begin="0s" dur="0.8s"
|
||||
values=".5;1;.5" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
<circle cx="105" cy="15" r="15">
|
||||
<animate attributeName="r" from="15" to="15"
|
||||
begin="0s" dur="0.8s"
|
||||
values="15;9;15" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
<animate attributeName="fill-opacity" from="1" to="1"
|
||||
begin="0s" dur="0.8s"
|
||||
values="1;.5;1" calcMode="linear"
|
||||
repeatCount="indefinite" />
|
||||
</circle>
|
||||
</svg>
|
After Width: | Height: | Size: 1.5 KiB |
5
webapp/unit-start.sh
Executable file
@ -0,0 +1,5 @@
#!/bin/bash

#yarn run install
#yarn run build
yarn run start
90
webapp/webpack.config.js
Normal file
@ -0,0 +1,90 @@
const webpack = require('webpack');
const WebpackModules = require('webpack-modules');
const path = require('path');
const config = require('sapper/config/webpack.js');
const pkg = require('./package.json');

const mode = process.env.NODE_ENV;
const dev = mode === 'development';

const alias = { svelte: path.resolve('node_modules', 'svelte') };
const extensions = ['.mjs', '.js', '.json', '.svelte', '.html'];
const mainFields = ['svelte', 'module', 'browser', 'main'];
const fileLoaderRule = {
  test: /\.(png|jpe?g|gif)$/i,
  use: [
    'file-loader',
  ]
};

module.exports = {
  client: {
    entry: config.client.entry(),
    output: config.client.output(),
    resolve: { alias, extensions, mainFields },
    module: {
      rules: [
        {
          test: /\.(svelte|html)$/,
          use: {
            loader: 'svelte-loader',
            options: {
              dev,
              hydratable: true,
              hotReload: false // pending https://github.com/sveltejs/svelte/issues/2377
            }
          }
        },
        fileLoaderRule
      ]
    },
    mode,
    plugins: [
      // pending https://github.com/sveltejs/svelte/issues/2377
      // dev && new webpack.HotModuleReplacementPlugin(),
      new webpack.DefinePlugin({
        'process.browser': true,
        'process.env.NODE_ENV': JSON.stringify(mode)
      }),
    ].filter(Boolean),
    devtool: dev && 'inline-source-map'
  },

  server: {
    entry: config.server.entry(),
    output: config.server.output(),
    target: 'node',
    resolve: { alias, extensions, mainFields },
    externals: Object.keys(pkg.dependencies).concat('encoding'),
    module: {
      rules: [
        {
          test: /\.(svelte|html)$/,
          use: {
            loader: 'svelte-loader',
            options: {
              css: false,
              generate: 'ssr',
              hydratable: true,
              dev
            }
          }
        },
        fileLoaderRule
      ]
    },
    mode,
    plugins: [
      new WebpackModules()
    ],
    performance: {
      hints: false // it doesn't matter if server.js is large
    }
  },

  serviceworker: {
    entry: config.serviceworker.entry(),
    output: config.serviceworker.output(),
    mode
  }
};
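The `DefinePlugin` entry above substitutes `process.browser` and `process.env.NODE_ENV` with literals in the client bundle, so application code can branch on them without a runtime `process` object. A small illustrative sketch (not from the repo):

```text
// Illustration only: how the DefinePlugin replacements are typically used.
if (process.browser) {
  // kept in the client bundle, absent from the server build
  console.log('running in the browser');
}

if (process.env.NODE_ENV !== 'development') {
  // replaced at build time with the literal mode string, so the other
  // branch becomes dead code the minifier can strip
  console.log('production build');
}
```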
3307
webapp/yarn.lock
Normal file

@ -5,13 +5,20 @@ import './Style-light.css';
import './Style-dark.css';
import './fonts/Fonts.css';
import { ForwardDot } from './utils.js';
import Feed from './Feed.js';
import Article from './Article.js';
import Comments from './Comments.js';
import Search from './Search.js';
import Submit from './Submit.js';
import Results from './Results.js';
import ScrollToTop from './ScrollToTop.js';
import Feed from './pages/Feed.js';
import Article from './pages/Article.js';
import Comments from './pages/Comments.js';
import Results from './pages/Results.js';

const pagingKey = (props) => {
  const query = new URLSearchParams(props.location.search);
  const skip = query.get('skip') || 0;
  const limit = query.get('limit') || 20;
  return `skip=${skip}&limit=${limit}`;
}

class App extends React.Component {
  constructor(props) {

@ -65,12 +72,12 @@ class App extends React.Component {
        <Route path='/(|search)' component={Submit} />
      </div>

      <Route path='/' exact render={(props) => <Feed {...props} updateCache={this.updateCache} />} />
      <Route path='/' exact render={(props) => <Feed {...props} updateCache={this.updateCache} key={pagingKey(props)} />} />
      <Switch>
        <Route path='/search' component={Results} />
        <Route path='/:id' exact render={(props) => <Article {...props} cache={this.cache} />} />
      </Switch>
      <Route path='/:id/c' exact render={(props) => <Comments {...props} cache={this.cache} />} />
      <Route path='/:id/c' exact render={(props) => <Comments {...props} cache={this.cache} key={`${props.match.params.id}`} />} />

      <ForwardDot />
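`pagingKey` turns the `skip` and `limit` query parameters into a React `key` for the Feed route, so changing pages remounts the component and re-runs its fetch instead of reusing stale state. Roughly, for illustration:

```text
// Illustration only: how the key changes with the query string.
pagingKey({ location: { search: '?skip=0&limit=20' } });   // "skip=0&limit=20"
pagingKey({ location: { search: '?skip=20&limit=20' } });  // "skip=20&limit=20"
// A different key makes React unmount the old <Feed> and mount a fresh one,
// so componentDidMount() fetches the newly requested page.
```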
|
|
@ -1,84 +0,0 @@
|
|||
import React from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { Helmet } from 'react-helmet';
|
||||
import localForage from 'localforage';
|
||||
import { sourceLink, infoLine, logos } from './utils.js';
|
||||
|
||||
class Feed extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
|
||||
this.state = {
|
||||
stories: JSON.parse(localStorage.getItem('stories')) || false,
|
||||
error: false,
|
||||
};
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
fetch('/api')
|
||||
.then(res => res.json())
|
||||
.then(
|
||||
(result) => {
|
||||
const updated = !this.state.stories || this.state.stories[0].id !== result.stories[0].id;
|
||||
console.log('updated:', updated);
|
||||
|
||||
this.setState({ stories: result.stories });
|
||||
localStorage.setItem('stories', JSON.stringify(result.stories));
|
||||
|
||||
if (updated) {
|
||||
localForage.clear();
|
||||
result.stories.forEach((x, i) => {
|
||||
fetch('/api/' + x.id)
|
||||
.then(res => res.json())
|
||||
.then(result => {
|
||||
localForage.setItem(x.id, result.story)
|
||||
.then(console.log('preloaded', x.id, x.title));
|
||||
this.props.updateCache(x.id, result.story);
|
||||
}, error => {}
|
||||
);
|
||||
});
|
||||
}
|
||||
},
|
||||
(error) => {
|
||||
this.setState({ error: true });
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
render() {
|
||||
const stories = this.state.stories;
|
||||
const error = this.state.error;
|
||||
|
||||
return (
|
||||
<div className='container'>
|
||||
<Helmet>
|
||||
<title>Feed - QotNews</title>
|
||||
</Helmet>
|
||||
{error && <p>Connection error?</p>}
|
||||
{stories ?
|
||||
<div>
|
||||
{stories.map(x =>
|
||||
<div className='item' key={x.id}>
|
||||
<div className='title'>
|
||||
<Link className='link' to={'/' + x.id}>
|
||||
<img className='source-logo' src={logos[x.source]} alt='source logo' /> {x.title}
|
||||
</Link>
|
||||
|
||||
<span className='source'>
|
||||
({sourceLink(x)})
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{infoLine(x)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
:
|
||||
<p>loading...</p>
|
||||
}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export default Feed;
|
|
@ -1,225 +1,241 @@
|
|||
body {
|
||||
text-rendering: optimizeLegibility;
|
||||
font: 1rem/1.3 sans-serif;
|
||||
color: #000000;
|
||||
margin-bottom: 100vh;
|
||||
word-break: break-word;
|
||||
font-kerning: normal;
|
||||
text-rendering: optimizeLegibility;
|
||||
font: 1rem/1.3 sans-serif;
|
||||
color: #000000;
|
||||
margin-bottom: 100vh;
|
||||
word-break: break-word;
|
||||
font-kerning: normal;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #000000;
|
||||
text-decoration: none;
|
||||
outline: none;
|
||||
color: #000000;
|
||||
text-decoration: none;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
input {
|
||||
font-size: 1.05rem;
|
||||
background-color: transparent;
|
||||
border: 1px solid #828282;
|
||||
margin: 0.25rem;
|
||||
padding: 6px;
|
||||
border-radius: 4px;
|
||||
font-size: 1.05rem;
|
||||
background-color: transparent;
|
||||
border: 1px solid #828282;
|
||||
margin: 0.25rem;
|
||||
padding: 6px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.container {
|
||||
margin: 1rem auto;
|
||||
max-width: 64rem;
|
||||
margin: 1rem auto;
|
||||
max-width: 64rem;
|
||||
}
|
||||
|
||||
.menu {
|
||||
font-size: 1.1rem;
|
||||
padding: 0 1rem;
|
||||
font-size: 1.1rem;
|
||||
padding: 0 1rem;
|
||||
}
|
||||
|
||||
.slogan {
|
||||
color: #828282;
|
||||
color: #828282;
|
||||
}
|
||||
|
||||
.theme {
|
||||
float: right;
|
||||
float: right;
|
||||
}
|
||||
|
||||
.item {
|
||||
display: table;
|
||||
color: #828282;
|
||||
margin-bottom: 0.7rem;
|
||||
display: table;
|
||||
color: #828282;
|
||||
margin-bottom: 0.7rem;
|
||||
}
|
||||
|
||||
.item .source-logo {
|
||||
width: 0.9rem;
|
||||
height: 0.9rem;
|
||||
width: 0.9rem;
|
||||
height: 0.9rem;
|
||||
}
|
||||
|
||||
.item a {
|
||||
color: #828282;
|
||||
color: #828282;
|
||||
}
|
||||
.item a:hover {
|
||||
text-decoration: underline;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.item a.link {
|
||||
font-size: 1.1rem;
|
||||
color: #000000;
|
||||
font-size: 1.1rem;
|
||||
color: #000000;
|
||||
}
|
||||
.item a.link:visited {
|
||||
color: #828282;
|
||||
color: #828282;
|
||||
}
|
||||
.item a.link:hover {
|
||||
text-decoration: none;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
span.source {
|
||||
margin-left: 0.4rem;
|
||||
margin-left: 0.4rem;
|
||||
}
|
||||
|
||||
.item .info a.hot {
|
||||
color: #444444;
|
||||
color: #444444;
|
||||
}
|
||||
|
||||
.article {
|
||||
padding-bottom: 3rem;
|
||||
padding-bottom: 3rem;
|
||||
}
|
||||
|
||||
.article-container {
|
||||
margin: 1rem auto;
|
||||
max-width: 38rem;
|
||||
margin: 1rem auto;
|
||||
max-width: 38rem;
|
||||
}
|
||||
|
||||
.article a {
|
||||
border-bottom: 1px solid #222222;
|
||||
border-bottom: 1px solid #222222;
|
||||
}
|
||||
|
||||
.article h1 {
|
||||
font-size: 1.6rem;
|
||||
font-size: 1.6rem;
|
||||
}
|
||||
|
||||
.article h2 {
|
||||
font-size: 1.4rem;
|
||||
font-size: 1.4rem;
|
||||
}
|
||||
|
||||
.article h3, .article h4 {
|
||||
font-size: 1.3rem;
|
||||
.article h3,
|
||||
.article h4 {
|
||||
font-size: 1.3rem;
|
||||
}
|
||||
|
||||
.article img {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.article figure, .article video {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
margin: 0;
|
||||
.article figure,
|
||||
.article video {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.article table {
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
.article iframe {
|
||||
display: none;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.article u {
|
||||
border-bottom: 1px solid #222;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid #222;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.article .info {
|
||||
color: #828282;
|
||||
color: #828282;
|
||||
}
|
||||
|
||||
.article .info a {
|
||||
border-bottom: none;
|
||||
color: #828282;
|
||||
border-bottom: none;
|
||||
color: #828282;
|
||||
}
|
||||
.article .info a:hover {
|
||||
text-decoration: underline;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.story-text {
|
||||
font: 1.2rem/1.5 'Apparatus SIL', sans-serif;
|
||||
margin-top: 1em;
|
||||
font: 1.2rem/1.5 "Apparatus SIL", sans-serif;
|
||||
margin-top: 1em;
|
||||
}
|
||||
|
||||
.comments {
|
||||
margin-left: -1.25rem;
|
||||
margin-left: -1.25rem;
|
||||
}
|
||||
|
||||
.comment {
|
||||
padding-left: 1.25rem;
|
||||
padding-left: 1.25rem;
|
||||
}
|
||||
|
||||
.comment.lined {
|
||||
border-left: 1px solid #cccccc;
|
||||
border-left: 1px solid #cccccc;
|
||||
}
|
||||
|
||||
.comment .text {
|
||||
margin-top: -0.5rem;
|
||||
margin-top: -0.5rem;
|
||||
}
|
||||
|
||||
.comment .text.hidden > p {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: none;
|
||||
color: #828282;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: none;
|
||||
color: #828282;
|
||||
}
|
||||
|
||||
.comment .text.hidden > p:first-child {
|
||||
display: block;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.comment .collapser {
|
||||
padding-left: 0.5rem;
|
||||
padding-right: 1.5rem;
|
||||
padding-left: 0.5rem;
|
||||
padding-right: 1.5rem;
|
||||
}
|
||||
|
||||
.comment .pointer {
|
||||
cursor: pointer;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.toggleDot {
|
||||
position: fixed;
|
||||
bottom: 1rem;
|
||||
left: 1rem;
|
||||
height: 3rem;
|
||||
width: 3rem;
|
||||
background-color: #828282;
|
||||
border-radius: 50%;
|
||||
position: fixed;
|
||||
bottom: 1rem;
|
||||
left: 1rem;
|
||||
height: 3rem;
|
||||
width: 3rem;
|
||||
background-color: #828282;
|
||||
border-radius: 50%;
|
||||
}
|
||||
|
||||
.toggleDot .button {
|
||||
font: 2rem/1 'icomoon';
|
||||
position: relative;
|
||||
top: 0.5rem;
|
||||
left: 0.55rem;
|
||||
font: 2rem/1 "icomoon";
|
||||
position: relative;
|
||||
top: 0.5rem;
|
||||
left: 0.55rem;
|
||||
}
|
||||
|
||||
.forwardDot {
|
||||
cursor: pointer;
|
||||
position: fixed;
|
||||
bottom: 1rem;
|
||||
right: 1rem;
|
||||
height: 3rem;
|
||||
width: 3rem;
|
||||
background-color: #828282;
|
||||
border-radius: 50%;
|
||||
cursor: pointer;
|
||||
position: fixed;
|
||||
bottom: 1rem;
|
||||
right: 1rem;
|
||||
height: 3rem;
|
||||
width: 3rem;
|
||||
background-color: #828282;
|
||||
border-radius: 50%;
|
||||
}
|
||||
|
||||
.forwardDot .button {
|
||||
font: 2.5rem/1 'icomoon';
|
||||
position: relative;
|
||||
top: 0.25rem;
|
||||
left: 0.3rem;
|
||||
font: 2.5rem/1 "icomoon";
|
||||
position: relative;
|
||||
top: 0.25rem;
|
||||
left: 0.3rem;
|
||||
}
|
||||
|
||||
.search form {
|
||||
display: inline;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.indented {
|
||||
padding: 0 0 0 1rem;
|
||||
}
|
||||
|
||||
.pagination {
|
||||
margin: 3rem 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.pagination-link.is-right {
|
||||
margin-left: auto;
|
||||
}
|
||||
|
|
34
webclient/src/components/StoryItem.js
Normal file
@ -0,0 +1,34 @@
import React from "react";
import { Link } from "react-router-dom";
import { sourceLink, infoLine, getLogoUrl } from "../utils.js";

export class StoryItem extends React.Component {
  constructor(props) {
    super(props);
  }

  render() {
    const story = this.props.story;
    const { id, title } = story;

    return (
      <div className="item" key={id}>
        <div className="title">
          <Link className="link" to={"/" + id}>
            <img
              className="source-logo"
              src={getLogoUrl(story)}
              alt="source logo"
            />
            {" "}
            {title}
          </Link>

          <span className="source">({sourceLink(story)})</span>
        </div>

        {infoLine(story)}
      </div>
    );
  }
}
|
@ -1,7 +1,7 @@
|
|||
import React from 'react';
|
||||
import { Helmet } from 'react-helmet';
|
||||
import localForage from 'localforage';
|
||||
import { sourceLink, infoLine, ToggleDot } from './utils.js';
|
||||
import { sourceLink, infoLine, otherDiscussions, ToggleDot } from '../utils.js';
|
||||
|
||||
class Article extends React.Component {
|
||||
constructor(props) {
|
||||
|
@ -14,29 +14,25 @@ class Article extends React.Component {
|
|||
|
||||
this.state = {
|
||||
story: cache[id] || false,
|
||||
related: [],
|
||||
error: false,
|
||||
pConv: [],
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
componentDidMount() {
|
||||
const id = this.props.match ? this.props.match.params.id : 'CLOL';
|
||||
|
||||
localForage.getItem(id)
|
||||
.then(
|
||||
(value) => {
|
||||
if (value) {
|
||||
this.setState({ story: value });
|
||||
}
|
||||
}
|
||||
);
|
||||
localForage.getItem(id).then((value) => value ? this.setState({ story: value }) : null);
|
||||
localForage.getItem(`related-${id}`).then((value) => value ? this.setState({ related: value }) : null);
|
||||
|
||||
fetch('/api/' + id)
|
||||
.then(res => res.json())
|
||||
.then(
|
||||
(result) => {
|
||||
this.setState({ story: result.story });
|
||||
this.setState({ story: result.story, related: result.related });
|
||||
localForage.setItem(id, result.story);
|
||||
localForage.setItem(`related-${id}`, result.related);
|
||||
},
|
||||
(error) => {
|
||||
this.setState({ error: true });
|
||||
|
@ -45,12 +41,13 @@ class Article extends React.Component {
|
|||
}
|
||||
|
||||
pConvert = (n) => {
|
||||
this.setState({ pConv: [...this.state.pConv, n]});
|
||||
this.setState({ pConv: [...this.state.pConv, n] });
|
||||
}
|
||||
|
||||
render() {
|
||||
const id = this.props.match ? this.props.match.params.id : 'CLOL';
|
||||
const story = this.state.story;
|
||||
const related = this.state.related.filter(r => r.id != id);
|
||||
const error = this.state.error;
|
||||
const pConv = this.state.pConv;
|
||||
let nodes = null;
|
||||
|
@ -77,6 +74,7 @@ class Article extends React.Component {
|
|||
</div>
|
||||
|
||||
{infoLine(story)}
|
||||
{otherDiscussions(related)}
|
||||
|
||||
{nodes ?
|
||||
<div className='story-text'>
|
||||
|
@ -85,10 +83,10 @@ class Article extends React.Component {
|
|||
v.innerHTML.split('\n\n').map(x =>
|
||||
<p dangerouslySetInnerHTML={{ __html: x }} />
|
||||
)
|
||||
:
|
||||
:
|
||||
(v.nodeName === '#text' ?
|
||||
<p>{v.data}</p>
|
||||
:
|
||||
:
|
||||
<>
|
||||
<v.localName dangerouslySetInnerHTML={v.innerHTML ? { __html: v.innerHTML } : null} />
|
||||
{v.localName == 'pre' && <button onClick={() => this.pConvert(k)}>Convert Code to Paragraph</button>}
|
||||
|
@ -96,11 +94,11 @@ class Article extends React.Component {
|
|||
)
|
||||
)}
|
||||
</div>
|
||||
:
|
||||
:
|
||||
<p>Problem getting article :(</p>
|
||||
}
|
||||
</div>
|
||||
:
|
||||
:
|
||||
<p>loading...</p>
|
||||
}
|
||||
<ToggleDot id={id} article={false} />
|
|
@ -4,9 +4,9 @@ import { HashLink } from 'react-router-hash-link';
|
|||
import { Helmet } from 'react-helmet';
|
||||
import moment from 'moment';
|
||||
import localForage from 'localforage';
|
||||
import { infoLine, ToggleDot } from './utils.js';
|
||||
import { infoLine, otherDiscussions, ToggleDot } from '../utils.js';
|
||||
|
||||
class Article extends React.Component {
|
||||
class Comments extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
|
||||
|
@ -17,6 +17,7 @@ class Article extends React.Component {
|
|||
|
||||
this.state = {
|
||||
story: cache[id] || false,
|
||||
related: [],
|
||||
error: false,
|
||||
collapsed: [],
|
||||
expanded: [],
|
||||
|
@ -26,24 +27,21 @@ class Article extends React.Component {
|
|||
componentDidMount() {
|
||||
const id = this.props.match.params.id;
|
||||
|
||||
localForage.getItem(id)
|
||||
.then(
|
||||
(value) => {
|
||||
this.setState({ story: value });
|
||||
}
|
||||
);
|
||||
localForage.getItem(id).then((value) => this.setState({ story: value }));
|
||||
localForage.getItem(`related-${id}`).then((value) => value ? this.setState({ related: value }) : null);
|
||||
|
||||
fetch('/api/' + id)
|
||||
.then(res => res.json())
|
||||
.then(
|
||||
(result) => {
|
||||
this.setState({ story: result.story }, () => {
|
||||
this.setState({ story: result.story, related: result.related }, () => {
|
||||
const hash = window.location.hash.substring(1);
|
||||
if (hash) {
|
||||
document.getElementById(hash).scrollIntoView();
|
||||
}
|
||||
});
|
||||
localForage.setItem(id, result.story);
|
||||
localForage.setItem(`related-${id}`, result.related);
|
||||
},
|
||||
(error) => {
|
||||
this.setState({ error: true });
|
||||
|
@ -72,7 +70,7 @@ class Article extends React.Component {
|
|||
}
|
||||
|
||||
displayComment(story, c, level) {
|
||||
const cid = c.author+c.date;
|
||||
const cid = c.author + c.date;
|
||||
|
||||
const collapsed = this.state.collapsed.includes(cid);
|
||||
const expanded = this.state.expanded.includes(cid);
|
||||
|
@ -85,19 +83,22 @@ class Article extends React.Component {
|
|||
<div className='info'>
|
||||
<p>
|
||||
{c.author === story.author ? '[OP]' : ''} {c.author || '[Deleted]'}
|
||||
{' '} | <HashLink to={'#'+cid} id={cid}>{moment.unix(c.date).fromNow()}</HashLink>
|
||||
{' '} | <HashLink to={'#' + cid} id={cid}>{moment.unix(c.date).fromNow()}</HashLink>
|
||||
|
||||
{hidden || hasChildren &&
|
||||
<span className='collapser pointer' onClick={() => this.collapseComment(cid)}>–</span>
|
||||
}
|
||||
{hasChildren && (
|
||||
hidden ?
|
||||
<span className='collapser expander pointer' onClick={() => this.expandComment(cid)}>+</span>
|
||||
:
|
||||
<span className='collapser pointer' onClick={() => this.collapseComment(cid)}>–</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className={collapsed ? 'text hidden' : 'text'} dangerouslySetInnerHTML={{ __html: c.text }} />
|
||||
<div className={collapsed ? 'text hidden' : 'text'} dangerouslySetInnerHTML={{ __html: c.text }} />
|
||||
|
||||
{hidden && hasChildren ?
|
||||
<div className='comment lined info pointer' onClick={() => this.expandComment(cid)}>[show {this.countComments(c)-1} more]</div>
|
||||
:
|
||||
<div className='comment lined info pointer' onClick={() => this.expandComment(cid)}>[show {this.countComments(c) - 1} more]</div>
|
||||
:
|
||||
c.comments.map(i => this.displayComment(story, i, level + 1))
|
||||
}
|
||||
</div>
|
||||
|
@ -107,6 +108,7 @@ class Article extends React.Component {
|
|||
render() {
|
||||
const id = this.props.match.params.id;
|
||||
const story = this.state.story;
|
||||
const related = this.state.related.filter(r => r.id != id);
|
||||
const error = this.state.error;
|
||||
|
||||
return (
|
||||
|
@ -125,12 +127,13 @@ class Article extends React.Component {
|
|||
</div>
|
||||
|
||||
{infoLine(story)}
|
||||
{otherDiscussions(related)}
|
||||
|
||||
<div className='comments'>
|
||||
{story.comments.map(c => this.displayComment(story, c, 0))}
|
||||
</div>
|
||||
</div>
|
||||
:
|
||||
:
|
||||
<p>loading...</p>
|
||||
}
|
||||
<ToggleDot id={id} article={true} />
|
||||
|
@ -139,4 +142,4 @@ class Article extends React.Component {
|
|||
}
|
||||
}
|
||||
|
||||
export default Article;
|
||||
export default Comments;
|
79
webclient/src/pages/Feed.js
Normal file
|
@ -0,0 +1,79 @@
|
|||
import React from 'react';
|
||||
import { Helmet } from 'react-helmet';
|
||||
import localForage from 'localforage';
|
||||
import { Link } from "react-router-dom";
|
||||
import { StoryItem } from '../components/StoryItem.js';
|
||||
|
||||
class Feed extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
|
||||
const query = new URLSearchParams(this.props.location.search);
|
||||
|
||||
this.state = {
|
||||
stories: JSON.parse(localStorage.getItem('stories')) || false,
|
||||
error: false,
|
||||
skip: +query.get('skip') || 0,
|
||||
limit: +query.get('limit') || 20
|
||||
};
|
||||
}
|
||||
|
||||
componentDidMount() {
|
||||
fetch(`/api?skip=${this.state.skip}&limit=${this.state.limit}`)
|
||||
.then(res => res.json())
|
||||
.then(
|
||||
(result) => {
|
||||
const updated = !this.state.stories || this.state.stories[0].id !== result.stories[0].id;
|
||||
console.log('updated:', updated);
|
||||
|
||||
const { stories } = result;
|
||||
this.setState({ stories });
|
||||
localStorage.setItem('stories', JSON.stringify(stories));
|
||||
|
||||
if (updated) {
|
||||
localForage.clear();
|
||||
stories.forEach((x, i) => {
|
||||
fetch('/api/' + x.id)
|
||||
.then(res => res.json())
|
||||
.then(({ story, related }) => {
|
||||
Promise.all([
|
||||
localForage.setItem(x.id, story),
|
||||
localForage.setItem(`related-${x.id}`, related)
|
||||
]).then(console.log('preloaded', x.id, x.title));
|
||||
this.props.updateCache(x.id, story);
|
||||
this.props.updateCache(`related-${x.id}`, related);
|
||||
}, error => { }
|
||||
);
|
||||
});
|
||||
}
|
||||
},
|
||||
(error) => {
|
||||
this.setState({ error: true });
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
render() {
|
||||
const stories = this.state.stories;
|
||||
const error = this.state.error;
|
||||
const skip = this.state.skip;
|
||||
const limit = this.state.limit;
|
||||
|
||||
return (
|
||||
<div className='container'>
|
||||
<Helmet>
|
||||
<title>Feed - QotNews</title>
|
||||
</Helmet>
|
||||
{error && <p>Connection error?</p>}
|
||||
{stories ? stories.map(story => <StoryItem story={story}></StoryItem>) : <p>loading...</p>}
|
||||
|
||||
<div className="pagination">
|
||||
{Number(skip) > 0 && <Link className="pagination-link" to={`/?skip=${Number(skip) - Math.min(Number(skip), Number(limit))}&limit=${limit}`}>Previous</Link>}
|
||||
{stories.length == Number(limit) && <Link className="pagination-link is-right" to={`/?skip=${Number(skip) + Number(limit)}&limit=${limit}`}>Next</Link>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export default Feed;
|
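The pagination links only adjust `skip` and `limit` in the query string; the `/api` endpoint is assumed to honour both parameters as in `componentDidMount` above. For illustration:

```text
// Illustration only - the URLs the pagination links above produce.
// On /?skip=20&limit=20:
//   Previous -> /?skip=0&limit=20   (skip - min(skip, limit))
//   Next     -> /?skip=40&limit=20  (shown only when a full page was returned)
// Each navigation changes pagingKey(props), so <Feed> remounts and
// fetches /api?skip=...&limit=... for the new page.
```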
|
@ -1,8 +1,8 @@
|
|||
import React from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { Helmet } from 'react-helmet';
|
||||
import { sourceLink, infoLine, logos } from './utils.js';
|
||||
import AbortController from 'abort-controller';
|
||||
import { Link } from "react-router-dom";
|
||||
import { StoryItem } from '../components/StoryItem.js';
|
||||
|
||||
class Results extends React.Component {
|
||||
constructor(props) {
|
||||
|
@ -25,7 +25,10 @@ class Results extends React.Component {
|
|||
const signal = this.controller.signal;
|
||||
|
||||
const search = this.props.location.search;
|
||||
fetch('/api/search' + search, { method: 'get', signal: signal })
|
||||
const params = new URLSearchParams(search);
|
||||
params.set('skip', params.get('skip') || 0);
|
||||
params.set('limit', params.get('limit') || 20);
|
||||
fetch('/api/search?' + params.toString(), { method: 'get', signal: signal })
|
||||
.then(res => res.json())
|
||||
.then(
|
||||
(result) => {
|
||||
|
@ -53,6 +56,13 @@ class Results extends React.Component {
|
|||
const stories = this.state.stories;
|
||||
const error = this.state.error;
|
||||
|
||||
const search = this.props.location.search;
|
||||
const params = new URLSearchParams(search);
|
||||
|
||||
const q = params.get('q') || '';
|
||||
const skip = params.get('skip') || 0;
|
||||
const limit = params.get('limit') || 20;
|
||||
|
||||
return (
|
||||
<div className='container'>
|
||||
<Helmet>
|
||||
|
@ -63,30 +73,17 @@ class Results extends React.Component {
|
|||
<>
|
||||
<p>Search results:</p>
|
||||
<div className='comment lined'>
|
||||
{stories.length ?
|
||||
stories.map(x =>
|
||||
<div className='item' key={x.id}>
|
||||
<div className='title'>
|
||||
<Link className='link' to={'/' + x.id}>
|
||||
<img className='source-logo' src={logos[x.source]} alt='source logo' /> {x.title}
|
||||
</Link>
|
||||
|
||||
<span className='source'>
|
||||
({sourceLink(x)})
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{infoLine(x)}
|
||||
</div>
|
||||
)
|
||||
:
|
||||
<p>none</p>
|
||||
}
|
||||
{stories ? stories.map(story => <StoryItem story={story}></StoryItem>) : <p>loading...</p>}
|
||||
</div>
|
||||
</>
|
||||
:
|
||||
:
|
||||
<p>loading...</p>
|
||||
}
|
||||
|
||||
<div className="pagination">
|
||||
{Number(skip) > 0 && <Link className="pagination-link" to={`/search?q=${q}&skip=${Number(skip) - Math.min(Number(skip), Number(limit))}&limit=${limit}`}>Previous</Link>}
|
||||
{stories.length == Number(limit) && <Link className="pagination-link is-right" to={`/search?q=${q}&skip=${Number(skip) + Number(limit)}&limit=${limit}`}>Next</Link>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|