Compare commits


36 Commits

Author SHA1 Message Date
9ec61ea5bc Ignore dead and political stories 2025-05-27 18:47:17 +00:00
bdc7a6c10d Fix Better HN api content extraction 2025-02-01 22:39:13 +00:00
4858516b01 Add Better HN as an API backup 2025-02-01 21:42:06 +00:00
f10e6063fc Bug fixes 2025-02-01 20:31:35 +00:00
249a616531 Alert on story update error 2024-03-16 20:41:24 +00:00
ab92bd5441 Adjust score and comment thresholds 2024-03-08 03:08:18 +00:00
6b16a768a7 Fix deletion script 2024-03-08 03:08:03 +00:00
57de076fec Increase database timeout 2024-02-27 18:48:56 +00:00
074b898508 Fix lobsters comment parsing 2024-02-27 18:47:00 +00:00
f049d194ab Move scripts into own folder 2024-02-27 18:32:29 +00:00
c2b9a1cb7a Update readability 2024-02-27 18:32:19 +00:00
4435f49e17 Make "dark" theme grey, add "black" theme 2023-09-13 01:19:47 +00:00
494d89ac30 Disable lobsters 2023-09-13 01:02:15 +00:00
e79fca6ecc Replace "indent_level" with "depth" in lobsters API
See:
fe09e5aa31
2023-08-31 07:35:44 +00:00
c65fb69092 Handle Lobsters comment parsing TypeErrors
Too lazy to debug this:

2023-08-29 12:56:35,111 - root - INFO - Updating lobsters story: yktkwr, index: 55
Traceback (most recent call last):
  File "src/gevent/greenlet.py", line 854, in gevent._gevent_cgreenlet.Greenlet.run
  File "/home/tanner/qotnews/apiserver/server.py", line 194, in feed_thread
    valid = feed.update_story(story)
  File "/home/tanner/qotnews/apiserver/feed.py", line 74, in update_story
    res = lobsters.story(story['ref'])
  File "/home/tanner/qotnews/apiserver/feeds/lobsters.py", line 103, in story
    s['comments'] = iter_comments(r['comments'])
  File "/home/tanner/qotnews/apiserver/feeds/lobsters.py", line 76, in iter_comments
    parent_stack = parent_stack[:indent-1]
TypeError: unsupported operand type(s) for -: 'NoneType' and 'int'
2023-08-29T12:56:35Z <Greenlet at 0x7f92ad840ae0: feed_thread> failed with TypeError
2023-08-31 07:30:39 +00:00
632d028e4c Add Tildes group whitelist 2023-07-13 22:54:36 +00:00
ea8e9e5a23 Increase again 2023-06-13 17:11:50 +00:00
2838ea9b41 Increase Tildes story score requirement 2023-06-11 01:01:31 +00:00
f15d108971 Catch all possible Reddit API exceptions 2023-03-15 21:16:37 +00:00
f777348af8 Fix darkmode fullscreen button color 2022-08-11 19:36:36 +00:00
486404a413 Fix fix-stories bug 2022-08-10 04:06:39 +00:00
7c9c07a4cf Hide fullscreen button if it's not available 2022-08-10 04:05:25 +00:00
08d02f6013 Add fullscreen mode 2022-08-08 23:21:49 +00:00
1b54342702 Add red theme 2022-08-08 20:14:57 +00:00
9e9571a3c0 Write fixed stories to database 2022-07-05 00:57:56 +00:00
dc83a70887 Begin script to fix bad gzip text 2022-07-04 20:32:01 +00:00
2e2c9ae837 Move FEED_LENGTH to settings.py, use for search results 2022-07-04 19:08:24 +00:00
61021d8f91 Small UI changes 2022-07-04 19:08:24 +00:00
e65047fead Add accept gzip header to readability server 2022-07-04 19:07:31 +00:00
8e775c189f Add test file 2022-07-04 05:56:06 +00:00
3d9274309a Fix requests text encoding slowness 2022-07-04 05:55:52 +00:00
7bdbbf10b2 Return search results directly from the server 2022-07-04 04:33:01 +00:00
6aa0f78536 Remove Article / Comments, etc thing after name 2022-07-04 04:33:01 +00:00
bf3663bbec Remove hard-coded title 2022-06-30 00:12:22 +00:00
e6589dc61c Adjust title 2022-06-30 00:05:15 +00:00
307e8349f3 Change header based on page 2022-06-30 00:00:30 +00:00
31 changed files with 603 additions and 136 deletions

View File

@@ -5,7 +5,7 @@ from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import IntegrityError
engine = create_engine('sqlite:///data/qotnews.sqlite', connect_args={'timeout': 180})
engine = create_engine('sqlite:///data/qotnews.sqlite', connect_args={'timeout': 360})
Session = sessionmaker(bind=engine)
Base = declarative_base()
@@ -101,7 +101,22 @@ def del_ref(ref):
finally:
session.close()
def count_stories():
try:
session = Session()
return session.query(Story).count()
finally:
session.close()
def get_story_list():
try:
session = Session()
return session.query(Story.sid).all()
finally:
session.close()
if __name__ == '__main__':
init()
print(get_story_by_ref('hgi3sy'))
#print(get_story_by_ref('hgi3sy'))
print(len(get_reflist(99999)))
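
The two helpers added above (count_stories and get_story_list) exist so maintenance scripts can walk the whole stories table; the reindex and fix-gzip scripts later in this compare use them the same way. A minimal sketch of that usage, assuming database.init() has been run against the existing data/qotnews.sqlite:

import json
import database

database.init()

print('Stories in database:', database.count_stories())

# get_story_list() returns single-column rows like [('abcd',), ...],
# so each sid has to be unpacked before use.
for row in database.get_story_list()[:10]:
    sid = row[0]
    story = database.get_story(sid)
    meta = json.loads(story.meta_json)
    print(sid, meta.get('title', ''))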

View File

@@ -9,6 +9,7 @@ from bs4 import BeautifulSoup
import settings
from feeds import hackernews, reddit, tildes, manual, lobsters
import utils
INVALID_DOMAINS = ['youtube.com', 'bloomberg.com', 'wsj.com', 'sec.gov']
TWO_DAYS = 60*60*24*2
@@ -68,6 +69,7 @@ def get_content_type(url):
def update_story(story, is_manual=False):
res = {}
try:
if story['source'] == 'hackernews':
res = hackernews.story(story['ref'])
elif story['source'] == 'lobsters':
@@ -78,6 +80,10 @@ def update_story(story, is_manual=False):
res = tildes.story(story['ref'])
elif story['source'] == 'manual':
res = manual.story(story['ref'])
except BaseException as e:
utils.alert_tanner('Problem updating {} story, ref {}: {}'.format(story['source'], story['ref'], str(e)))
logging.exception(e)
return False
if res:
story.update(res) # join dicts
@@ -100,6 +106,12 @@ def update_story(story, is_manual=False):
logging.info(story['url'])
return False
if 'trump' in story['title'].lower() or 'musk' in story['title'].lower():
logging.info('Trump / Musk story, skipping')
logging.info(story['url'])
return False
logging.info('Getting article ' + story['url'])
story['text'] = get_article(story['url'])
if not story['text']: return False
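
The political-story check above sits inline in update_story(); a minimal standalone sketch of the same predicate (the keyword list is only the two terms visible in this diff, and the helper name is hypothetical):

# Hypothetical helper mirroring the inline title check in update_story() above.
SKIP_KEYWORDS = ['trump', 'musk']

def is_political(title):
    title = title.lower()
    return any(keyword in title for keyword in SKIP_KEYWORDS)

if __name__ == '__main__':
    print(is_political('Musk announces something'))     # True -> story gets skipped
    print(is_political('Show HN: a static site tool'))  # False -> story continues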

View File

@@ -12,7 +12,8 @@ import requests
from utils import clean
API_TOPSTORIES = lambda x: 'https://hacker-news.firebaseio.com/v0/topstories.json'
API_ITEM = lambda x : 'https://hn.algolia.com/api/v1/items/{}'.format(x)
ALG_API_ITEM = lambda x : 'https://hn.algolia.com/api/v1/items/{}'.format(x)
BHN_API_ITEM = lambda x : 'https://api.hnpwa.com/v0/item/{}.json'.format(x)
SITE_LINK = lambda x : 'https://news.ycombinator.com/item?id={}'.format(x)
SITE_AUTHOR_LINK = lambda x : 'https://news.ycombinator.com/user?id={}'.format(x)
@@ -42,7 +43,7 @@ def api(route, ref=None):
def feed():
return [str(x) for x in api(API_TOPSTORIES) or []]
def comment(i):
def alg_comment(i):
if 'author' not in i:
return False
@@ -51,19 +52,19 @@ def comment(i):
c['score'] = i.get('points', 0)
c['date'] = i.get('created_at_i', 0)
c['text'] = clean(i.get('text', '') or '')
c['comments'] = [comment(j) for j in i['children']]
c['comments'] = [alg_comment(j) for j in i['children']]
c['comments'] = list(filter(bool, c['comments']))
return c
def comment_count(i):
def alg_comment_count(i):
alive = 1 if i['author'] else 0
return sum([comment_count(c) for c in i['comments']]) + alive
return sum([alg_comment_count(c) for c in i['comments']]) + alive
def story(ref):
r = api(API_ITEM, ref)
def alg_story(ref):
r = api(ALG_API_ITEM, ref)
if not r:
logging.info('Bad Hackernews API response.')
return False
logging.info('Bad Algolia Hackernews API response.')
return None
if 'deleted' in r:
logging.info('Story was deleted.')
@@ -80,17 +81,85 @@ def story(ref):
s['title'] = r.get('title', '')
s['link'] = SITE_LINK(ref)
s['url'] = r.get('url', '')
s['comments'] = [comment(i) for i in r['children']]
s['comments'] = [alg_comment(i) for i in r['children']]
s['comments'] = list(filter(bool, s['comments']))
s['num_comments'] = comment_count(s) - 1
s['num_comments'] = alg_comment_count(s) - 1
if 'text' in r and r['text']:
s['text'] = clean(r['text'] or '')
return s
def bhn_comment(i):
if 'user' not in i:
return False
c = {}
c['author'] = i.get('user', '')
c['score'] = 0 # Not present?
c['date'] = i.get('time', 0)
c['text'] = clean(i.get('content', '') or '')
c['comments'] = [bhn_comment(j) for j in i['comments']]
c['comments'] = list(filter(bool, c['comments']))
return c
def bhn_story(ref):
r = api(BHN_API_ITEM, ref)
if not r:
logging.info('Bad BetterHN Hackernews API response.')
return None
if 'deleted' in r: # TODO: verify
logging.info('Story was deleted.')
return False
elif r.get('dead', False):
logging.info('Story was deleted.')
return False
elif r.get('type', '') != 'link':
logging.info('Type "{}" is not "link".'.format(r.get('type', '')))
return False
s = {}
s['author'] = r.get('user', '')
s['author_link'] = SITE_AUTHOR_LINK(r.get('user', ''))
s['score'] = r.get('points', 0)
s['date'] = r.get('time', 0)
s['title'] = r.get('title', '')
s['link'] = SITE_LINK(ref)
s['url'] = r.get('url', '')
if s['url'].startswith('item'):
s['url'] = SITE_LINK(ref)
s['comments'] = [bhn_comment(i) for i in r['comments']]
s['comments'] = list(filter(bool, s['comments']))
s['num_comments'] = r.get('comments_count', 0)
if 'content' in r and r['content']:
s['text'] = clean(r['content'] or '')
return s
def story(ref):
s = alg_story(ref)
if s is None:
s = bhn_story(ref)
if not s:
return False
if s['score'] < 25 and s['num_comments'] < 10:
logging.info('Score ({}) or num comments ({}) below threshold.'.format(s['score'], s['num_comments']))
return False
return s
# scratchpad so I can quickly develop the parser
if __name__ == '__main__':
print(feed())
#print(story(20763961))
#print(story(20802050))
#print(story(42899834)) # type "job"
#print(story(42900076)) # Ask HN
#print(story(42898201)) # Show HN
#print(story(42899703)) # normal
print(story(42902678)) # bad title?
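
The refactor above splits the old story() into alg_story() (Algolia) and bhn_story() (the Better HN / hnpwa backup), and the new top-level story() treats None as "the API call itself failed, try the backup" and False as "the item is valid to skip" (deleted, dead, wrong type, or below threshold). A scratchpad-style sketch of exercising that fallback directly, assuming it is run from the apiserver directory with both endpoints reachable:

from feeds import hackernews

ref = '42899703'  # one of the refs already used in the scratchpad above

alg = hackernews.alg_story(ref)   # None on Algolia failure, False if skippable
bhn = hackernews.bhn_story(ref)   # same convention for the Better HN API
combined = hackernews.story(ref)  # falls back to bhn_story() only when alg is None

print('Algolia result:  ', bool(alg))
print('Better HN result:', bool(bhn))
if combined:
    print('kept, score', combined['score'], 'comments', combined['num_comments'])
else:
    print('skipped (below threshold, deleted, or both APIs failed)')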

View File

@@ -50,7 +50,7 @@ def unix(date_str):
def make_comment(i):
c = {}
try:
c['author'] = i['commenting_user']['username']
c['author'] = i['commenting_user']
except KeyError:
c['author'] = ''
c['score'] = i.get('score', 0)
@@ -67,13 +67,13 @@ def iter_comments(flat_comments):
parent_stack = []
for comment in flat_comments:
c = make_comment(comment)
indent = comment['indent_level']
indent = comment['depth']
if indent == 1:
if indent == 0:
nested_comments.append(c)
parent_stack = [c]
else:
parent_stack = parent_stack[:indent-1]
parent_stack = parent_stack[:indent]
p = parent_stack[-1]
p['comments'].append(c)
parent_stack.append(c)
@@ -87,7 +87,7 @@ def story(ref):
s = {}
try:
s['author'] = r['submitter_user']['username']
s['author'] = r['submitter_user']
s['author_link'] = SITE_AUTHOR_LINK(s['author'])
except KeyError:
s['author'] = ''
@@ -103,6 +103,10 @@ def story(ref):
s['comments'] = iter_comments(r['comments'])
s['num_comments'] = r['comment_count']
if s['score'] < 15 and s['num_comments'] < 10:
logging.info('Score ({}) or num comments ({}) below threshold.'.format(s['score'], s['num_comments']))
return False
if 'description' in r and r['description']:
s['text'] = clean(r['description'] or '')
@@ -112,5 +116,5 @@ def story(ref):
if __name__ == '__main__':
#print(feed())
import json
print(json.dumps(story('fzvd1v')))
#print(story(20802050))
print(json.dumps(story('fzvd1v'), indent=4))
#print(json.dumps(story('ixyv5u'), indent=4))
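
The interesting change above is the indent handling: the Lobsters API now reports a 0-based depth instead of the old 1-based indent_level, and iter_comments() rebuilds the comment tree from the flat list by trimming parent_stack to the current depth. A self-contained sketch of that reconstruction on synthetic data (field names reduced to what the algorithm needs), handy for convincing yourself the slicing is right:

import json

def nest_comments(flat_comments):
    # Same parent_stack walk as iter_comments() above, on minimal dicts.
    nested = []
    parent_stack = []
    for comment in flat_comments:
        c = {'text': comment['text'], 'comments': []}
        depth = comment['depth']
        if depth == 0:
            nested.append(c)
            parent_stack = [c]
        else:
            parent_stack = parent_stack[:depth]  # drop anything deeper than the parent
            parent_stack[-1]['comments'].append(c)
            parent_stack.append(c)
    return nested

if __name__ == '__main__':
    flat = [
        {'text': 'root A',      'depth': 0},
        {'text': 'reply A.1',   'depth': 1},
        {'text': 'reply A.1.a', 'depth': 2},
        {'text': 'reply A.2',   'depth': 1},
        {'text': 'root B',      'depth': 0},
    ]
    print(json.dumps(nest_comments(flat), indent=4))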

View File

@@ -32,10 +32,7 @@ def feed():
return [x.id for x in reddit.subreddit(subs).hot()]
except KeyboardInterrupt:
raise
except PRAWException as e:
logging.critical('Problem hitting reddit API: {}'.format(str(e)))
return []
except PrawcoreException as e:
except BaseException as e:
logging.critical('Problem hitting reddit API: {}'.format(str(e)))
return []

View File

@@ -107,7 +107,20 @@ def story(ref):
ch = a.find('header', class_='topic-comments-header')
s['num_comments'] = int(ch.h2.string.split(' ')[0]) if ch else 0
if s['score'] < 8 and s['num_comments'] < 6:
if s['group'].split('.')[0] not in [
'~arts',
'~comp',
'~creative',
'~design',
'~engineering',
'~finance',
'~science',
'~tech',
]:
logging.info('Group ({}) not in whitelist.'.format(s['group']))
return False
if s['score'] < 15 and s['num_comments'] < 10:
logging.info('Score ({}) or num comments ({}) below threshold.'.format(s['score'], s['num_comments']))
return False
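
The whitelist above compares only the part of the group name before the first dot, presumably so submissions to a subgroup still match their parent group. A tiny sketch of that normalization (group names here are illustrative only):

WHITELIST = ['~arts', '~comp', '~creative', '~design',
             '~engineering', '~finance', '~science', '~tech']

def group_allowed(group):
    # '~tech.something' -> '~tech', '~comp' stays '~comp'
    return group.split('.')[0] in WHITELIST

print(group_allowed('~tech'))   # True
print(group_allowed('~anime'))  # False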

View File

@@ -1,6 +1,8 @@
import database
import search
import sys
import settings
import logging
import json
import requests
@@ -21,7 +23,7 @@ def database_del_story(sid):
def search_del_story(sid):
try:
r = requests.delete(search.MEILI_URL + 'indexes/qotnews/documents/'+sid, timeout=2)
r = requests.delete(settings.MEILI_URL + 'indexes/qotnews/documents/'+sid, timeout=2)
if r.status_code != 202:
raise Exception('Bad response code ' + str(r.status_code))
return r.json()

View File

@@ -0,0 +1,58 @@
import time
import json
import logging
import feed
import database
import search
database.init()
def fix_gzip_bug(story_list):
FIX_THRESHOLD = 150
count = 1
for sid in story_list:
try:
sid = sid[0]
story = database.get_story(sid)
full_json = json.loads(story.full_json)
meta_json = json.loads(story.meta_json)
text = full_json.get('text', '')
count = text.count('�')
if not count: continue
ratio = count / len(text) * 1000
print('Bad story:', sid, 'Num ?:', count, 'Ratio:', ratio)
if ratio < FIX_THRESHOLD: continue
print('Attempting to fix...')
valid = feed.update_story(meta_json, is_manual=True)
if valid:
database.put_story(meta_json)
search.put_story(meta_json)
print('Success')
else:
print('Story was not valid')
time.sleep(3)
except KeyboardInterrupt:
raise
except BaseException as e:
logging.exception(e)
breakpoint()
if __name__ == '__main__':
num_stories = database.count_stories()
print('Fix {} stories?'.format(num_stories))
print('Press ENTER to continue, ctrl-c to cancel')
input()
story_list = database.get_story_list()
fix_gzip_bug(story_list)
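
The FIX_THRESHOLD check above is a density heuristic: it counts U+FFFD replacement characters in the stored article text and only re-fetches the story when there are at least 150 of them per 1000 characters. A worked example of the arithmetic with made-up inputs:

FIX_THRESHOLD = 150  # replacement characters per 1000 characters of text

def needs_fix(text):
    count = text.count('\ufffd')  # U+FFFD, the Unicode replacement character
    if not count:
        return False
    ratio = count / len(text) * 1000
    return ratio >= FIX_THRESHOLD

# 200 bad characters in a 1000-character article -> ratio 200, re-fetch it;
# 5 bad characters in the same length of text -> ratio 5, leave it alone.
print(needs_fix('\ufffd' * 200 + 'a' * 800))  # True
print(needs_fix('\ufffd' * 5 + 'a' * 995))    # False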

View File

@@ -23,28 +23,14 @@ def put_stories(stories):
def get_update(update_id):
return search.meili_api(requests.get, 'tasks/{}'.format(update_id))
def count_stories():
try:
session = database.Session()
return session.query(database.Story).count()
finally:
session.close()
def get_story_list():
try:
session = database.Session()
return session.query(database.Story.sid).all()
finally:
session.close()
if __name__ == '__main__':
num_stories = count_stories()
num_stories = database.count_stories()
print('Reindex {} stories?'.format(num_stories))
print('Press ENTER to continue, ctrl-c to cancel')
input()
story_list = get_story_list()
story_list = database.get_story_list()
count = 1
while len(story_list):
@@ -59,8 +45,7 @@ if __name__ == '__main__':
story = database.get_story(sid)
print('Indexing {}/{} id: {} title: {}'.format(count, num_stories, sid[0], story.title))
story_obj = json.loads(story.meta_json)
to_add = dict(title=story_obj['title'], id=story_obj['id'], date=story_obj['date'])
stories.append(to_add)
stories.append(story_obj)
count += 1
res = put_stories(stories)

View File

@@ -0,0 +1,23 @@
import time
import requests
def test_search_api():
num_tests = 100
total_time = 0
for i in range(num_tests):
start = time.time()
res = requests.get('http://127.0.0.1:33842/api/search?q=iphone')
res.raise_for_status()
duration = time.time() - start
total_time += duration
avg_time = total_time / num_tests
print('Average search time:', avg_time)
if __name__ == '__main__':
test_search_api()

View File

@@ -8,12 +8,16 @@ import settings
SEARCH_ENABLED = bool(settings.MEILI_URL)
def meili_api(method, route, json=None, params=None):
def meili_api(method, route, json=None, params=None, parse_json=True):
try:
r = method(settings.MEILI_URL + route, json=json, params=params, timeout=4)
if r.status_code > 299:
raise Exception('Bad response code ' + str(r.status_code))
if parse_json:
return r.json()
else:
r.encoding = 'utf-8'
return r.text
except KeyboardInterrupt:
raise
except BaseException as e:
@@ -29,9 +33,9 @@ def update_rankings():
return meili_api(requests.post, 'indexes/qotnews/settings/ranking-rules', json=json)
def update_attributes():
json = ['title']
json = ['title', 'url', 'author']
r = meili_api(requests.post, 'indexes/qotnews/settings/searchable-attributes', json=json)
json = ['id']
json = ['id', 'ref', 'source', 'author', 'author_link', 'score', 'date', 'title', 'link', 'url', 'num_comments']
r = meili_api(requests.post, 'indexes/qotnews/settings/displayed-attributes', json=json)
return r
@@ -45,18 +49,17 @@ def init():
def put_story(story):
if not SEARCH_ENABLED: return
to_add = dict(title=story['title'], id=story['id'], date=story['date'])
return meili_api(requests.post, 'indexes/qotnews/documents', [to_add])
return meili_api(requests.post, 'indexes/qotnews/documents', [story])
def search(q):
if not SEARCH_ENABLED: return []
params = dict(q=q, limit=250)
r = meili_api(requests.get, 'indexes/qotnews/search', params=params)
return r['hits']
params = dict(q=q, limit=settings.FEED_LENGTH)
r = meili_api(requests.get, 'indexes/qotnews/search', params=params, parse_json=False)
return r
if __name__ == '__main__':
init()
print(update_rankings())
print(search('qot'))
print(search('facebook'))
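
The parse_json=False path exists so search results can be handed to the client without a decode/re-encode round trip: Meilisearch already returns JSON text, search() now returns that text verbatim (or [] when search is disabled), and the Flask handler in the server.py diff below wraps it in a Response as-is. A minimal sketch of the pass-through, assuming a Meilisearch instance is running at settings.MEILI_URL and the index has been initialized:

import search

search.init()

raw = search.search('facebook')
print(type(raw))  # <class 'str'> when MEILI_URL is set
print(raw[:200])  # raw Meilisearch response text, e.g. starting with '{"hits":[...'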

View File

@@ -15,6 +15,7 @@ import traceback
import time
from urllib.parse import urlparse, parse_qs
import settings
import database
import search
import feed
@@ -27,7 +28,6 @@ from flask_cors import CORS
database.init()
search.init()
FEED_LENGTH = 75
news_index = 0
def new_id():
@@ -43,7 +43,7 @@ cors = CORS(flask_app)
@flask_app.route('/api')
def api():
skip = request.args.get('skip', 0)
limit = request.args.get('limit', FEED_LENGTH)
limit = request.args.get('limit', settings.FEED_LENGTH)
stories = database.get_stories(limit, skip)
# hacky nested json
res = Response('{"stories":[' + ','.join(stories) + ']}')
@@ -56,10 +56,8 @@ def apisearch():
if len(q) >= 3:
results = search.search(q)
else:
results = []
story_metas = [database.get_story(x['id']).meta_json for x in results]
# hacky nested json
res = Response('{"results":[' + ','.join(story_metas) + ']}')
results = '[]'
res = Response(results)
res.headers['content-type'] = 'application/json'
return res
@@ -124,7 +122,7 @@ def story(sid):
@flask_app.route('/search')
def index():
return render_template('index.html',
title='Feed',
title='QotNews',
url='news.t0.vc',
description='Hacker News, Reddit, Lobsters, and Tildes articles rendered in reader mode',
robots='index',
@@ -153,7 +151,7 @@ def static_story(sid):
url = url.replace('www.', '')
return render_template('index.html',
title=story['title'],
title=story['title'] + ' | QotNews',
url=url,
description=description,
robots='noindex',
@@ -179,7 +177,7 @@ def feed_thread():
logging.info('Already have ID / ref, skipping.')
continue
ref_list = database.get_reflist(FEED_LENGTH)
ref_list = database.get_reflist(settings.FEED_LENGTH)
# update current stories
if news_index < len(ref_list):
@@ -206,7 +204,7 @@ def feed_thread():
gevent.sleep(6)
news_index += 1
if news_index == FEED_LENGTH: news_index = 0
if news_index == settings.FEED_LENGTH: news_index = 0
except KeyboardInterrupt:
logging.info('Ending feed thread...')
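
The "hacky nested json" responses above work because get_stories() / meta_json already hold serialized JSON strings, so the handler splices them into a JSON array by string concatenation instead of parsing and re-dumping every story. A minimal sketch of the same idea, assuming stories is a list of pre-serialized JSON objects:

import json
from flask import Response

def stories_response(stories):
    # stories: list of pre-serialized JSON strings (e.g. Story.meta_json values).
    # Splicing them into a literal array avoids a json.loads()/json.dumps()
    # round trip per story.
    res = Response('{"stories":[' + ','.join(stories) + ']}')
    res.headers['content-type'] = 'application/json'
    return res

if __name__ == '__main__':
    fake = ['{"id": "AAAA", "title": "first"}', '{"id": "BBBB", "title": "second"}']
    body = '{"stories":[' + ','.join(fake) + ']}'
    print(json.loads(body)['stories'][1]['title'])  # second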

View File

@@ -4,6 +4,7 @@
# Feed Lengths
# Number of top items from each site to pull
# set to 0 to disable that site
FEED_LENGTH = 75
NUM_HACKERNEWS = 15
NUM_LOBSTERS = 10
NUM_REDDIT = 15

View File

@@ -8,6 +8,14 @@ import string
from bleach.sanitizer import Cleaner
def alert_tanner(message):
try:
logging.info('Alerting Tanner: ' + message)
params = dict(qotnews=message)
requests.get('https://tbot.tannercollin.com/message', params=params, timeout=4)
except BaseException as e:
logging.error('Problem alerting Tanner: ' + str(e))
def gen_rand_id():
return ''.join(random.choice(string.ascii_uppercase) for _ in range(4))

View File

@@ -35,6 +35,7 @@ app.post('/', (req, res) => {
const url = req.body.url;
const requestOptions = {
url: url,
gzip: true,
//headers: {'User-Agent': 'Googlebot/2.1 (+http://www.google.com/bot.html)'},
//headers: {'User-Agent': 'Twitterbot/1.0'},
headers: {

View File

@@ -68,17 +68,17 @@ asn1@~0.2.3:
assert-plus@1.0.0, assert-plus@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525"
integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=
integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
aws-sign2@~0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=
integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==
aws4@^1.8.0:
version "1.11.0"
@@ -88,7 +88,7 @@ aws4@^1.8.0:
bcrypt-pbkdf@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e"
integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=
integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==
dependencies:
tweetnacl "^0.14.3"
@@ -121,7 +121,7 @@ bytes@3.1.2:
caseless@~0.12.0:
version "0.12.0"
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=
integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==
combined-stream@^1.0.6, combined-stream@~1.0.6:
version "1.0.8"
@@ -155,7 +155,7 @@ cookie@0.4.2:
core-util-is@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==
cssom@^0.4.1:
version "0.4.4"
@@ -177,7 +177,7 @@ cssstyle@^2.0.0:
dashdash@^1.12.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=
integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==
dependencies:
assert-plus "^1.0.0"
@@ -205,7 +205,7 @@ deep-is@~0.1.3:
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
depd@~1.1.2:
version "1.1.2"
@@ -232,7 +232,7 @@ dompurify@^1.0.11:
ecc-jsbn@~0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9"
integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=
integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==
dependencies:
jsbn "~0.1.0"
safer-buffer "^2.1.0"
@@ -328,7 +328,7 @@ extend@~3.0.2:
extsprintf@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=
integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==
extsprintf@^1.2.0:
version "1.4.1"
@@ -366,7 +366,7 @@ finalhandler@~1.1.2:
forever-agent@~0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=
integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==
form-data@~2.3.2:
version "2.3.3"
@@ -390,14 +390,14 @@ fresh@0.5.2:
getpass@^0.1.1:
version "0.1.7"
resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=
integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==
dependencies:
assert-plus "^1.0.0"
har-schema@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=
integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==
har-validator@~5.1.3:
version "5.1.5"
@@ -428,7 +428,7 @@ http-errors@1.8.1:
http-signature@~1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=
integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==
dependencies:
assert-plus "^1.0.0"
jsprim "^1.2.2"
@@ -459,17 +459,17 @@ ipaddr.js@1.9.1:
is-typedarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==
isstream@~0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=
integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==
jsbn@~0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM=
integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==
jsdom@^15.1.1:
version "15.2.1"
@@ -516,7 +516,7 @@ json-schema@0.4.0:
json-stringify-safe@~5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==
jsprim@^1.2.2:
version "1.4.2"
@@ -566,7 +566,19 @@ mime-db@1.51.0:
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c"
integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==
mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34:
mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
mime-types@^2.1.12, mime-types@~2.1.19:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
dependencies:
mime-db "1.52.0"
mime-types@~2.1.24, mime-types@~2.1.34:
version "2.1.34"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24"
integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==
@@ -640,7 +652,7 @@ path-to-regexp@0.1.7:
performance-now@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=
integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==
pn@^1.1.0:
version "1.1.0"
@@ -661,9 +673,9 @@ proxy-addr@~2.0.7:
ipaddr.js "1.9.1"
psl@^1.1.28:
version "1.8.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==
version "1.9.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7"
integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==
punycode@^2.1.0, punycode@^2.1.1:
version "2.1.1"
@@ -696,8 +708,8 @@ raw-body@2.4.3:
unpipe "1.0.0"
"readability@https://github.com/mozilla/readability":
version "0.4.2"
resolved "https://github.com/mozilla/readability#1d2cb030b32e753cc4b7c4ce8b64c3ce4dc1b2ff"
version "0.5.0"
resolved "https://github.com/mozilla/readability#39a5c5409fb653858b1832141895b882b9092b47"
request-promise-core@1.1.4:
version "1.1.4"
@@ -859,14 +871,14 @@ tr46@^1.0.1:
tunnel-agent@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=
integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==
dependencies:
safe-buffer "^5.0.1"
tweetnacl@^0.14.3, tweetnacl@~0.14.0:
version "0.14.5"
resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=
integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==
type-check@~0.3.2:
version "0.3.2"
@@ -913,7 +925,7 @@ vary@~1.1.2:
verror@1.10.0:
version "1.10.0"
resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400"
integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=
integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==
dependencies:
assert-plus "^1.0.0"
core-util-is "1.0.2"

View File

@@ -28,7 +28,7 @@
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>{{ title }} - QotNews</title>
<title>{{ title }}</title>
<style>
html {

View File

@@ -3,8 +3,10 @@ import { BrowserRouter as Router, Route, Link, Switch } from 'react-router-dom';
import localForage from 'localforage';
import './Style-light.css';
import './Style-dark.css';
import './Style-black.css';
import './Style-red.css';
import './fonts/Fonts.css';
import { ForwardDot } from './utils.js';
import { BackwardDot, ForwardDot } from './utils.js';
import Feed from './Feed.js';
import Article from './Article.js';
import Comments from './Comments.js';
@@ -38,6 +40,16 @@ class App extends React.Component {
localStorage.setItem('theme', 'dark');
}
black() {
this.setState({ theme: 'black' });
localStorage.setItem('theme', 'black');
}
red() {
this.setState({ theme: 'red' });
localStorage.setItem('theme', 'red');
}
componentDidMount() {
if (!this.cache.length) {
localForage.iterate((value, key) => {
@@ -47,22 +59,61 @@ class App extends React.Component {
}
}
goFullScreen() {
if ('wakeLock' in navigator) {
navigator.wakeLock.request('screen');
}
document.body.requestFullscreen({ navigationUI: 'hide' }).then(() => {
window.addEventListener('resize', () => this.forceUpdate());
this.forceUpdate();
});
};
exitFullScreen() {
document.exitFullscreen().then(() => {
this.forceUpdate();
});
};
render() {
const theme = this.state.theme;
document.body.style.backgroundColor = theme === 'dark' ? '#000' : '#eeeeee';
if (theme === 'dark') {
document.body.style.backgroundColor = '#1a1a1a';
} else if (theme === 'black') {
document.body.style.backgroundColor = '#000';
} else if (theme === 'red') {
document.body.style.backgroundColor = '#000';
} else {
document.body.style.backgroundColor = '#eeeeee';
}
const fullScreenAvailable = document.fullscreenEnabled ||
document.mozFullscreenEnabled ||
document.webkitFullscreenEnabled ||
document.msFullscreenEnabled;
return (
<div className={theme}>
<Router>
<div className='container menu'>
<p>
<Link to='/'>QotNews - Feed</Link>
<span className='theme'>Theme: <a href='#' onClick={() => this.light()}>Light</a> - <a href='#' onClick={() => this.dark()}>Dark</a></span>
<Link to='/'>QotNews</Link>
<span className='theme'><a href='#' onClick={() => this.light()}>Light</a> - <a href='#' onClick={() => this.dark()}>Dark</a> - <a href='#' onClick={() => this.black()}>Black</a> - <a href='#' onClick={() => this.red()}>Red</a></span>
<br />
<span className='slogan'>Hacker News, Reddit, Lobsters, and Tildes articles rendered in reader mode.</span>
</p>
<Route path='/(|search)' component={Search} />
<Route path='/(|search)' component={Submit} />
{fullScreenAvailable &&
<Route path='/(|search)' render={() => !document.fullscreenElement ?
<button className='fullscreen' onClick={() => this.goFullScreen()}>Enter Fullscreen</button>
:
<button className='fullscreen' onClick={() => this.exitFullScreen()}>Exit Fullscreen</button>
} />
}
</div>
<Route path='/' exact render={(props) => <Feed {...props} updateCache={this.updateCache} />} />
@@ -72,6 +123,7 @@ class App extends React.Component {
</Switch>
<Route path='/:id/c' exact render={(props) => <Comments {...props} cache={this.cache} />} />
<BackwardDot />
<ForwardDot />
<ScrollToTop />

View File

@@ -67,8 +67,8 @@ class Article extends React.Component {
{story ?
<div className='article'>
<Helmet>
<title>{story.title} - QotNews</title>
<meta name="robots" content="noindex">
<title>{story.title} | QotNews</title>
<meta name="robots" content="noindex" />
</Helmet>
<h1>{story.title}</h1>

View File

@@ -115,7 +115,8 @@ class Article extends React.Component {
{story ?
<div className='article'>
<Helmet>
<title>{story.title} - QotNews Comments</title>
<title>{story.title} | QotNews</title>
<meta name="robots" content="noindex" />
</Helmet>
<h1>{story.title}</h1>

View File

@@ -52,8 +52,8 @@ class Feed extends React.Component {
return (
<div className='container'>
<Helmet>
<title>Feed - QotNews</title>
<meta name="robots" content="index">
<title>QotNews</title>
<meta name="robots" content="index" />
</Helmet>
{error && <p>Connection error?</p>}
{stories ?

View File

@@ -29,7 +29,7 @@ class Results extends React.Component {
.then(res => res.json())
.then(
(result) => {
this.setState({ stories: result.results });
this.setState({ stories: result.hits });
},
(error) => {
if (error.message !== 'The operation was aborted. ') {
@@ -56,7 +56,7 @@ class Results extends React.Component {
return (
<div className='container'>
<Helmet>
<title>Feed - QotNews</title>
<title>Search Results | QotNews</title>
</Helmet>
{error && <p>Connection error?</p>}
{stories ?

View File

@@ -15,6 +15,7 @@ class ScrollToTop extends React.Component {
}
window.scrollTo(0, 0);
document.body.scrollTop = 0;
}
render() {

View File

@@ -37,7 +37,7 @@ class Search extends Component {
<span className='search'>
<form onSubmit={this.searchAgain}>
<input
placeholder='Search... (fixed)'
placeholder='Search...'
value={search}
onChange={this.searchArticles}
ref={this.inputRef}

View File

@@ -0,0 +1,68 @@
.black {
color: #ddd;
}
.black a {
color: #ddd;
}
.black input {
color: #ddd;
border: 1px solid #828282;
}
.black button {
background-color: #444444;
border-color: #bbb;
color: #ddd;
}
.black .item {
color: #828282;
}
.black .item .source-logo {
filter: grayscale(1);
}
.black .item a {
color: #828282;
}
.black .item a.link {
color: #ddd;
}
.black .item a.link:visited {
color: #828282;
}
.black .item .info a.hot {
color: #cccccc;
}
.black .article a {
border-bottom: 1px solid #aaaaaa;
}
.black .article u {
border-bottom: 1px solid #aaaaaa;
text-decoration: none;
}
.black .story-text video,
.black .story-text img {
filter: brightness(50%);
}
.black .article .info {
color: #828282;
}
.black .article .info a {
border-bottom: none;
color: #828282;
}
.black .comment.lined {
border-left: 1px solid #444444;
}

View File

@@ -11,12 +11,14 @@
border: 1px solid #828282;
}
.dark .item {
color: #828282;
.dark button {
background-color: #444444;
border-color: #bbb;
color: #ddd;
}
.dark .item .source-logo {
filter: grayscale(1);
.dark .item {
color: #828282;
}
.dark .item a {
@@ -43,6 +45,7 @@
text-decoration: none;
}
.dark .story-text video,
.dark .story-text img {
filter: brightness(50%);
}

View File

@@ -2,9 +2,30 @@ body {
text-rendering: optimizeLegibility;
font: 1rem/1.3 sans-serif;
color: #000000;
margin-bottom: 100vh;
word-break: break-word;
font-kerning: normal;
margin: 0;
}
::backdrop {
background-color: rgba(0,0,0,0);
}
body:fullscreen {
overflow-y: scroll !important;
}
body:-ms-fullscreen {
overflow-y: scroll !important;
}
body:-webkit-full-screen {
overflow-y: scroll !important;
}
body:-moz-full-screen {
overflow-y: scroll !important;
}
#root {
margin: 8px 8px 100vh 8px !important;
}
a {
@@ -22,6 +43,12 @@ input {
border-radius: 4px;
}
.fullscreen {
margin: 0.25rem;
padding: 0.25rem;
}
pre {
overflow: auto;
}
@@ -185,16 +212,20 @@ span.source {
cursor: pointer;
}
.toggleDot {
.dot {
cursor: pointer;
position: fixed;
bottom: 1rem;
left: 1rem;
height: 3rem;
width: 3rem;
background-color: #828282;
border-radius: 50%;
}
.toggleDot {
bottom: 1rem;
left: 1rem;
}
.toggleDot .button {
font: 2rem/1 'icomoon';
position: relative;
@@ -203,21 +234,27 @@ span.source {
}
.forwardDot {
cursor: pointer;
position: fixed;
bottom: 1rem;
right: 1rem;
height: 3rem;
width: 3rem;
background-color: #828282;
border-radius: 50%;
}
.forwardDot .button {
font: 2.5rem/1 'icomoon';
font: 2rem/1 'icomoon';
position: relative;
top: 0.25rem;
left: 0.3rem;
top: 0.5rem;
left: 0.5rem;
}
.backwardDot {
bottom: 1rem;
right: 5rem;
}
.backwardDot .button {
font: 2rem/1 'icomoon';
position: relative;
top: 0.5rem;
left: 0.5rem;
}
.search form {

View File

@@ -0,0 +1,82 @@
.red {
color: #b00;
scrollbar-color: #b00 #440000;
}
.red a {
color: #b00;
}
.red input {
color: #b00;
border: 1px solid #690000;
}
.red input::placeholder {
color: #690000;
}
.red hr {
background-color: #690000;
}
.red button {
background-color: #440000;
border-color: #b00;
color: #b00;
}
.red .item,
.red .slogan {
color: #690000;
}
.red .item .source-logo {
display: none;
}
.red .item a {
color: #690000;
}
.red .item a.link {
color: #b00;
}
.red .item a.link:visited {
color: #690000;
}
.red .item .info a.hot {
color: #cc0000;
}
.red .article a {
border-bottom: 1px solid #aa0000;
}
.red .article u {
border-bottom: 1px solid #aa0000;
text-decoration: none;
}
.red .story-text video,
.red .story-text img {
filter: grayscale(100%) brightness(20%) sepia(100%) hue-rotate(-50deg) saturate(600%) contrast(0.8);
}
.red .article .info {
color: #690000;
}
.red .article .info a {
border-bottom: none;
color: #690000;
}
.red .comment.lined {
border-left: 1px solid #440000;
}
.red .dot {
background-color: #440000;
}

View File

@@ -41,7 +41,7 @@ class Submit extends Component {
<span className='search'>
<form onSubmit={this.submitArticle}>
<input
placeholder='Submit Article'
placeholder='Submit URL'
ref={this.inputRef}
/>
</form>

Binary file not shown.

View File

@@ -25,8 +25,9 @@ export class ToggleDot extends React.Component {
render() {
const id = this.props.id;
const article = this.props.article;
return (
<div className='toggleDot'>
<div className='dot toggleDot'>
<div className='button'>
<Link to={'/' + id + (article ? '' : '/c')}>
{article ? '' : ''}
@@ -37,6 +38,27 @@ export class ToggleDot extends React.Component {
}
}
export class BackwardDot extends React.Component {
goBackward() {
localStorage.setItem('scrollLock', 'True');
window.history.back();
}
render() {
const isMobile = /iPhone|iPad|iPod|Android/i.test(navigator.userAgent);
if (!isMobile) return null;
if (!document.fullscreenElement) return null;
return (
<div className='dot backwardDot' onClick={this.goBackward}>
<div className='button'>
</div>
</div>
);
}
}
export class ForwardDot extends React.Component {
goForward() {
localStorage.setItem('scrollLock', 'True');
@@ -48,9 +70,9 @@ export class ForwardDot extends React.Component {
if (!isMobile) return null;
return (
<div className='forwardDot' onClick={this.goForward}>
<div className='dot forwardDot' onClick={this.goForward}>
<div className='button'>
</div>
</div>
);