# qotnews/apiserver/server.py
import os, logging

DEBUG = os.environ.get('DEBUG')
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.DEBUG if DEBUG else logging.INFO)

import gevent
from gevent import monkey
monkey.patch_all()
from gevent.pywsgi import WSGIServer
import copy
import json
import threading
import traceback
import time
import datetime
import humanize
import urllib.request
from urllib.parse import urlparse, parse_qs
import settings
import database
import search
import feed
from utils import gen_rand_id, NUM_ID_CHARS
from flask import abort, Flask, request, render_template, stream_with_context, Response
from werkzeug.exceptions import NotFound
from flask_cors import CORS
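
# Hostname allowlist built from Kagi's public Small Web list, used by the
# /api endpoint's optional smallweb=true filter.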
smallweb_set = set()

def load_smallweb_list():
    EXCLUDED = [
        'github.com',
    ]
    global smallweb_set
    try:
        url = 'https://raw.githubusercontent.com/kagisearch/smallweb/refs/heads/main/smallweb.txt'
        with urllib.request.urlopen(url, timeout=10) as response:
            urls = response.read().decode('utf-8').splitlines()
        hosts = {urlparse(u).hostname for u in urls if u and urlparse(u).hostname}
        smallweb_set = {h.replace('www.', '') for h in hosts if h not in EXCLUDED}
        logging.info('Loaded {} smallweb domains.'.format(len(smallweb_set)))
    except Exception as e:
        logging.error('Failed to load smallweb list: {}'.format(e))

load_smallweb_list()

database.init()
search.init()
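
# State shared between the Flask routes and the background feed thread.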
news_index = 0
ref_list = []
current_item = {}
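
# Generate a random story ID, retrying until it doesn't collide with an existing story.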
def new_id():
    nid = gen_rand_id()
    while database.get_story(nid):
        nid = gen_rand_id()
    return nid

def fromnow(ts):
    return humanize.naturaltime(datetime.datetime.fromtimestamp(ts))

build_folder = './build'
flask_app = Flask(__name__, template_folder=build_folder, static_folder=build_folder, static_url_path='')
flask_app.jinja_env.filters['fromnow'] = fromnow
cors = CORS(flask_app)
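
# Feed endpoint: returns stories as a nested-JSON response, optionally filtered
# to Small Web hosts, e.g. GET /api?skip=20&limit=20&smallweb=true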
@flask_app.route('/api')
def api():
    skip = int(request.args.get('skip', 0))
    limit = int(request.args.get('limit', settings.FEED_LENGTH))

    if request.args.get('smallweb') == 'true' and smallweb_set:
        # page through the database until enough smallweb-hosted stories are found
        filtered_stories = []
        current_skip = skip
        while len(filtered_stories) < limit:
            stories_batch = database.get_stories(limit, current_skip)
            if not stories_batch:
                break
            for story_str in stories_batch:
                story = json.loads(story_str)
                story_url = story.get('url') or story.get('link') or ''
                if not story_url:
                    continue
                hostname = urlparse(story_url).hostname
                if hostname:
                    hostname = hostname.replace('www.', '')
                    if hostname in smallweb_set:
                        filtered_stories.append(story_str)
                        if len(filtered_stories) == limit:
                            break
            if len(filtered_stories) == limit:
                break
            current_skip += limit
        stories = filtered_stories
    else:
        stories = database.get_stories(limit, skip)

    # hacky nested json
    res = Response('{"stories":[' + ','.join(stories) + ']}')
    res.headers['content-type'] = 'application/json'
    return res
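
# Debug/statistics endpoint exposing the feed thread's current state.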
@flask_app.route('/api/stats', strict_slashes=False)
def apistats():
    stats = {
        'news_index': news_index,
        'ref_list': ref_list,
        'len_ref_list': len(ref_list),
        'current_item': current_item,
        'total_stories': database.count_stories(),
        'id_space': 26**NUM_ID_CHARS,
    }
    return stats
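
# Full-text search endpoint; queries shorter than three characters return an empty result.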
@flask_app.route('/api/search', strict_slashes=False)
def apisearch():
    q = request.args.get('q', '')
    if len(q) >= 3:
        results = search.search(q)
    else:
        results = '[]'
    res = Response(results)
    res.headers['content-type'] = 'application/json'
    return res
@flask_app.route('/api/submit', methods=['POST'], strict_slashes=False)
def submit():
    try:
        url = request.form['url']
        for prefix in ['http://', 'https://']:
            if url.lower().startswith(prefix):
                break
        else: # for-else: no scheme prefix matched, default to http
            url = 'http://' + url
        nid = new_id()
        logging.info('Manual submission: ' + url)
        parse = urlparse(url)

        if 'news.ycombinator.com' in parse.hostname:
            source = 'hackernews'
            ref = parse_qs(parse.query)['id'][0]
        elif 'tildes.net' in parse.hostname and '~' in url:
            source = 'tildes'
            ref = parse.path.split('/')[2]
        elif 'lobste.rs' in parse.hostname and '/s/' in url:
            source = 'lobsters'
            ref = parse.path.split('/')[2]
        elif 'reddit.com' in parse.hostname and 'comments' in url:
            source = 'reddit'
            ref = parse.path.split('/')[4]
        elif 'news.t0.vc' in parse.hostname:
            raise Exception('Invalid article')
        else:
            source = 'manual'
            ref = url

        existing = database.get_story_by_ref(ref)
        if existing and DEBUG:
            ref = ref + '#' + str(time.time())
            existing = False
        if existing:
            return {'nid': existing.sid}
        else:
            story = dict(id=nid, ref=ref, source=source)
            valid = feed.update_story(story, is_manual=True)
            if valid:
                database.put_story(story)
                search.put_story(story)
                if DEBUG:
                    logging.info('Adding manual ref: {}, id: {}, source: {}'.format(ref, nid, source))
                database.put_ref(ref, nid, source)
                return {'nid': nid}
            else:
                raise Exception('Invalid article')
    except Exception as e:
        msg = 'Problem with article submission: {} - {}'.format(e.__class__.__name__, str(e))
        logging.error(msg)
        print(traceback.format_exc())
        return {'error': msg.split('\n')[0]}, 400
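
# Single-story API endpoint, keyed by story ID.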
@flask_app.route('/api/<sid>')
def story(sid):
    story = database.get_story(sid)
    if story:
        # hacky nested json
        res = Response('{"story":' + story.full_json + '}')
        res.headers['content-type'] = 'application/json'
        return res
    else:
        return abort(404)
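
# Server-side rendered front page (also used for /search).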
@flask_app.route('/')
@flask_app.route('/search')
def index():
    stories_json = database.get_stories(settings.FEED_LENGTH, 0)
    stories = [json.loads(s) for s in stories_json]
    for s in stories:
        hostname = urlparse(s.get('url') or s.get('link') or '').hostname or ''
        s['hostname'] = hostname.replace('www.', '')
    return render_template('index.html',
        title='QotNews',
        url='news.t0.vc',
        description='Hacker News, Reddit, Lobsters, and Tildes articles rendered in reader mode',
        robots='index',
        stories=stories,
    )
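
# Story page: serve a prebuilt static file if one exists, otherwise render the
# story server-side (with comments when the /<sid>/c path is used).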
@flask_app.route('/<sid>', strict_slashes=False)
@flask_app.route('/<sid>/c', strict_slashes=False)
def static_story(sid):
    try:
        return flask_app.send_static_file(sid)
    except NotFound:
        pass

    story_obj = database.get_story(sid)
    if not story_obj:
        return abort(404)
    story = json.loads(story_obj.full_json)

    score = story['score']
    num_comments = story['num_comments']
    source = story['source']
    description = '{} point{}, {} comment{} on {}'.format(
        score, 's' if score != 1 else '',
        num_comments, 's' if num_comments != 1 else '',
        source)

    hostname = urlparse(story.get('url') or story.get('link') or '').hostname or ''
    hostname = hostname.replace('www.', '')

    return render_template('index.html',
        title=story['title'] + ' | QotNews',
        url=hostname,
        description=description,
        robots='noindex',
        story=story,
        show_comments=request.path.endswith('/c'),
    )
http_server = WSGIServer(('', 33842), flask_app)
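
# Background worker: onboards new refs at the start of each pass, then re-parses
# one story from the reference list every six seconds.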
def feed_thread():
    global news_index, ref_list, current_item

    try:
        while True:
            # onboard new stories
            if news_index == 0:
                for ref, source in feed.list():
                    if database.get_story_by_ref(ref):
                        continue
                    try:
                        nid = new_id()
                        logging.info('Adding ref: {}, id: {}, source: {}'.format(ref, nid, source))
                        database.put_ref(ref, nid, source)
                    except database.IntegrityError:
                        logging.info('Already have ID / ref, skipping.')
                        continue

            ref_list = database.get_reflist(settings.FEED_LENGTH)

            # update current stories
            if news_index < len(ref_list):
                current_item = ref_list[news_index]
                try:
                    story_json = database.get_story(current_item['sid']).full_json
                    story = json.loads(story_json)
                except AttributeError:
                    story = dict(id=current_item['sid'], ref=current_item['ref'], source=current_item['source'])

                logging.info('Updating {} story: {}, index: {}'.format(story['source'], story['ref'], news_index))
                valid = feed.update_story(story)
                if valid:
                    database.put_story(story)
                    search.put_story(story)
                else:
                    database.del_ref(current_item['ref'])
                    logging.info('Removed ref {}'.format(current_item['ref']))
            else:
                logging.info('Skipping index: ' + str(news_index))

            gevent.sleep(6)

            news_index += 1
            if news_index == settings.FEED_LENGTH:
                news_index = 0

    except KeyboardInterrupt:
        logging.info('Ending feed thread...')
    except ValueError as e:
        logging.critical('feed_thread error: {} {}'.format(e.__class__.__name__, e))
        http_server.stop()
logging.info('Starting Feed thread...')
gevent.spawn(feed_thread)

logging.info('Starting HTTP thread...')
try:
    http_server.serve_forever()
except KeyboardInterrupt:
    logging.info('Exiting...')