diff --git a/apiserver/feed.py b/apiserver/feed.py
index a56e685..0cb8e42 100644
--- a/apiserver/feed.py
+++ b/apiserver/feed.py
@@ -7,7 +7,7 @@ import requests
import time
from bs4 import BeautifulSoup
-from feeds import hackernews, reddit, tildes
+from feeds import hackernews, reddit, tildes, manual
OUTLINE_API = 'https://outlineapi.com/article'
ARCHIVE_API = 'https://archive.fo/submit/'
@@ -99,6 +99,8 @@ def update_story(story):
res = reddit.story(story['ref'])
elif story['source'] == 'tildes':
res = tildes.story(story['ref'])
+ elif story['source'] == 'manual':
+ res = manual.story(story['ref'])
if res:
story.update(res) # join dicts
diff --git a/apiserver/feeds/manual.py b/apiserver/feeds/manual.py
new file mode 100644
index 0000000..fef3cd0
--- /dev/null
+++ b/apiserver/feeds/manual.py
@@ -0,0 +1,49 @@
+import logging
+logging.basicConfig(
+        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+        level=logging.DEBUG)
+
+import requests
+import time
+from bs4 import BeautifulSoup
+
+USER_AGENT = 'Twitterbot/1.0'
+
+def api(route):
+    """Fetch the given URL and return its body text, or False on any error."""
+    try:
+        headers = {'User-Agent': USER_AGENT}
+        r = requests.get(route, headers=headers, timeout=5)
+        if r.status_code != 200:
+            raise Exception('Bad response code ' + str(r.status_code))
+        return r.text
+    except KeyboardInterrupt:
+        raise
+    except Exception as e:
+        logging.error('Problem hitting manual website: {}'.format(str(e)))
+        return False
+
+def story(ref):
+    """Build a story dict for a manually submitted URL, or False on failure."""
+    html = api(ref)
+    if not html: return False
+
+    soup = BeautifulSoup(html, features='html.parser')
+
+    s = {}
+    s['author'] = 'manual submission'
+    s['author_link'] = 'https://news.t0.vc'
+    s['score'] = 0
+    s['date'] = int(time.time())
+    # fall back to the URL itself when the page has no usable <title>
+    s['title'] = str(soup.title.string) if soup.title and soup.title.string else ref
+    s['link'] = ref
+    s['url'] = ref
+    s['comments'] = []
+    s['num_comments'] = 0
+
+    return s
+
+# scratchpad so I can quickly develop the parser
+if __name__ == '__main__':
+    print(story('https://www.backblaze.com/blog/what-smart-stats-indicate-hard-drive-failures/'))
diff --git a/apiserver/server.py b/apiserver/server.py
index 44db571..0d7b254 100644
--- a/apiserver/server.py
+++ b/apiserver/server.py
@@ -13,7 +13,7 @@ import archive
import feed
from utils import gen_rand_id
-from flask import abort, Flask, request, render_template
+from flask import abort, Flask, request, render_template, stream_with_context, Response
from werkzeug.exceptions import NotFound
from flask_cors import CORS
@@ -36,12 +36,37 @@ with shelve.open(DATA_FILE) as db:
news_ref_to_id = db.get('news_ref_to_id', {})
news_cache = db.get('news_cache', {})
+ # clean cache if broken
+ try:
+ for ref in news_list:
+ nid = news_ref_to_id[ref]
+ _ = news_cache[nid]
+ except KeyError as e:
+ logging.error('Unable to find key: ' + str(e))
+ logging.info('Clearing caches...')
+ news_list = []
+ news_ref_to_id = {}
+ news_cache = {}
+
def get_story(sid):
if sid in news_cache:
return news_cache[sid]
else:
return archive.get_story(sid)
+def new_id():
+ nid = gen_rand_id()
+ while nid in news_cache or archive.get_story(nid):
+ nid = gen_rand_id()
+ return nid
+
+def remove_ref(old_ref):  # drop a story's ref from the list, id map, and cache
+    while old_ref in news_list:
+        news_list.remove(old_ref)
+    old_id = news_ref_to_id.pop(old_ref)
+    news_cache.pop(old_id, '')
+    logging.info('Removed ref {} id {}.'.format(old_ref, old_id))
+
build_folder = '../webclient/build'
flask_app = Flask(__name__, template_folder=build_folder, static_folder=build_folder, static_url_path='')
cors = CORS(flask_app)
@@ -66,6 +91,20 @@ def search():
res = []
return {'results': res}
+@flask_app.route('/api/submit', methods=['POST'], strict_slashes=False)
+def submit():
+ url = request.form['url']
+ nid = new_id()
+ news_story = dict(id=nid, ref=url, source='manual')
+ news_cache[nid] = news_story
+ valid = feed.update_story(news_story)
+ if valid:
+ archive.update(news_story)
+ return {'nid': nid}
+ else:
+ news_cache.pop(nid, '')
+ abort(400)
+
@flask_app.route('/api/