Merge remote-tracking branch 'tanner/master' into master

And adding relevant settings.py.example/etc.
Jason Schwarzenberger 2020-11-03 16:44:02 +13:00
commit 5f98a2e76a
6 changed files with 86 additions and 24 deletions

View File

@@ -35,7 +35,7 @@ $ source env/bin/activate
 (env) $ pip install -r requirements.txt
 ```
-Configure Praw for your Reddit account:
+Configure Praw for your Reddit account (optional):
 * Go to https://www.reddit.com/prefs/apps
 * Click "Create app"
@@ -44,16 +44,14 @@ Configure Praw for your Reddit account:
 * Description: blank
 * About URL: blank
 * Redirect URL: your GitHub profile
-* Submit, copy the client ID and client secret into `praw.ini`:
+* Submit, copy the client ID and client secret into `settings.py` below
 ```text
-(env) $ vim praw.ini
-[bot]
-client_id=paste here
-client_secret=paste here
-user_agent=script by github/your-username-here
+(env) $ vim settings.py.example
 ```
+Edit it and save it as `settings.py`.
 Now you can run the server:
 ```text
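Since the README now routes Reddit credentials through `settings.py` instead of `praw.ini`, a quick way to confirm they were pasted correctly is a short PRAW check. This is a hypothetical snippet, not part of the repo, and it assumes `settings.py` has been created as described above:

```python
# check_reddit.py — hypothetical credential check, not part of this commit
import praw
import settings

reddit = praw.Reddit(
    client_id=settings.REDDIT_CLIENT_ID,
    client_secret=settings.REDDIT_CLIENT_SECRET,
    user_agent=settings.REDDIT_USER_AGENT,
)

# A read-only script app can still list posts; an auth error here means
# the client ID/secret in settings.py are wrong.
for submission in reddit.subreddit('python').hot(limit=3):
    print(submission.title)
```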

View File

@@ -105,7 +105,7 @@ ENV/
 # DB
 db.sqlite3
-praw.ini
+settings.py
 data.db
 data.db.bak
 data/archive/*

View File

@@ -7,6 +7,7 @@ import requests
 import time
 from bs4 import BeautifulSoup
+import settings
 from feeds import hackernews, reddit, tildes, substack, manual, sitemap
 OUTLINE_API = 'https://api.outline.com/v3/parse_article'
@@ -22,14 +23,30 @@ nzherald = sitemap.Sitemap("https://www.nzherald.co.nz/arcio/news-sitemap/")
 def list():
     feed = []
-    feed += [(x, 'hackernews') for x in hackernews.feed()[:10]]
-    feed += [(x, 'tildes') for x in tildes.feed()[:10]]
-    feed += [(x, 'stuff') for x in stuff.feed()[:10]]
-    feed += [(x, 'nzherald') for x in nzherald.feed()[:10]]
-    feed += [(x, 'substack') for x in substack.top.feed()[:15]]
-    feed += [(x, 'reddit') for x in reddit.feed()[:15]]
-    feed += [(x, 'webworm') for x in webworm.feed()[:15]]
-    feed += [(x, 'the bulletin') for x in bulletin.feed()[:15]]
+    if settings.NUM_HACKERNEWS:
+        feed += [(x, 'hackernews') for x in hackernews.feed()[:settings.NUM_HACKERNEWS]]
+
+    if settings.NUM_REDDIT:
+        feed += [(x, 'reddit') for x in reddit.feed()[:settings.NUM_REDDIT]]
+
+    if settings.NUM_TILDES:
+        feed += [(x, 'tildes') for x in tildes.feed()[:settings.NUM_TILDES]]
+
+    if settings.NUM_SUBSTACK:
+        feed += [(x, 'substack') for x in substack.top.feed()[:settings.NUM_SUBSTACK]]
+
+    if settings.NUM_STUFF:
+        feed += [(x, 'stuff') for x in stuff.feed()[:settings.NUM_STUFF]]
+
+    if settings.NUM_NZHERALD:
+        feed += [(x, 'nzherald') for x in nzherald.feed()[:settings.NUM_NZHERALD]]
+
+    if settings.NUM_WEBWORM:
+        feed += [(x, 'webworm') for x in webworm.feed()[:settings.NUM_WEBWORM]]
+
+    if settings.NUM_BULLETIN:
+        feed += [(x, 'the bulletin') for x in bulletin.feed()[:settings.NUM_BULLETIN]]
 
     return feed
 
 def get_article(url):
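The new `list()` gates each source on its `NUM_*` setting with a repeated if-block, which is explicit but grows by three lines per source. A table-driven variant (a sketch of an alternative, not what the commit does; shown with three sources for brevity) would keep the per-source logic in one place:

```python
# Hypothetical refactor of feed.py's list(), not part of this commit.
import settings
from feeds import hackernews, reddit, tildes

# (label, fetcher, max items) — extend with the remaining sources as needed
SOURCES = [
    ('hackernews', hackernews.feed, settings.NUM_HACKERNEWS),
    ('reddit', reddit.feed, settings.NUM_REDDIT),
    ('tildes', tildes.feed, settings.NUM_TILDES),
]

def list():
    feed = []
    for label, fetch, limit in SOURCES:
        if limit:  # 0 disables the source entirely
            feed += [(x, label) for x in fetch()[:limit]]
    return feed
```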

View File

@@ -12,18 +12,24 @@ from praw.exceptions import PRAWException
 from praw.models import MoreComments
 from prawcore.exceptions import PrawcoreException
+import settings
 from utils import render_md, clean
-SUBREDDITS = 'newzealand'
 SITE_LINK = lambda x : 'https://old.reddit.com{}'.format(x)
 SITE_AUTHOR_LINK = lambda x : 'https://old.reddit.com/u/{}'.format(x)
-reddit = praw.Reddit('bot')
+if settings.NUM_REDDIT:
+    reddit = praw.Reddit(
+        client_id=settings.REDDIT_CLIENT_ID,
+        client_secret=settings.REDDIT_CLIENT_SECRET,
+        user_agent=settings.REDDIT_USER_AGENT,
+    )
+    subs = '+'.join(settings.SUBREDDITS)
 
 def feed():
     try:
-        return [x.id for x in reddit.subreddit(SUBREDDITS).hot()]
+        return [x.id for x in reddit.subreddit(subs).hot()]
     except KeyboardInterrupt:
         raise
     except PRAWException as e:
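One detail behind this change: PRAW treats a `'+'`-joined string of subreddit names as a single combined listing, which is why `SUBREDDITS` became a list that gets joined before the query. A minimal illustration (hypothetical, outside the repo):

```python
# '+'-joined subreddit names query several subreddits at once in PRAW.
subs = '+'.join(['Economics', 'TrueReddit'])
print(subs)  # Economics+TrueReddit
# reddit.subreddit('Economics+TrueReddit').hot() then yields hot posts
# drawn from both subreddits in one listing.
```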

View File

@@ -1,4 +0,0 @@
-[bot]
-client_id=
-client_secret=
-user_agent=

View File

@@ -0,0 +1,45 @@
+# QotNews settings
+# edit this file and save it as settings.py
+
+# Feed Lengths
+# Number of top items from each site to pull
+# set to 0 to disable that site
+NUM_HACKERNEWS = 15
+NUM_REDDIT = 10
+NUM_TILDES = 5
+NUM_SUBSTACK = 10
+NUM_WEBWORM = 0
+NUM_NZHERALD = 0
+NUM_STUFF = 0
+NUM_BULLETIN = 0
+
+# Reddit account info
+# leave blank if not using Reddit
+REDDIT_CLIENT_ID = ''
+REDDIT_CLIENT_SECRET = ''
+REDDIT_USER_AGENT = ''
+
+SUBREDDITS = [
+    'Economics',
+    'AcademicPhilosophy',
+    'DepthHub',
+    'Foodforthought',
+    'HistoryofIdeas',
+    'LaymanJournals',
+    'PhilosophyofScience',
+    'PoliticsPDFs',
+    'Scholar',
+    'StateOfTheUnion',
+    'TheAgora',
+    'TrueFilm',
+    'TrueReddit',
+    'UniversityofReddit',
+    'culturalstudies',
+    'hardscience',
+    'indepthsports',
+    'indepthstories',
+    'ludology',
+    'neurophilosophy',
+    'resilientcommunities',
+    'worldevents',
+]
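Because `feed.py` and `feeds/reddit.py` now import `settings` unconditionally, the app won't start until this file is copied to `settings.py`. A hypothetical sanity check, not part of the commit, that catches the most likely misconfiguration:

```python
# verify_settings.py — hypothetical helper, not part of this commit
import settings

if settings.NUM_REDDIT and not (settings.REDDIT_CLIENT_ID and settings.REDDIT_CLIENT_SECRET):
    raise SystemExit('NUM_REDDIT is set but the Reddit credentials are blank')
if settings.NUM_REDDIT and not settings.SUBREDDITS:
    raise SystemExit('NUM_REDDIT is set but SUBREDDITS is empty')
print('settings.py looks consistent')
```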