commit
aa54ea1b0a
@ -0,0 +1,108 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# project specific
|
||||
# scratch output produced during local testing; not meant to be committed
|
||||
data.json
|
@ -0,0 +1,15 @@
|
||||
[[source]]
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
|
||||
[dev-packages]
|
||||
|
||||
[packages]
|
||||
requests = "*"
|
||||
json = "*"  # NOTE: `json` is a stdlib module; the PyPI package of this name is unrelated — likely removable
|
||||
pprint = "*"  # NOTE: `pprint` is a stdlib module; this dependency is likely removable
|
||||
praw = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.7"
|
@ -0,0 +1,27 @@
|
||||
# pynit
|
||||
an integration between saved reddit posts and pinboard
|
||||
|
||||
## Outline:
|
||||
### Getting data from reddit
|
||||
Currently I'm getting only the first page of saved items, and no NSFW items
|
||||
- [X] refactor to use praw instead of requests
|
||||
- praw exposes more functionality, more discoverably, than I could figure out with requests.
|
||||
- it's aggravating.
|
||||
- [X] Figure out how to pull the entire list
|
||||
- in praw this is done through "limit=None" arg.
|
||||
- [X] Figure out how to enable pulling NSFW items
|
||||
- in praw this is actually just done by default
|
||||
- [ ] Figure out how to differentiate between self.posts, link.posts, and comments
|
||||
- each one will have different fields but REDDIT DOESN'T DOCUMENT THIS
|
||||
- because reddit is stupid, that's why, i guess.
|
||||
|
||||
### Parse data
|
||||
Do I need to do anything here, actually, or is JSON fine as-is?
|
||||
|
||||
### Putting data in pinboard
|
||||
So far I've done nothing.
|
||||
- [ ] Get regular auth to work
|
||||
- [ ] Figure out how to post an item to my feed as public
|
||||
- [ ] Figure out how to post an item to my feed as private
|
||||
- [ ] Enable a conditional; NSFW items get posted as private, regular items as public.
|
||||
- [ ] Iterate through a list.
|
@ -0,0 +1,29 @@
|
||||
import pdb
|
||||
import praw
|
||||
import json
|
||||
import os
|
||||
|
||||
def summarize_saved_item(item):
    """Return ``[fullname, subreddit, url, author, title]`` for a saved link post.

    A saved listing mixes link posts and comments; comments lack ``title``
    and ``url``, and a deleted author is ``None`` (so ``.name`` fails).
    Returns ``None`` for any such item so callers can skip it, keeping the
    narrow AttributeError handling isolated here instead of around the
    whole collection loop.
    """
    try:
        return [
            item.name,
            item.subreddit.display_name,
            item.url,
            item.author.name,
            item.title,
        ]
    except AttributeError:
        # Not a regular link post: a saved comment, or the author was deleted.
        return None


def main():
    """Pull the full saved listing for the configured user and write data.json.

    Credentials are read from the environment: REDDIT_ID, REDDIT_SECRET,
    REDDIT_UN, REDDIT_PW. Output is a JSON array of per-post field lists.
    """
    reddit = praw.Reddit(
        client_id=os.environ.get('REDDIT_ID'),
        client_secret=os.environ.get('REDDIT_SECRET'),
        user_agent='/u/ pynit-tasks',
        username=os.environ.get('REDDIT_UN'),
        password=os.environ.get('REDDIT_PW'),
    )

    your_user = reddit.redditor(os.environ.get('REDDIT_UN'))
    # limit=None makes praw page through the entire saved listing,
    # not just the first page.
    saved_posts = your_user.saved(limit=None)

    summaries = (summarize_saved_item(link) for link in saved_posts)
    posts_to_save = [s for s in summaries if s is not None]

    with open('data.json', 'w') as outfile:
        json.dump(posts_to_save, outfile, indent=2)


# Guard the entry point so importing this module does not hit the network
# or overwrite data.json.
if __name__ == '__main__':
    main()
|
||||
|
Loading…
Reference in new issue