From aa54ea1b0a6eb60e6cee0c9683e7e3a2d54ee5d6 Mon Sep 17 00:00:00 2001 From: jowj Date: Fri, 14 Jun 2019 15:31:11 -0500 Subject: [PATCH] Initial commit --- .gitignore | 108 +++++++++++++++++++++++++++++++++++++++++++++++++++++ Pipfile | 15 ++++++++ README.md | 27 ++++++++++++++ main.py | 29 ++++++++++++++ 4 files changed, 179 insertions(+) create mode 100644 .gitignore create mode 100644 Pipfile create mode 100644 README.md create mode 100644 main.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..eefaa49 --- /dev/null +++ b/.gitignore @@ -0,0 +1,108 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# project specific +# this is used when I'm testing bullshit. 
+data.json \ No newline at end of file diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..9c4ed05 --- /dev/null +++ b/Pipfile @@ -0,0 +1,15 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] + +[packages] +requests = "*" +# json and pprint ship with the Python standard library; the unrelated +# PyPI packages of the same names must not be installed here. +praw = "*" + +[requires] +python_version = "3.7" diff --git a/README.md b/README.md new file mode 100644 index 0000000..b78ac56 --- /dev/null +++ b/README.md @@ -0,0 +1,27 @@ +# pynit +an integration between saved reddit posts and pinboard + +## Outline: +### Getting data from reddit +Currently I'm getting only the first page of saved, no NSFW items +- [X] refactor to use praw instead of requests + - praw has more functionality more obviously than I can figure out with requests. + - it's aggravating. +- [X] Figure out how to pull the entire list + - in praw this is done through the "limit=None" arg. +- [X] Figure out how to enable pulling NSFW items + - in praw this is actually just done by default +- [ ] Figure out how to differentiate between self-posts, link posts, and comments + - each one will have different fields but REDDIT DOESN'T DOCUMENT THIS + - because reddit is stupid, that's why, I guess. + +### Parse data +Do I need to do anything here, actually, or is json fine? + +### Putting data in pinboard +So far I've done nothing. +- [ ] Get regular auth to work +- [ ] Figure out how to post an item to my feed as public +- [ ] Figure out how to post an item to my feed as private +- [ ] Enable a conditional; NSFW items get posted as private, regular items as public. +- [ ] Iterate through a list. 
diff --git a/main.py b/main.py new file mode 100644 index 0000000..5592c03 --- /dev/null +++ b/main.py @@ -0,0 +1,29 @@ +import pdb +import praw +import json +import os + +reddit = praw.Reddit(client_id=os.environ.get('REDDIT_ID'), + client_secret=os.environ.get('REDDIT_SECRET'), + user_agent='/u/ pynit-tasks', + username = os.environ.get('REDDIT_UN'), + password = os.environ.get('REDDIT_PW') + ) + +your_user = reddit.redditor(os.environ.get('REDDIT_UN')) +saved_posts = your_user.saved(limit=None) + +posts_to_save = [] +for link in saved_posts: + try: + posts_to_save.append([link.name, link.subreddit.display_name, link.url, link.author.name, link.title]) + except AttributeError: + # only a comment, not a saved post + pass + +# printing for test +# print(posts_to_save) + +with open('data.json', 'w') as outfile: + json.dump(posts_to_save, outfile, indent=2) +