Compare commits

...

4 Commits

Author  SHA1        Message                                         Date
jowj    bc9e36d571  Implement best practices                        5 years ago
jowj    5384007fcc  Rename main.py to reddit.py for clarity.        5 years ago
jowj    97e44e5874  Add a line about installing this.               5 years ago
jowj    801d8d177d  Generate actual pipfiles, requirements file.    5 years ago

@@ -6,12 +6,16 @@ verify_ssl = true
 [dev-packages]
 
 [packages]
-requests = "*"
-json = "*"
-pprint = "*"
-praw = "*"
-pysnooper = "*"
-urllib-parse = "*"
+requests = "==2.22.0"
+praw = "==6.4.0"
+certifi = "==2019.9.11"
+chardet = "==3.0.4"
+idna = "==2.8"
+prawcore = "==1.0.1"
+six = "==1.13.0"
+urllib3 = "==1.25.6"
+update_checker = "==0.16"
+websocket_client = "==0.56.0"
 
 [requires]
 python_version = "3.7"

Pipfile.lock generated (101 lines)

@@ -0,0 +1,101 @@
{
"_meta": {
"hash": {
"sha256": "6b64c77ab5a55c11d34da51420b7da13e4ffe1038bdf239f3a06a52dfe35d1bc"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.7"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"certifi": {
"hashes": [
"sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
"sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
],
"index": "pypi",
"version": "==2019.9.11"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"index": "pypi",
"version": "==3.0.4"
},
"idna": {
"hashes": [
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
],
"index": "pypi",
"version": "==2.8"
},
"praw": {
"hashes": [
"sha256:2e5c98e49fe60e5308255ed147b670d350f98281f84f582df30f87de727b6de2",
"sha256:cb8f85541ad4c6b10214ef9639acccfb5fed7ffee977be169b85357d2d2ea6d9"
],
"index": "pypi",
"version": "==6.4.0"
},
"prawcore": {
"hashes": [
"sha256:25dd14bf121bc0ad2ffc78e2322d9a01a516017105a5596cc21bb1e9a928b40c",
"sha256:ab5558efb438aa73fc66c4178bfc809194dea3ce2addf4dec873de7e2fd2824e"
],
"index": "pypi",
"version": "==1.0.1"
},
"requests": {
"hashes": [
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
],
"index": "pypi",
"version": "==2.22.0"
},
"six": {
"hashes": [
"sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd",
"sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
],
"index": "pypi",
"version": "==1.13.0"
},
"update-checker": {
"hashes": [
"sha256:59cfad7f9a0ee99f95f1dfc60f55bf184937bcab46a7270341c2c33695572453",
"sha256:70e39446fccf77b21192cf7a8214051fa93a636dc3b5c8b602b589d100a168b8"
],
"index": "pypi",
"version": "==0.16"
},
"urllib3": {
"hashes": [
"sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398",
"sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
],
"index": "pypi",
"version": "==1.25.6"
},
"websocket-client": {
"hashes": [
"sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9",
"sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"
],
"index": "pypi",
"version": "==0.56.0"
}
},
"develop": {}
}

@@ -9,6 +9,10 @@ specifically, comments and posts saved to your reddit user profile can be pulled
 the tag "added-by-pynnit" is also added to each entry moved to pinboard in this way. This allows for easy viewing of all imported links. I found this very useful when I was writing the script in the first place, but you may not want it.
 
 ## Using this bullshit
+### Installing
+I use `pipenv` pretty exclusively because of how it works with my editor. that's the only package manager i've actually tested with, but theoretically the typical `pip install -r requirements.txt` should work just fine.
+
+### Setting up the environment
 You need several things to use this project:
 - Reddit username
 - Reddit password
@@ -64,13 +68,6 @@ So far i've done nothing.
 - [ ] Enable a conditional; NSFW items get posted as private, regular items as public.
 - [X] Iterate through a list.
 
-#### IF WE RECEIVE OVER 5 MILLION DOLLARS I WILL:
-- [X] figure out how the fuck to compare urls/titles against already existing entries in pinboard
-  - and obviously don't add dupes
-  - HAH you don't have to worry about this, Pinboard will do this for you! Hurray.
-- [ ] figure out how to pull in RES saved items; that'll be weird slash impossible maybe
-
 ### Information about reddit (i'm so sorry)
 So, there are multiple kinds of reddit posts, and each kind of reddit post seems to have distinct names for the same things, which is REALLY fucking annoying. Its extra frustrating because there's not just a quick lookup for this, you have to just dig through Too Much json.
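
Note (illustration, not part of this change): the new "Setting up the environment" section and both scripts in this compare read their credentials from environment variables via `os.environ.get()`. A minimal pre-flight check, sketched here only as an example and using the exact variable names that appear in the diffs below, might look like:

```python
# Illustrative sketch only -- not part of this commit. Checks that the
# environment variables the scripts in this compare read are actually set.
import os
import sys

REQUIRED_VARS = [
    "REDDIT_ID",       # reddit app client id
    "REDDIT_SECRET",   # reddit app client secret
    "REDDIT_UN",       # reddit username
    "REDDIT_PW",       # reddit password
    "PINBOARD_TOKEN",  # pinboard API auth token
]

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    sys.exit("missing environment variables: " + ", ".join(missing))
print("all credentials present")
```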

@@ -1,21 +1,27 @@
 import json
 import os
-import requests
 import time
+import requests
 
-pinboard_token = os.environ.get("PINBOARD_TOKEN")
-pinboard_base_url = "https://api.pinboard.in/v1/"
-pinboard_auth_snippet = f"?auth_token={pinboard_token}"
+PINBOARD_TOKEN = os.environ.get("PINBOARD_TOKEN")
+PINBOARD_BASE_URL = "https://api.pinboard.in/v1/"
+PINBOARD_AUTH_SNIPPET = f"?auth_token={PINBOARD_TOKEN}"
 
 
 def get_all_posts():
-    get_post_snippet = f"posts/all?auth_token={pinboard_token}"
-    pinboard_url = pinboard_base_url + get_post_snippet
+    """
+    returns a list of all pins in pinboard account
+    """
+    get_post_snippet = f"posts/all?auth_token={PINBOARD_TOKEN}"
+    pinboard_url = PINBOARD_BASE_URL + get_post_snippet
     return requests.get(pinboard_url)
 
 
 def add_pin_url(reddit_dict):
+    """
+    adds a pin to pinboard and returns the response
+    """
     add_post_snippet = "posts/add"
     # headers = {'Content-type': 'application/json'}
     args = {
@@ -26,7 +32,7 @@ def add_pin_url(reddit_dict):
         'replace': 'no'
     }
 
-    post_url = pinboard_base_url + add_post_snippet + pinboard_auth_snippet
+    post_url = PINBOARD_BASE_URL + add_post_snippet + PINBOARD_AUTH_SNIPPET
     response = requests.get(post_url, params=args)
     # pdb.set_trace()
@@ -35,6 +41,10 @@ def add_pin_url(reddit_dict):
 def import_reddit_url_from_file(filename):
+    """
+    imports a list of reddit URLs and meta data from a file.
+    returns a json object of that data.
+    """
     with open(filename, 'r') as infile:
         data = json.loads(infile.read())
@@ -43,11 +53,10 @@ def import_reddit_url_from_file(filename):
 if __name__ == "__main__":
     """
-    You have to sleep for 3 seconds between requests or Maciej will Get Unhappy per
-    https://pinboard.in/api
+    You have to sleep for 3 seconds between requests or Maciej will Get Unhappy
+    per https://pinboard.in/api
     """
-    reddit_data = import_reddit_url_from_file("data.json")
-    for entry in reddit_data:
+    REDDIT_DATA = import_reddit_url_from_file("data.json")
+    for entry in REDDIT_DATA:
         post_response = add_pin_url(entry)
         time.sleep(3)
-        # print(post_response.text)
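
Note (illustration, not part of the diff): `add_pin_url()` is fed the dicts that the reddit script below writes to `data.json`, each carrying `title`, `tag`, `description`, and `url` keys. A hedged usage sketch with a made-up entry, assuming it runs inside this module (the compare view doesn't show the filename):

```python
# Made-up example entry, shaped like the dicts the reddit script builds;
# the URL and text here are invented purely for illustration.
sample_entry = {
    'title': 'Example saved post',
    'tag': 'python added-by-pynnit',
    'description': 'body text of the saved post or comment',
    'url': 'https://example.com/some-saved-link',
}

response = add_pin_url(sample_entry)  # defined earlier in this file
print(response.status_code)
```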

@@ -1,7 +1,7 @@
-import praw
 import json
 import os
-import pdb
+import praw
 
 def munge_idiot_data(reddit_dict):
     """
@@ -22,26 +22,37 @@ def munge_idiot_data(reddit_dict):
 if __name__ == "__main__":
-    reddit = praw.Reddit(client_id=os.environ.get('REDDIT_ID'),
+    REDDIT = praw.Reddit(client_id=os.environ.get('REDDIT_ID'),
                          client_secret=os.environ.get('REDDIT_SECRET'),
                          user_agent='/u/ pynit-tasks',
                          username=os.environ.get('REDDIT_UN'),
                          password=os.environ.get('REDDIT_PW')
                          )
-    your_user = reddit.redditor(os.environ.get('REDDIT_UN'))
-    saved_posts = your_user.saved(limit=None)
+    # this line is the most cursed line in programming
+    # REDDIT.redditor,
+    YOUR_USER = REDDIT.redditor(os.environ.get('REDDIT_UN'))
+    SAVED_POSTS = YOUR_USER.saved(limit=None)
 
-    posts_to_save = []
-    for link in saved_posts:
+    POSTS_TO_SAVE = []
+    for link in SAVED_POSTS:
         if hasattr(link, 'is_self'):
-            posts_to_save.append({'title':link.title, 'tag':link.subreddit.display_name + ' added-by-pynnit', 'description':link.selftext, 'url':link.permalink})
+            POSTS_TO_SAVE.append({
+                'title': link.title,
+                'tag': link.subreddit.display_name + ' added-by-pynnit',
+                'description': link.selftext,
+                'url': link.permalink
+            })
         elif hasattr(link, 'is_root'):
-            posts_to_save.append({'title':link.link_title, 'tag':link.subreddit.display_name + ' added-by-pynnit', 'description':link.body, 'url':link.link_url})
+            POSTS_TO_SAVE.append({
+                'title': link.link_title,
+                'tag': link.subreddit.display_name + ' added-by-pynnit',
+                'description': link.body,
+                'url': link.link_url
+            })
         else:
             print("shit is fucked.")
 
-    munged_data = munge_idiot_data(posts_to_save)
+    MUNGED_DATA = munge_idiot_data(POSTS_TO_SAVE)
     with open('data.json', 'w') as outfile:
-        json.dump(munged_data, outfile, indent=2)
+        json.dump(MUNGED_DATA, outfile, indent=2)
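
Note (alternative sketch, not what this commit does): the `hasattr(link, 'is_self')` / `hasattr(link, 'is_root')` checks above distinguish saved submissions from saved comments. PRAW also exposes model classes, so the same split could be made with `isinstance`; this sketch assumes the same environment variables and user agent as the script above:

```python
# Sketch of an isinstance-based variant of the hasattr checks above.
import os
import praw

reddit = praw.Reddit(client_id=os.environ.get('REDDIT_ID'),
                     client_secret=os.environ.get('REDDIT_SECRET'),
                     user_agent='/u/ pynit-tasks',
                     username=os.environ.get('REDDIT_UN'),
                     password=os.environ.get('REDDIT_PW'))

for item in reddit.redditor(os.environ.get('REDDIT_UN')).saved(limit=None):
    if isinstance(item, praw.models.Submission):
        print('submission:', item.title)
    elif isinstance(item, praw.models.Comment):
        print('comment on:', item.link_title)
```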

@@ -0,0 +1,11 @@
-i https://pypi.org/simple
certifi==2019.9.11
chardet==3.0.4
idna==2.8
praw==6.4.0
prawcore==1.0.1
requests==2.22.0
six==1.13.0
update-checker==0.16
urllib3==1.25.6
websocket-client==0.56.0