AAAAAH ITS WORKING

- changed requests.post() to requests.get()
- changed data= to params= in requests.get() section for adding a pin
- enabled looping to iterate through large dictionaries instead of 1-offs
- removed author from json; wasn't doing anything with it anyway
- re-arranged url construction in pinboard.py
- added time.sleep(3) to comply with pinboard api rules
master
jowj 5 years ago
parent ebb3707ba2
commit f1f878a404

@ -11,6 +11,7 @@ json = "*"
pprint = "*"
praw = "*"
pysnooper = "*"
urllib-parse = "*"
[requires]
python_version = "3.7"

@ -3,7 +3,25 @@ import json
import os
import pdb
# Handle reddit requests
def munge_idiot_data(reddit_dict):
    """Ensure every entry's 'url' value is a fully qualified URL.

    PRAW's *.url / *.permalink properties are inconsistent about whether
    they return absolute URLs or site-relative paths, so every URL that
    comes through is inspected here and relative ones are prefixed with
    the reddit origin.

    Args:
        reddit_dict: list of dicts, each carrying a 'url' key.
            The list is mutated in place.

    Returns:
        The same list, with every 'url' made absolute.
    """
    for entry in reddit_dict:
        # Use a prefix test rather than a substring test: the old
        # `'https' in url` check skipped relative paths that merely
        # contained "https" somewhere, and mangled plain http:// links
        # by prefixing them a second time.
        if not entry['url'].startswith(('http://', 'https://')):
            entry['url'] = 'https://reddit.com' + entry['url']
    return reddit_dict
if __name__ == "__main__":
reddit = praw.Reddit(client_id=os.environ.get('REDDIT_ID'),
client_secret=os.environ.get('REDDIT_SECRET'),
user_agent='/u/ pynit-tasks',
@ -12,19 +30,18 @@ reddit = praw.Reddit(client_id=os.environ.get('REDDIT_ID'),
)
your_user = reddit.redditor(os.environ.get('REDDIT_UN'))
saved_posts = your_user.saved(limit=1)
saved_posts = your_user.saved(limit=None)
posts_to_save = []
for link in saved_posts:
if hasattr(link, 'is_self'):
posts_to_save.append({'title':link.title, 'tag':link.subreddit.display_name, 'author':link.author.name, 'description':link.selftext, 'url':link.permalink})
posts_to_save.append({'title':link.title, 'tag':link.subreddit.display_name + ' added-by-pynnit', 'description':link.selftext, 'url':link.permalink})
elif hasattr(link, 'is_root'):
posts_to_save.append({'title':link.link_title, 'tag':link.subreddit.display_name, 'author':link.author.name, 'description':link.body, 'url':link.link_url})
posts_to_save.append({'title':link.link_title, 'tag':link.subreddit.display_name + ' added-by-pynnit', 'description':link.body, 'url':link.link_url})
else:
print("shit is fucked.")
# print(posts_to_save)
munged_data = munge_idiot_data(posts_to_save)
with open('data.json', 'w') as outfile:
json.dump(posts_to_save, outfile, indent=2)
json.dump(munged_data, outfile, indent=2)

@ -1,11 +1,12 @@
import json
import os
import requests
import pysnooper
import pdb
import time
pinboard_token = os.environ.get("PINBOARD_TOKEN")
pinboard_base_url = "https://api.pinboard.in/v1/"
pinboard_auth_snippet = f"?auth_token={pinboard_token}"
def get_all_posts():
get_post_snippet = f"posts/all?auth_token={pinboard_token}"
@ -15,8 +16,8 @@ def get_all_posts():
def add_pin_url(reddit_dict):
add_post_snippet = f"posts/add?auth_token={pinboard_token}"
headers = {'Content-type': 'application/json'}
add_post_snippet = "posts/add"
# headers = {'Content-type': 'application/json'}
args = {
'url': reddit_dict['url'],
'description': reddit_dict['title'],
@ -25,19 +26,28 @@ def add_pin_url(reddit_dict):
'replace': 'no'
}
post_url = pinboard_base_url + add_post_snippet
args_json = json.dumps(args)
response = requests.post(post_url, data=args_json, headers=headers)
post_url = pinboard_base_url + add_post_snippet + pinboard_auth_snippet
response = requests.get(post_url, params=args)
# pdb.set_trace()
print(response.text)
return response
def import_reddit_url_from_file(filename):
    """Load the list of saved-post dicts dumped to JSON by the reddit exporter.

    Args:
        filename: path to a JSON file containing a list of post dicts.

    Returns:
        The full list of post dicts, so the caller can loop over all of
        them (the old behavior of returning only ``data[0]`` is gone).
    """
    # Debug leftovers removed: the @pysnooper.snoop() decorator and the
    # commented pdb trace, plus the stale unreachable `return data[0]`
    # that the diff left behind above the real return.
    with open(filename, 'r') as infile:
        data = json.loads(infile.read())
    return data
if __name__ == "__main__":
    # Pinboard's API terms require at least 3 seconds between requests
    # (per https://pinboard.in/api), hence the sleep after every add.
    # Note: the stale pre-loop call `add_pin_url(reddit_data)` left over
    # from the old diff is removed -- it posted the entire list as a
    # single malformed pin before the loop even started.
    reddit_data = import_reddit_url_from_file("data.json")
    for entry in reddit_data:
        add_pin_url(entry)
        time.sleep(3)

Loading…
Cancel
Save