-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsydneyreddit.py
More file actions
113 lines (102 loc) · 3.63 KB
/
sydneyreddit.py
File metadata and controls
113 lines (102 loc) · 3.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import praw
import json
import requests
import tweepy
import time
import datetime
def main():
    """Poll r/sydney forever, tweeting new posts; restart the loop on any error.

    Runs until killed. Any exception (network drop, API error, parse
    failure) is logged and the whole polling loop is restarted after a
    10-second back-off.
    """
    restart = True
    while restart:
        try:
            while True:
                restart = False
                subreddit = setup_connection_reddit('sydney')
                post_dict, post_ids = tweet_creator(subreddit)
                # tweet_creator returns (False, False) when the newest
                # post was already tweeted.
                if post_dict is not False:
                    tweeter(post_dict, post_ids)
                print("[bot] Sleeping 10 secs")
                time.sleep(10)
        except Exception as e:
            # Top-level boundary: log, flag a restart, back off briefly.
            restart = True
            print("[bot] Exception caught: ", e)
            print("[bot] Exception caught - Sleeping 10 secs")
            time.sleep(10)
def tweet_creator(subreddit_info):
    """Fetch the newest submission and prepare it for tweeting.

    Returns a ({stripped_title: short_url}, [submission_id]) pair for the
    newest not-yet-tweeted post, or (False, False) when it was already
    tweeted. Returns None implicitly when the subreddit yields no posts
    (the caller's broad except absorbs the resulting unpack error).
    """
    post_dict = {}
    post_ids = []
    print("[bot] Getting posts from Reddit")
    # limit=1: only the single newest submission is considered per poll.
    for submission in subreddit_info.get_new(limit=1):
        post_dict[strip_title(submission.title)] = submission.url
        post_ids.append(submission.id)

    mini_post_dict = {}
    for post_title, post_link in post_dict.items():
        # Dedup is keyed on the stripped TITLE (add_id_to_file stores
        # titles, not submission ids), so check the title here too.
        if not duplicate_check(post_title):
            print("[bot] Generating short url using goo.gl")
            mini_post_dict[post_title] = shorten(post_link)
            return mini_post_dict, post_ids
        print("[bot] Skipped generating short URL")
        return False, False
def setup_connection_reddit(subreddit):
    """Connect to Reddit via PRAW and return a handle to *subreddit*.

    Parameters:
        subreddit: subreddit name without the /r/ prefix, e.g. 'sydney'.
    """
    # Renamed from `time` to avoid shadowing the imported time module.
    start_time = datetime.datetime.now().time()
    print("[bot] Start time: " + str(start_time) + "\n[bot] Setting up connection with Reddit")
    # BUG FIX: the original adjacent literals 'SydneyReddit' 'monitoring %s'
    # silently concatenated into the user agent 'SydneyRedditmonitoring %s';
    # a separating space was clearly intended.
    r = praw.Reddit('SydneyReddit monitoring %s' % (subreddit,))
    return r.get_subreddit(subreddit)
def shorten(url):
    """Shorten *url* with the goo.gl API; the key is line 5 of SydneyReddit.txt.

    Returns the shortened URL string (the 'id' field of the API response).

    NOTE(review): Google shut down the goo.gl URL Shortener API in 2019;
    this endpoint no longer works and needs replacing with another
    shortener service (e.g. TinyURL / Bitly) — confirm before deploying.
    """
    print("[bot] Starting URL shortening process")
    # `with` guarantees the key file is closed (original leaked the handle).
    with open('SydneyReddit.txt') as f:
        key = f.readlines()[4].strip()
    headers = {'content-type': 'application/json'}
    payload = {"longUrl": url}
    endpoint = "https://www.googleapis.com/urlshortener/v1/url?key=" + key
    response = requests.post(endpoint, data=json.dumps(payload), headers=headers)
    return json.loads(response.text)['id']
def duplicate_check(id):
    """Return True if *id* already appears in posted_posts.txt.

    BUG FIX: the original checked only the LAST line of the log
    (`lines[-1]`), so any post recorded earlier was re-tweeted, and it
    raised IndexError when the log file was empty. Every line is now
    scanned, and an empty file simply yields False.
    """
    print("[bot] Checking for duplicates")
    with open('posted_posts.txt', 'r') as log:
        for line in log:
            # Substring match, preserving the original `id in line` semantics.
            if id in line:
                print("[bot] Duplicate found")
                return True
    print("[bot] Duplicate not found")
    return False
def add_id_to_file(id):
    """Append *id* on its own line to posted_posts.txt (the dedup log).

    Accepts any value; it is stringified before writing. Note the bot
    passes the (ascii-stripped) post TITLE here, not the submission id.
    """
    print("[bot] Adding post to posted_posts.txt : " + str(id))
    # `with` closes the file on exit — the original's explicit close()
    # inside the with-block was redundant. Also avoid shadowing builtin `file`.
    with open('posted_posts.txt', 'a') as log:
        log.write(str(id) + "\n")
def strip_title(title):
    """Return *title* unchanged when shorter than 115 chars, else its
    first 114 characters followed by an ellipsis."""
    limit = 115
    if len(title) >= limit:
        return title[:limit - 1] + "..."
    return title
def tweeter(post_dict, post_ids):
    """Authenticate with Twitter via tweepy and post each title+link pair.

    Credentials are read from SydneyReddit.txt, one per line in order:
    access token, access token secret, consumer key, consumer secret.
    Each posted title is also appended to the dedup log.
    """
    # `with` guarantees the credentials file is closed (original leaked it).
    with open('SydneyReddit.txt') as f:
        lines = f.readlines()
    access_token = lines[0].strip()
    access_token_secret = lines[1].strip()
    consumer_key = lines[2].strip()
    consumer_secret = lines[3].strip()
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    for post, post_id in zip(post_dict, post_ids):
        # Drop non-ASCII chars; decode back to str so concatenation works
        # under Python 3 (the original's bytes + str would raise TypeError).
        title = post.encode('ascii', 'ignore').decode('ascii')
        status = title + " " + post_dict[post] + " #Sydney"
        print("[bot] Posting the following on twitter")
        print(status)
        api.update_status(status=status)
        # The dedup log records the TITLE (duplicate_check matches on it too).
        add_id_to_file(title)
if __name__ == '__main__':
main()