-import requests
-import urllib.parse
 import psaw
 import praw.exceptions
 import time
 import pendulum
+import webbrowser
 from datetime import datetime, timedelta

 import prawcore.exceptions
 from core import parse
 from core.history import SwitcharooLog
 from core.arguments import tracer as argparser
+from core.pushshift import get_comment_from_psaw, get_original_comment_from_psaw

 credentials = CredentialsLoader.get_credentials()['reddit']


 args = argparser.parse_args()

+
 def get_newest_id(subreddit, index=0):
     """Retrieves the newest post's id. Used for starting the last switcharoo history trackers"""
     return [i for i in subreddit.new(params={"limit": "1"})][index].url
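For orientation, a hedged usage sketch of `get_newest_id` at tracer startup. Note that despite its name it returns the newest submission's URL, not its id. The subreddit name and the `praw.Reddit` setup are illustrative assumptions, not taken from this diff:

```python
import praw

# Assumption: credentials as loaded above; the exact kwargs are illustrative.
reddit = praw.Reddit(client_id=credentials['client_id'],
                     client_secret=credentials['client_secret'],
                     user_agent="switcharoohelper tracer")

subreddit = reddit.subreddit("switcharoo")  # assumed target subreddit
newest_url = get_newest_id(subreddit)       # URL of the newest post
# index=1 returns the second-newest post, e.g. when the newest one
# is not part of the roo chain.
second_newest_url = get_newest_id(subreddit, index=1)
```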
@@ -55,33 +56,6 @@ def get_newest_id(subreddit, index=0):
 print("SwitcharooHelper Tracer v{} Ctrl+C to stop".format(consts.version))


-# Weird other way to get the data but it returns the edited version?
-def get_comment_from_psaw(parent_id, comment_id):
-    params = {'parent_id': f"t1_{parent_id}", "filter": "id,created_utc,edited,body"}
-    # Come on PushShift, percent coding is a standard
-    payload_str = urllib.parse.urlencode(params, safe=",")
-    r = requests.get("https://api.pushshift.io/reddit/comment/search/",
-                     params=payload_str)
-    j = r.json()
-    for i in j['data']:
-        if i['id'] == comment_id:
-            return i
-    return None
-
-
-def get_original_comment_from_psaw(comment_id):
-    params = {'ids': comment_id, "filter": "id,created_utc,body"}
-    # Come on PushShift, percent coding is a standard
-    payload_str = urllib.parse.urlencode(params, safe=",")
-    r = requests.get("https://api.pushshift.io/reddit/comment/search/",
-                     params=payload_str)
-    j = r.json()
-    if j.get('data', None):
-        if len(j['data']) > 0:
-            return j['data'][0]
-    return None
-
-
 def unable_to_find_link(url: parse.RedditURL, last_url: parse.RedditURL):
     print("Unable to find a link in this roo.")
     print(last_url.to_link(reddit))
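The two helpers deleted above now live in `core.pushshift` (see the new import at the top of the file). A minimal sketch of what that module presumably contains, reconstructed from the removed code; the shared `_search` helper is an editorial consolidation, not necessarily how the module is actually written:

```python
# core/pushshift.py (presumed) -- PushShift lookups for deleted roo comments.
import urllib.parse

import requests

COMMENT_SEARCH_URL = "https://api.pushshift.io/reddit/comment/search/"


def _search(params):
    # PushShift mishandles percent-encoded commas in filter lists, so build
    # the query string ourselves with "," marked safe.
    payload_str = urllib.parse.urlencode(params, safe=",")
    return requests.get(COMMENT_SEARCH_URL, params=payload_str).json()


def get_comment_from_psaw(parent_id, comment_id):
    """Search the parent's replies; this route can return the edited body."""
    j = _search({'parent_id': f"t1_{parent_id}", "filter": "id,created_utc,edited,body"})
    for i in j.get('data', []):
        if i['id'] == comment_id:
            return i
    return None


def get_original_comment_from_psaw(comment_id):
    """Direct id lookup; returns the body as originally ingested."""
    j = _search({'ids': comment_id, "filter": "id,created_utc,body"})
    if j.get('data'):
        return j['data'][0]
    return None
```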
@@ -100,35 +74,6 @@ def unable_to_find_link(url: parse.RedditURL, last_url: parse.RedditURL):
     return parse.RedditURL(url)


-def search_pushshift(last_url):
-    print("Searching PushShift for", last_url.comment_id)
-    # psaw leaves a little to be desired in default functionality
-    ps_comment = get_comment_from_psaw(comment.parent_id[3:], last_url.comment_id)
-    if ps_comment:
-        ps_comment = parse.parse_comment(ps_comment['body'])
-    pso_comment = get_original_comment_from_psaw(last_url.comment_id)
-    if pso_comment:
-        pso_comment = parse.parse_comment(pso_comment['body'])
-    if ps_comment and pso_comment:
-        if ps_comment == pso_comment:
-            url = ps_comment
-        else:
-            print("Two versions of comment, which one to use? (1/2)")
-            print(pso_comment.to_link(reddit), ps_comment.to_link(reddit))
-            option = input()
-            if option == "1":
-                url = pso_comment
-            else:
-                url = ps_comment
-    elif ps_comment:
-        url = ps_comment
-    elif pso_comment:
-        url = pso_comment
-    else:
-        url = parse.RedditURL("")
-    return url
-
-
 def add_comment(url: parse.RedditURL, start_url: parse.RedditURL = None):
     # Double check it's not already there
     q = log.search(comment_id=url.comment_id)
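`search_pushshift` moves too: the call sites below use `parse.search_pushshift(comment, last_url)`, passing the comment explicitly where the deleted version read a module-level `comment` global. A condensed sketch of the presumed relocated function in `core/parse.py`, reconstructed from the removed body (that `reddit`, `parse_comment`, and `RedditURL` are reachable inside `core.parse` is an assumption):

```python
# core/parse.py (presumed) -- recover a roo link for a deleted comment.
def search_pushshift(comment, last_url):
    print("Searching PushShift for", last_url.comment_id)
    # comment.parent_id is a fullname like "t1_abc123"; strip the type prefix.
    ps_comment = get_comment_from_psaw(comment.parent_id[3:], last_url.comment_id)
    if ps_comment:
        ps_comment = parse_comment(ps_comment['body'])
    pso_comment = get_original_comment_from_psaw(last_url.comment_id)
    if pso_comment:
        pso_comment = parse_comment(pso_comment['body'])
    # If the edited and original versions both parse but disagree, let the
    # operator pick; otherwise fall back to whichever one was found, relying
    # on the same URL truthiness the removed code used.
    if ps_comment and pso_comment and ps_comment != pso_comment:
        print("Two versions of comment, which one to use? (1/2)")
        print(pso_comment.to_link(reddit), ps_comment.to_link(reddit))
        return pso_comment if input() == "1" else ps_comment
    return ps_comment or pso_comment or RedditURL("")
```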
@@ -142,7 +87,13 @@ def add_comment(url: parse.RedditURL, start_url: parse.RedditURL = None):
     if q:
         print("Adjusting roo time")
         comment_time = q.time - timedelta(seconds=1)
-    log.add_comment(url.thread_id, url.comment_id, url.params.get("context", 0), comment_time)
+    try:
+        context = int(url.params.get("context", 0))
+    except ValueError:
+        print(f"Got {url.params['context']} for url {url}, what should it be?")
+        context = int(input())
+
+    log.add_comment(url.thread_id, url.comment_id, context, comment_time)



@@ -171,9 +122,9 @@ def add_comment(url: parse.RedditURL, start_url: parse.RedditURL = None):
     roo_count += 1

     last_url = url
-    if comment.body == "[deleted]":
+    if comment.body == "[deleted]" or comment.body == "[removed]":
         print("Comment was deleted")
-        url = search_pushshift(last_url)
+        url = parse.search_pushshift(comment, last_url)
     else:
         url = parse.parse_comment(comment.body)

@@ -182,15 +133,31 @@ def add_comment(url: parse.RedditURL, start_url: parse.RedditURL = None):
         print("Roo linked incorrectly, searching thread for link")
         new_last_url = parse.find_roo_comment(comment)
         if new_last_url and last_url:
-            new_last_url.params['context'] = str(int(new_last_url.params.get('context', 0)) +
-                                                 int(last_url.params.get('context', 0)))
+            try:
+                new_last_url.params['context'] = str(int(new_last_url.params.get('context', 0)) +
+                                                     int(last_url.params.get('context', 0)))
+            except ValueError:
+                print(f"Got {new_last_url.params['context']} and {last_url.params['context']}, what should it be?")
+                new_last_url.params['context'] = int(input())
         if new_last_url:
-            print(last_url.to_link(reddit), "should actually be", new_last_url.to_link(reddit))
-            last_url = new_last_url
+            if args.discover:
+                print("Should", last_url.to_link(reddit), "actually be", new_last_url.to_link(reddit), "?")
+                print("(y/n)")
+                webbrowser.open(last_url.to_link(reddit))
+                webbrowser.open(new_last_url.to_link(reddit))
+                option = input()
+                if option == "y":
+                    last_url = new_last_url
+            else:
+                last_url = new_last_url
             comment = reddit.comment(last_url.comment_id)
-            url = parse.parse_comment(comment.body)
+            if comment.body == "[deleted]" or comment.body == "[removed]":
+                print("Comment was deleted")
+                url = parse.search_pushshift(comment, last_url)
+            else:
+                url = parse.parse_comment(comment.body)
         else:
-            url = search_pushshift(last_url)
+            url = parse.search_pushshift(comment, last_url)

     if args.discover:
         add_comment(last_url, start_url=start_url)