2626
2727SITE = config ['site' ]['siteNum' ]
2828URL = config ['site' ]['customSite' ]
29- PROXY = config ['lists' ]['proxyList' ]
29+ PROXY = config ['proxy' ]['enableProxy' ]
30+ PROTOCOL = config ['proxy' ]['proxyProtocol' ]
31+ PROXYLIST = config ['proxy' ]['proxyList' ]
32+
3033
3134# Site URLs
3235URLS = {
4447 12 :"https://youtube.com/%word%"
4548}
4649
50+ # Proxy List
51+ proxyDict = {}
52+
def generate_pw(size=16, chars=string.ascii_uppercase + string.digits + string.ascii_lowercase):
    """Return a random password of *size* characters drawn from *chars*.

    Args:
        size: length of the generated password (default 16).
        chars: pool of candidate characters (default: ASCII letters + digits).

    Returns:
        A str of *size* randomly chosen characters.
    """
    # Use secrets, not random: the Mersenne Twister behind random.choice is
    # predictable and unsuitable for passwords/tokens.  Local import because
    # the file's import block is outside this view.
    import secrets
    return ''.join(secrets.choice(chars) for _ in range(size))
4955
@@ -55,6 +61,20 @@ def replace(word):
5561 else :
5662 print ("instagram" )
5763
def get_proxy():
    """Pick a random proxy address from the configured proxy list file.

    Re-reads the PROXYLIST file on every call so edits to the list are
    picked up without a restart.

    Returns:
        A proxy address string, or None (after printing a diagnostic)
        when proxy support is disabled or the list is empty.
    """
    if PROXY and PROXYLIST != []:
        # Context manager guarantees the file handle is closed even if
        # reading raises, unlike the bare open()/close() pair.
        with open(PROXYLIST, 'r') as fx:
            # Filter out blank lines: a trailing newline in the file would
            # otherwise make "" a selectable "proxy".
            proxies = [line for line in fx.read().split('\n') if line.strip()]
        return random.choice(proxies)
    if not PROXY:
        print("Proxy support is disabled. Please enable it in the config.")
    elif PROXYLIST == []:
        print("No proxies available to use.")
77+
5878def taken (word , service , error = None ):
5979 if error != None :
6080 print (str (word ) + " is " + colored ('TAKEN' , 'red' , attrs = ['bold' ]) + " on " + str (service ) + " because " + str (error ))
@@ -67,6 +87,9 @@ def available(word, service, link):
6787 fx .write (link + "\n " )
6888 fx .close ()
6989
def manual(response, word, service):
    """Print a notice that *word* could not be auto-classified on *service*.

    Args:
        response: HTTP response object; only its status_code is used.
        word: the username that was checked.
        service: name of the site the check ran against.
    """
    status = str(response.status_code)
    notice = ("The username " + word + " requires manual verification on "
              + service + " (" + status + ")")
    print(notice)
92+
7093def log_result (response , word , link , matches = None ):
7194 service = re .search (DOMAIN , link ).group (1 )
7295 if matches != None :
@@ -77,7 +100,7 @@ def log_result(response, word, link, matches=None):
77100 elif matches [2 ]:
78101 taken (word , service )
79102 else :
80- print ( "The username " + word + " requires manual verification on " + service + " (" + str ( response . status_code ) + ")" )
103+ manual ( response , word , service )
81104
82105 elif response .status_code == 200 :
83106 if int (SITE ) == 3 : # Twitter
@@ -116,14 +139,19 @@ def log_result(response, word, link, matches=None):
116139 elif int (SITE ) == 8 :
117140 available (word , service , link )
118141 else :
119- print ( "The username " + word + " requires manual verification on " + service + " (" + str ( response . status_code ) + ")" )
142+ manual ( response , word , service )
120143 elif response .status_code == 404 :
121144 available (word , service , link )
122145 else :
123- print ( "The username " + word + " requires manual verification on " + service + " (" + str ( response . status_code ) + ")" )
146+ manual ( response , word , service )
124147
def get_cookie():
    """Fetch the configured site's landing page and return its cookies.

    When proxy support is enabled, draws a fresh proxy via get_proxy()
    and stores it in the shared proxyDict under the configured protocol
    before issuing the request.
    """
    target = URLS[int(SITE)]
    if not PROXY:
        return requests.get(target).cookies
    # Rotate to a freshly selected proxy for this request.
    proxyDict[PROTOCOL] = get_proxy()
    return requests.get(target, proxies=proxyDict).cookies
128156
129157def ready_payload (word ):
@@ -151,13 +179,21 @@ def prepare_headers(cookie):
def send_get(words):
    """GET each candidate username's profile URL and log the outcome.

    Args:
        words: iterable of usernames to check.  Each is substituted into
            the site URL template by replace(); the HTTP response is then
            classified by log_result().
    """
    # Iterate the sequence directly instead of range(words.__len__()).
    for word in words:
        link = replace(word)
        if PROXY:
            # Rotate to a fresh proxy before every request.
            proxyDict[PROTOCOL] = get_proxy()
            r = requests.get(link, proxies=proxyDict)
        else:
            r = requests.get(link)
        log_result(r, word, link)
156188
157189def parse_page (words ):
158190 for w in range (words .__len__ ()):
159191 link = replace (words [w ])
160- r = requests .get (link )
192+ if PROXY :
193+ proxyDict [PROTOCOL ] = get_proxy ()
194+ r = requests .get (link , proxies = proxyDict )
195+ else :
196+ r = requests .get (link )
161197 page = r .content
162198 soup = BeautifulSoup (page , "html.parser" )
163199 matches = []
@@ -185,9 +221,14 @@ def send_post(words):
185221 cookie = get_cookie ()
186222 header = prepare_headers (cookie )
187223 link = URLS [int (SITE )]
224+ r = None
188225 for w in range (words .__len__ ()):
189226 payload = ready_payload (words [w ])
190- r = requests .post (URLS [int (SITE )], json = payload , headers = header , cookies = cookie )
227+ if PROXY :
228+ proxyDict [PROTOCOL ] = get_proxy ()
229+ r = requests .post (URLS [int (SITE )], json = payload , headers = header , cookies = cookie , proxies = proxyDict )
230+ else :
231+ r = requests .post (URLS [int (SITE )], json = payload , headers = header , cookies = cookie )
191232 log_result (r , words [w ], link )
192233
193234def main ():
@@ -217,7 +258,7 @@ def main():
217258 elif ans == "N" :
218259 confirm = input ("Continue executing script? (y|N)" )
219260 if confirm == "y" :
220- print ("_________________________________\n | SERVICE | VALUE TO ENTER |\n _________________________________\n | CUSTOM | 1 |\n | MINECRAFT | 2 | \n | TWITTER | 3 |\n | INSTAGRAM | 4 |\n | STEAM ID | 5 |\n | STEAM GROUP | 6 |\n | SOUNDCLOUD | 7 |\n | TWITCH | 8 |\n | MIXER | 9 |\n | GITHUB | 10 |\n | ABOUT.ME | 11 |\n | YOUTUBE | 12 |\n _________________________________\n Quit? (y/N)\n " )
261+ print ("_________________________________\n | SERVICE | VALUE TO ENTER |\n _________________________________\n | CUSTOM | 1 |\n | TWITTER | 3 |\n | INSTAGRAM | 4 |\n | STEAM ID | 5 |\n | STEAM GROUP | 6 |\n | SOUNDCLOUD | 7 |\n | TWITCH | 8 |\n | MIXER | 9 |\n | GITHUB | 10 |\n | ABOUT.ME | 11 |\n | YOUTUBE | 12 |\n _________________________________\n Quit? (y/N)\n " )
221262 SITE = input ("Enter the number from the table above with the site you want to check..." )
222263
223264 if SITE .isdigit ():
0 commit comments