-
Notifications
You must be signed in to change notification settings - Fork 7
Expand file tree
/
Copy pathmain.py
More file actions
75 lines (57 loc) · 2.04 KB
/
main.py
File metadata and controls
75 lines (57 loc) · 2.04 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import requests
import time
from termcolor import colored
# Per-request timeout (seconds) for the list downloads below. The original
# value "3000" was a string and never used; without a timeout a stalled
# server hangs the script forever.
timeout = 30
url = "https://api.proxyscrape.com/v4/free-proxy-list/get?request=display_proxies&protocol=http&proxy_format=ipport&format=text&timeout=20000"
url2 = "https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt"
headers = {
    "accept": "text/plain, */*; q=0.01",
    "accept-language": "en-US,en;q=0.8",
    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
}
# Download both public proxy lists. requests.get is the idiomatic form of
# requests.request("GET", ...); the explicit timeout bounds each fetch.
response = requests.get(url, headers=headers, timeout=timeout)
response2 = requests.get(url2, headers=headers, timeout=timeout)
print(response.text)
print(response2.text)
# Merge both lists, drop empty lines, and de-duplicate.
proxies = []
proxies.extend(response.text.split("\n"))
proxies.extend(response2.text.split("\n"))
proxies = list(set(filter(None, proxies)))
print("Amount of proxies after removing duplicates:", len(proxies))
time.sleep(3)
import urllib.request, socket

# Default timeout (seconds) applied to the urllib checks in is_bad_proxy().
socket.setdefaulttimeout(3)
def is_bad_proxy(pip):
    """Probe *pip* ("ip:port") as an HTTP proxy against api.ipify.org.

    Returns 0 if the request succeeds, the HTTP status code if the proxy
    answers with an HTTP error, and 1 on any other failure (refused
    connection, timeout from socket.setdefaulttimeout, DNS error, ...).
    """
    try:
        proxy_handler = urllib.request.ProxyHandler({"http": pip})
        opener = urllib.request.build_opener(proxy_handler)
        opener.addheaders = [("User-agent", "Mozilla/5.0")]
        # Use opener.open() directly instead of install_opener()+urlopen():
        # install_opener() mutates process-global state, so concurrent worker
        # threads would race and could route a check through the wrong proxy.
        # The with-block also closes the response, fixing a socket leak.
        with opener.open("http://api.ipify.org/") as sock:
            sock.read()
    except urllib.error.HTTPError as e:
        return e.code
    except Exception:
        return 1
    return 0
import concurrent.futures
working = []
badcount = 0
workingcount = 0
import threading

# Serializes updates to the shared counters/list: `n += 1` is a
# read-modify-write and is NOT atomic across the worker threads, so
# unsynchronized increments can lose counts.
_count_lock = threading.Lock()

def check_proxy(proxy):
    """Validate one proxy and record the outcome.

    Side effects (all under _count_lock where shared state is touched):
    increments badcount or workingcount, prints a colored running tally,
    and appends newline-terminated working proxies to `working`.
    """
    global badcount, workingcount
    if is_bad_proxy(proxy):
        with _count_lock:
            badcount += 1
            print(colored(f"{badcount} bad proxies", "red"))
    else:
        with _count_lock:
            workingcount += 1
            print(colored(f"{workingcount} working proxies", "green"))
            working.append(proxy + "\n")
# Time the validation pass over every harvested proxy. Leaving the
# executor's with-block waits for all submitted checks to finish.
t0 = time.time()
with concurrent.futures.ThreadPoolExecutor() as pool:
    pool.map(check_proxy, proxies)
elapsed = time.time() - t0
print("Time taken: ", elapsed, "seconds")
# Persist the proxies that answered successfully (already newline-terminated).
with open("proxies.txt", "w") as f:
    f.writelines(working)