import datetime
import socket
import time
import urllib.error
import urllib.request

import requests
from bs4 import BeautifulSoup
from sqlalchemy import create_engine, text


e = create_engine('sqlite:///database.db')
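
# NOTE: the tables used below are assumed to already exist in database.db;
# this script never creates them. The schema sketched here is inferred from
# the queries in this file; column types and key constraints are assumptions
# (the insert-or-update logic implies IP and testurl are unique):
#
#   CREATE TABLE proxylists (IP TEXT PRIMARY KEY, Port TEXT, insertdate TEXT,
#                            lastupdate TEXT, basictest TEXT, urltest TEXT);
#   CREATE TABLE proxyprovider (baseurl TEXT PRIMARY KEY, lastupdate TEXT,
#                               recordsfound INTEGER);
#   CREATE TABLE proxytest (testurl TEXT PRIMARY KEY, successrecords INTEGER,
#                           testdate TEXT);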


# Retrieve a proxy list, either from the RapidAPI endpoint or by scraping an
# HTML table of free proxies. Returns a list of {'IP': ..., 'Port': ...} dicts.
def get_proxy_list(url):
    proxieslist = []
    if 'rapidapi' in url:
        querystring = {"limit": "150", "type": "HTTPS"}

        headers = {
            'x-rapidapi-host': "proxypage1.p.rapidapi.com",
            'x-rapidapi-key': "6c57d35416msh577a78de53cc96ap18190ajsnaaa53be816e6",
            'content-type': "application/x-www-form-urlencoded"
        }

        response = requests.get(url, headers=headers, params=querystring)
        for data in response.json():
            proxieslist.append({'IP': str(data['ip']), 'Port': str(data['port'])})

    else:
        r = requests.get(url)
        proxy_response = BeautifulSoup(r.content, 'lxml')
        table = proxy_response.find('table')
        rows = table.find_all('tr')
        for row in rows:
            # read the <td> cells instead of raw .contents, which is brittle
            # and would also trip over the <th> header row
            cells = row.find_all('td')
            if len(cells) < 7:
                continue
            ip = cells[0].text
            port = cells[1].text
            secureconn = cells[6].text

            # keep only proxies that advertise HTTPS support
            if secureconn == 'yes':
                proxieslist.append({'IP': ip, 'Port': port})

    return proxieslist
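
# Illustrative return value (made-up addresses):
#   [{'IP': '203.0.113.7', 'Port': '8080'}, {'IP': '198.51.100.23', 'Port': '3128'}]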


# Probe a single proxy ('IP:Port') against testurl. Returns a falsy value if
# the proxy answered; an HTTP status code or True otherwise.
def is_bad_proxy(pip, testurl):
    try:
        proxy_handler = urllib.request.ProxyHandler({'http': pip})
        opener = urllib.request.build_opener(proxy_handler)
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        urllib.request.install_opener(opener)
        req = urllib.request.Request(testurl)
        urllib.request.urlopen(req)
    except urllib.error.HTTPError as err:  # 'err', not 'e': avoid shadowing the engine
        print('Error code: ', err.code)
        return err.code
    except Exception as detail:
        print("ERROR:", detail)
        return True
    return False


# Quick liveness check: ask httpbin for our apparent origin IP through each
# proxy and timestamp the ones that actually relay traffic.
def basicproxytest(proxies, date):
    with e.begin() as conn:  # transaction commits when the block exits
        for proxy in proxies:
            line = 'https://' + proxy['IP'] + ':' + proxy['Port']
            proxy_dict = {'http': line, 'https': line}  # don't rebind the loop iterable
            print(proxy_dict)
            try:
                testIP = requests.get('https://httpbin.org/ip', proxies=proxy_dict, timeout=0.5)
                responseIP = testIP.json()['origin']
                origin = responseIP.split(',')
                # the proxy relays correctly only if httpbin sees the proxy's IP
                if origin[0] == proxy['IP']:
                    print(proxy['IP'])
                    conn.execute(text("UPDATE proxylists SET basictest = :date WHERE IP = :ip"),
                                 {'date': date, 'ip': proxy['IP']})
            except Exception:
                print('Bad proxy')

    print("The SQLite connection is closed")


# Insert new proxies or refresh lastupdate for known ones. The upsert assumes
# IP is the primary key of proxylists, as the original insert-then-update
# fallback implied (requires SQLite 3.24+).
def proxiestodb(proxies, date):
    with e.begin() as conn:
        for proxy in proxies:
            conn.execute(text(
                "INSERT INTO proxylists (IP, Port, insertdate, lastupdate) "
                "VALUES (:ip, :port, :date, :date) "
                "ON CONFLICT(IP) DO UPDATE SET lastupdate = :date"),
                {'ip': proxy['IP'], 'port': proxy['Port'], 'date': date})
            print('Data has been stored successfully')

    print("The SQLite connection is closed")


# Record when each provider was last scraped and how many proxies it returned.
def updateprovidertable(url, date, records):
    recordsfound = len(records)
    try:
        with e.begin() as conn:
            conn.execute(text("UPDATE proxyprovider SET lastupdate = :date, "
                              "recordsfound = :n WHERE baseurl = :url"),
                         {'date': date, 'n': recordsfound, 'url': url})
        print('Data has been updated successfully')
    except Exception:
        print('Error while updating provider table!')

    print("The SQLite connection is closed")


# Test every 'IP:Port' string in proxies against https://<url> and record the
# results. Note the input shape: plain strings here, dicts elsewhere.
def functesturl(proxies, url):
    testurl = 'https://' + url
    goodproxy = []
    badproxy = []
    dataformat = datetime.datetime.now().strftime('%Y-%m-%d,%H:%M:%S')

    socket.setdefaulttimeout(120)
    with e.begin() as conn:
        for currentproxy in proxies:
            proxy_IP = currentproxy.split(':')

            if is_bad_proxy(currentproxy, testurl):
                print('not working. . . .')
                badproxy.append(currentproxy)
            else:
                print('working . . .')
                conn.execute(text("UPDATE proxylists SET urltest = :date WHERE IP = :ip"),
                             {'date': dataformat, 'ip': proxy_IP[0]})
                goodproxy.append(currentproxy)

        recordsfound = len(goodproxy)
        # upsert: assumes testurl is unique in proxytest, as the original
        # insert-then-update fallback implied (requires SQLite 3.24+)
        conn.execute(text(
            "INSERT INTO proxytest (testurl, successrecords, testdate) "
            "VALUES (:url, :n, :date) "
            "ON CONFLICT(testurl) DO UPDATE SET successrecords = :n, testdate = :date"),
            {'url': testurl, 'n': recordsfound, 'date': dataformat})

    print('completed . . ')
    print("The SQLite connection is closed")


# Entry point for adding or refreshing a single proxy record, given as a dict
# with 'IP' and 'Port' keys.
def updatedb(jsondata):
    proxylist = [jsondata]
    dataformat = datetime.datetime.now().strftime('%Y-%m-%d,%H:%M:%S')

    proxiestodb(proxylist, dataformat)
    print('Proxies --> Database completed')

    basicproxytest(proxylist, dataformat)
    print('Proxies --> basicproxytest completed')


# Fetch proxies from every provider, store them, and run the basic liveness test.
def backendserverupdate():
    urls = ["https://www.sslproxies.org/",
            "https://free-proxy-list.net/",
            "https://proxypage1.p.rapidapi.com/v1/tier1"]

    for url in urls:
        print(url)

        proxies = get_proxy_list(url)
        print(proxies)

        dataformat = datetime.datetime.now().strftime('%Y-%m-%d,%H:%M:%S')

        updateprovidertable(url, dataformat, proxies)

        proxiestodb(proxies, dataformat)
        print('Proxies --> Database completed')

        basicproxytest(proxies, dataformat)
        print('Proxies --> basicproxytest completed')


if __name__ == '__main__':
    while True:
        print('Updating the server...')
        backendserverupdate()
        time.sleep(600)  # refresh every 10 minutes
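
# updatedb() and functesturl() are never called from the main loop above; they
# look like entry points for an external caller (e.g. a web backend).
# Illustrative usage, with made-up values:
#
#   updatedb({'IP': '203.0.113.7', 'Port': '8080'})
#   functesturl(['203.0.113.7:8080'], 'example.com')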