Skip to content

Commit dd9fcf5

Browse files
authored
final_version
0 parents  commit dd9fcf5

File tree

16 files changed

+1316
-0
lines changed

16 files changed

+1316
-0
lines changed

Procfile

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
web: sh run.sh

README.md

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# Proxy Aggregator API
2+
3+
4+
## install
5+
6+
```
7+
pip install -r requirements.txt
8+
```
9+
10+
## run
11+
```
12+
sh run.sh
13+
python app.py
14+
15+
```
16+
17+
Then open http://localhost:5000/ to reach the home page of the application.
18+
19+
## Authors
20+
21+
* **Sharat Gujamagadi** - *552567* -

Server.py

Lines changed: 195 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,195 @@
1+
import datetime
2+
3+
import requests
4+
from bs4 import BeautifulSoup
5+
from sqlalchemy import create_engine
6+
import urllib.request
7+
import socket
8+
import urllib.error
9+
import time
10+
import json
11+
12+
13+
# SQLAlchemy engine for the local SQLite file shared with app.py.
e = create_engine('sqlite:///database.db')
14+
15+
# function to retrieve proxy lists
def get_proxy_list(url):
    """Fetch HTTPS proxies from *url*.

    Two source layouts are handled:
      * a rapidapi JSON endpoint (detected by 'rapidapi' in the URL);
      * an HTML page whose first <table> lists proxies row by row
        (IP in cell 0, port in cell 1, 'yes'/'no' HTTPS flag in cell 6).

    :param url: provider URL to scrape.
    :return: list of {'IP': str, 'Port': str} dicts; empty when the page
             layout is not recognised.
    """
    proxieslist = []
    if 'rapidapi' in url:
        querystring = {"limit": "150", "type": "HTTPS"}
        headers = {
            'x-rapidapi-host': "proxypage1.p.rapidapi.com",
            # SECURITY: hard-coded API key checked into source control —
            # move it to an environment variable / secrets store.
            'x-rapidapi-key': "6c57d35416msh577a78de53cc96ap18190ajsnaaa53be816e6",
            'content-type': "application/x-www-form-urlencoded"
        }
        # timeout added: a hung provider must not stall the whole update loop
        response = requests.get(url, headers=headers, params=querystring, timeout=30)
        response.raise_for_status()
        for data in response.json():
            dict_line = {'IP': str(data['ip']), 'Port': str(data['port'])}
            proxieslist.append(dict_line)
    else:
        r = requests.get(url, timeout=30)
        proxy_response = BeautifulSoup(r.content, 'lxml')
        table = proxy_response.find('table')
        if table is None:
            # Layout changed (or an error page came back): nothing to parse.
            return proxieslist
        rows = table.find_all('tr')
        for row in rows:
            # Skip header/short rows that lack the expected 7 cells.
            if len(row.contents) < 7:
                continue
            ip = row.contents[0].text
            port = row.contents[1].text
            secureconn = row.contents[6].text

            if secureconn == 'yes':
                dict_line = {'IP': ip, 'Port': port}
                proxieslist.append(dict_line)

    return proxieslist
49+
50+
def is_bad_proxy(pip, testurl):
    """Probe *testurl* through the HTTP proxy *pip* ('host:port').

    :param pip: proxy address as 'IP:Port'.
    :param testurl: URL to fetch through the proxy.
    :return: False when the request succeeds; the HTTP status code on an
             HTTPError; True on any other failure. Callers treat any truthy
             result as "bad proxy".
    """
    try:
        proxy_handler = urllib.request.ProxyHandler({'http': pip})
        opener = urllib.request.build_opener(proxy_handler)
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        req = urllib.request.Request(testurl)
        # Use the opener directly instead of install_opener(): installing a
        # global opener would silently route every other urlopen() call in
        # this process through the proxy under test. Close the response so
        # the socket is released.
        with opener.open(req):
            pass
    except urllib.error.HTTPError as e:
        print('Error code: ', e.code)
        return e.code
    except Exception as detail:
        print("ERROR:", detail)
        return True
    return False
65+
66+
67+
def basicproxytest(proxies, date):
    """Check each proxy against https://httpbin.org/ip; when the echoed
    origin IP matches the proxy's own IP, stamp *date* into the
    proxylists table's `basictest` column.

    :param proxies: iterable of {'IP': str, 'Port': str} dicts.
    :param date: timestamp string written for passing proxies.
    """
    conn = e.connect()
    try:
        for proxy in proxies:
            line = 'https://' + proxy['IP'] + ':' + proxy['Port']
            # Distinct local name — the original rebound the `proxies`
            # parameter here, clobbering it mid-iteration.
            proxy_map = {'http': line, 'https': line}
            print(proxy_map)
            try:
                testIP = requests.get('https://httpbin.org/ip', proxies=proxy_map, timeout=0.5)
                resposeIP = testIP.json()['origin']
                origin = resposeIP.split(',')
                if origin[0] == proxy['IP']:
                    print(proxy['IP'])
                    conn.execute("UPDATE proxylists SET basictest = ? WHERE IP = ?",
                                 (date, proxy['IP']))
            # Narrowed from a bare `except:`: only network/parse failures
            # mean "bad proxy"; anything else should surface.
            except (requests.RequestException, ValueError, KeyError):
                print('Bad Proxies')
    finally:
        # Always release the connection, even if a DB error escapes the loop.
        conn.close()
        print("The SQLite connection is closed")
87+
88+
def proxiestodb(proxies, date):
    """Insert proxies into proxylists, falling back to an UPDATE of
    `lastupdate` when the insert fails (presumably a uniqueness
    constraint on IP — TODO confirm against the schema).

    :param proxies: iterable of {'IP': str, 'Port': str} dicts.
    :param date: timestamp string used for insertdate/lastupdate.
    """
    conn = e.connect()
    try:
        for proxy in proxies:
            try:
                conn.execute("insert into proxylists (IP, Port, insertdate, lastupdate) values (?, ?, ?, ?)",
                             (proxy['IP'], proxy['Port'], date, date))
                print('Data has been stored successfully')
            # Narrowed from a bare `except:` so Ctrl-C still interrupts.
            except Exception:
                conn.execute("UPDATE proxylists SET lastupdate = ? WHERE IP = ?",
                             (date, proxy['IP']))
                print('Lastupdate has been stored sucessfully')
    finally:
        # Always release the connection, even if the UPDATE itself fails.
        conn.close()
        print("The SQLite connection is closed")
103+
104+
def updateprovidertable(url, date, records):
    """Record in proxyprovider that *url* was scraped at *date*,
    yielding len(*records*) proxies.

    :param url: provider base URL (matched against `baseurl`).
    :param date: timestamp string.
    :param records: the scraped proxy list (only its length is stored).
    """
    # Keep the count in its own name — the original overwrote the
    # `records` parameter with its length.
    recordcount = len(records)
    conn = e.connect()
    try:
        try:
            conn.execute("UPDATE proxyprovider SET lastupdate = ?, recordsfound = ? WHERE baseurl = ?",
                         (date, recordcount, url))
            msg = 'Data has been updated successfully'
            print(msg)
        # Narrowed from a bare `except:`.
        except Exception:
            msg = 'Error while updating provider table !'
            print(msg)
    finally:
        conn.close()
        print("The SQLite connection is closed")
119+
120+
121+
def functesturl(proxies, url):
    """Test every proxy in *proxies* ('IP:Port' strings) against https://<url>.

    Working proxies get their `urltest` column stamped; an aggregate row
    (url, success count, timestamp) is upserted into proxytest.

    :param proxies: iterable of 'IP:Port' strings.
    :param url: bare hostname/path; 'https://' is prepended here.
    """
    testurl = 'https://' + url
    conn = e.connect()
    goodproxy = []
    badproxy = []
    date = str(datetime.datetime.now())
    # 'YYYY-MM-DD,HH:MM:SS' — the timestamp format used throughout this module.
    dataformat = date[0:10] + ',' + date[11:19]

    # NOTE(review): process-wide side effect — this affects every socket in
    # the process, not just the probes below.
    socket.setdefaulttimeout(120)
    try:
        for currentproxy in proxies:
            proxy_IP = currentproxy.split(':')

            if is_bad_proxy(currentproxy, testurl):
                print('not working. . . .')
                badproxy.append(currentproxy)
            else:
                print('working . . .')
                conn.execute("UPDATE proxylists SET urltest = ? WHERE IP = ?",
                             (dataformat, proxy_IP[0]))
                goodproxy.append(currentproxy)

        recordsfound = len(goodproxy)
        try:
            conn.execute("insert into proxytest (testurl, successrecords, testdate) values (?, ?, ?)",
                         (testurl, recordsfound, dataformat))
        # Narrowed from a bare `except:`; a row for this URL already
        # exists, so refresh it instead.
        except Exception:
            conn.execute("UPDATE proxytest SET successrecords = ?, testdate = ? WHERE testurl = ?",
                         (recordsfound, dataformat, testurl))
    finally:
        # Always release the connection, even if a DB error escapes.
        conn.close()
        print('completed . . ')
        print("The SQLite connection is closed")
154+
155+
156+
def updatedb(jsondata):
    """Persist one proxy record and run the basic reachability test on it.

    :param jsondata: a single {'IP': ..., 'Port': ...} dict.
    """
    now = str(datetime.datetime.now())
    stamp = now[0:10] + ',' + now[11:19]
    batch = [jsondata]

    proxiestodb(batch, stamp)
    print('Proxies --> Database completed ')

    basicproxytest(batch, stamp)
    print('proxies --> basicproxytest completed')
167+
168+
def backendserverupdate():
    """Scrape every configured provider, then store and health-check the proxies."""
    provider_urls = [
        "https://www.sslproxies.org/",
        "https://free-proxy-list.net/",
        "https://proxypage1.p.rapidapi.com/v1/tier1",
    ]

    for provider in provider_urls:
        print(provider)

        found = get_proxy_list(provider)
        print(found)

        now = str(datetime.datetime.now())
        stamp = now[0:10] + ',' + now[11:19]

        updateprovidertable(provider, stamp, found)

        proxiestodb(found, stamp)
        print('Proxies --> Database completed ')

        basicproxytest(found, stamp)
        print('proxies --> basicproxytest completed')
188+
189+
if __name__ == '__main__':
    # Run forever: refresh the proxy database, then sleep 10 minutes.
    while True:
        backendserverupdate()
        time.sleep(600)
        print('Updating the server...')
194+
195+

app.py

Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
import datetime
2+
3+
from flask import Flask, render_template,jsonify, request
4+
from flask_restful import Resource, Api
5+
from sqlalchemy import create_engine
6+
from Server import functesturl, updatedb, basicproxytest
7+
8+
# Engine for the SQLite database file — the same DB Server.py writes to.
# (Connections are opened lazily per request; nothing is loaded into memory here.)
e = create_engine('sqlite:///database.db')


# Resource
app = Flask(__name__, template_folder="templates")
api = Api(app)  # api is a collection of objects, where each object contains a specific functionality (GET, POST, etc)
14+
15+
16+
@app.route('/')
def home():
    """
    This function just responds to the browser URL
    localhost:5000/

    :return: the rendered template 'home.html'
    """
    return render_template('home.html')
25+
26+
@app.route('/proxies')
def proxies():
    """Respond to localhost:5000/proxies.

    :return: the rendered template 'proxylistview.html'
    """
    return render_template('proxylistview.html')
29+
30+
class index(Resource):
    """REST resource backing the home page: provider stats and URL tests."""

    def get(self):
        """Return all proxyprovider rows and proxytest rows as JSON.

        :return: jsonify({'tasks': ..., 'testtasks': ...}) on success,
                 or ({'msg': ...}, 400) on a DB error.
        """
        conn = e.connect()
        try:
            query = conn.execute("select * from proxyprovider")
            rows = query.cursor.fetchall()
            query_testurl = conn.execute("select * from proxytest")
            rows_testurl = query_testurl.cursor.fetchall()
            return jsonify({'tasks': rows, 'testtasks': rows_testurl})
        except Exception:
            return {'msg': 'Error occurred ! '}, 400
        finally:
            # The original's close() sat after `return` and never ran,
            # leaking a connection per request.
            conn.close()

    def post(self):
        """Run every stored proxy against the URL in the request body.

        Expects JSON {'url': <hostname>}; delegates to Server.functesturl.
        """
        json = request.get_json()
        urlTest = json['url']
        conn = e.connect()
        try:
            query = conn.execute("select * from proxylists")
            rows = query.cursor.fetchall()
        finally:
            # Close before the early return below — the original leaked the
            # connection when no proxies were available.
            conn.close()
        if not rows:
            return {'message': 'No Proxies Are Available in the Database !'}, 400
        # Build 'IP:Port' strings for the tester.
        proxyList = [row[0] + ':' + str(row[1]) for row in rows]
        functesturl(proxyList, urlTest)
        return { 'message': 'Proxy test with certain url is completed, find the details in Test URLS Division!' }, 201
59+
60+
class proxiesretrive(Resource):
    """CRUD resource for individual proxy records (mounted at /proxiestest)."""

    def get(self):
        """Return every row of proxylists as JSON, or a 400 error body."""
        conn = e.connect()
        try:
            query = conn.execute("select * from proxylists")
            rows = query.cursor.fetchall()
            return jsonify({'tasks': rows})
        except Exception:
            return {'msg': 'Error occurred ! '}, 400
        finally:
            # The original's close() sat after `return` and never ran,
            # leaking a connection per request.
            conn.close()

    def post(self):
        """Add a new proxy {'IP', 'Port'}; 400 if the IP already exists."""
        json_data = request.get_json()
        if not json_data:
            return {'message': 'No input data provided'}, 400
        conn = e.connect()
        try:
            # Explicit 1-tuple: the original passed `(json_data['IP'])`,
            # a bare string, relying on parameter-guessing in the driver.
            query = conn.execute("select * from proxylists where IP = ? ", (json_data['IP'],))
            row = query.cursor.fetchall()
        finally:
            conn.close()
        if not row:
            updatedb(json_data)
            return { 'message': 'IP Entry Has Been Added Successfully ' }, 201
        else:
            return { 'message': 'IP is already exists, please enter new IP address !' }, 400

    def put(self):
        """Replace an existing proxy's IP/Port and re-run the basic test.

        Expects JSON with 'oldIP', 'newIP' and 'newPort'.
        """
        json_data = request.get_json()
        proxy_list = []
        if not json_data:
            return {'message': 'No input data provided'}, 400
        conn = e.connect()
        try:
            query = conn.execute("select * from proxylists where IP = ? ", (json_data['oldIP'],))
            row = query.cursor.fetchall()
            if not row:
                return {'message': 'IP does not exist'}, 400
            date = str(datetime.datetime.now())
            dataformat = date[0:10] + ',' + date[11:19]
            conn.execute("UPDATE proxylists SET IP = ?, Port = ?, insertdate = ?, lastupdate = ? WHERE IP = ?",
                         (json_data['newIP'], json_data['newPort'], dataformat, dataformat, json_data['oldIP']))
        finally:
            # Closed in `finally` — the original never reached its close().
            conn.close()
        dict_line = {'IP': json_data['newIP'], 'Port': json_data['newPort']}
        proxy_list.append(dict_line)
        basicproxytest(proxy_list, dataformat)
        return {'message': 'IP Entry Has Been Updated Successfully '}, 201

    def delete(self):
        """Delete ALL proxies when the request body is the JSON literal 1."""
        json_data = request.get_json(force=True)
        if json_data == 1:
            print('deleted')
            conn = e.connect()
            try:
                conn.execute("DELETE from proxylists")
            finally:
                conn.close()
            return {"status": 'success'}, 201
        else:
            return {'message': 'No input data provided'}, 400
119+
120+
121+
#resource route
# /proxiestest -> per-proxy CRUD; /indexpage -> provider/test statistics.
api.add_resource(proxiesretrive, '/proxiestest')
api.add_resource(index, '/indexpage')
124+
125+
if __name__ == '__main__':
    # Development server only — debug=True must not be used in production.
    app.run(debug=True)

database.db

65 KB
Binary file not shown.

0 commit comments

Comments
 (0)