Skip to content

Commit f5a1dda

Browse files
committed
api change
1 parent 093da34 commit f5a1dda

File tree

5 files changed

+58
-42
lines changed

5 files changed

+58
-42
lines changed

scrappeycom.egg-info/PKG-INFO

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
Metadata-Version: 2.1
22
Name: scrappeycom
3-
Version: 0.3.3
3+
Version: 0.3.4
44
Summary: An API wrapper for Scrappey.com written in Python (cloudflare bypass & solver)
55
Home-page: https://github.com/pim97/scrappey-wrapper-python
66
Download-URL: https://github.com/pim97/scrappey-wrapper-python/releases/tag/v_03

scrappeycom.egg-info/requires.txt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1 @@
11
requests
2-
urllib

scrappeycom/scrappey.py

Lines changed: 15 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ def __init__(self, api_key):
66
self.api_key = api_key
77
self.base_url = 'https://publisher.scrappey.com/api/v1'
88

9-
def send_request(self, endpoint, method, data=None):
9+
def send_request(self, endpoint, data=None):
1010
url = f'{self.base_url}?key={self.api_key}'
1111

1212
headers = {
@@ -19,39 +19,25 @@ def send_request(self, endpoint, method, data=None):
1919
}
2020

2121
try:
22-
response = requests.request(method, url, headers=headers, json=payload)
22+
response = requests.post(url, headers=headers, json=payload)
2323
response.raise_for_status()
2424
return response.json()
2525
except requests.exceptions.RequestException as error:
2626
raise error
2727

28-
def create_session(self, session_id=None, proxy=None):
29-
return self.send_request('sessions.create', 'POST', {'session': session_id, 'proxy': proxy})
28+
def create_session(self, data):
29+
return self.send_request(endpoint='sessions.create', data=data)
3030

31-
def destroy_session(self, session_id):
32-
if session_id is None:
33-
raise ValueError('sessionId parameter is required.')
31+
def destroy_session(self, session):
32+
if session is None:
33+
raise ValueError('session parameter is required.')
34+
return self.send_request(endpoint='sessions.destroy', data={'session': session})
3435

35-
return self.send_request('sessions.destroy', 'POST', {'session': session_id})
36+
def request(self, data):
37+
if data is None:
38+
raise ValueError('data parameter is required.')
39+
40+
if data['cmd'] is None:
41+
raise ValueError('data.cmd parameter is required.')
3642

37-
def get_request(self, url, session_id=None, cookiejar=None, proxy=None):
38-
if url is None:
39-
raise ValueError('url parameter is required.')
40-
41-
if session_id is None and cookiejar is None and proxy is None:
42-
raise ValueError('At least one of sessionId, cookiejar, or proxy parameters must be provided.')
43-
44-
return self.send_request('request.get', 'POST', {'url': url, 'session': session_id, 'cookiejar': cookiejar, 'proxy': proxy})
45-
46-
def post_request(self, url, post_data, session_id=None, cookiejar=None, proxy=None):
47-
is_form_data = isinstance(post_data, str) and '=' in post_data
48-
49-
if not is_form_data:
50-
try:
51-
request_data = urllib.parse.urlencode(post_data)
52-
except ValueError:
53-
raise ValueError('Invalid postData format. It must be in application/x-www-form-urlencoded format.')
54-
else:
55-
request_data = post_data
56-
57-
return self.send_request('request.post', 'POST', {'url': url, 'postData': request_data, 'session': session_id, 'cookiejar': cookiejar, 'proxy': proxy})
43+
return self.send_request(endpoint=data['cmd'], data=data)

scrappeycom/test.py

Lines changed: 40 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,53 @@
11
from scrappey import Scrappey
2+
import uuid
23

3-
scrappey = Scrappey('YOUR_API_KEY')
4+
scrappey = Scrappey('API_KEY')
45

56
def run_test():
67
try:
7-
session = scrappey.create_session()
8-
print(session)
8+
sessionData = {
9+
'session': str(uuid.uuid4()), #uuid is also optional, otherwise default uuid will be used
10+
#'proxy': 'http://username:password@ip:port' #proxy is optional, otherwise default proxy will be used
11+
}
12+
session = scrappey.create_session(sessionData)
13+
print('Session created:', session['session'])
914

10-
get_request_result = scrappey.get_request('https://httpbin.rs/get', session['session'])
15+
# View all the options here with the request builder
16+
# https://app.scrappey.com/#/builder
17+
# Just copy paste it below, example
18+
#
19+
# {
20+
# "cmd": "request.get",
21+
# "url": "https://httpbin.rs/get"
22+
# }
23+
24+
get_request_result = scrappey.request({
25+
"cmd": "request.get",
26+
'session': session['session'],
27+
'url': 'https://httpbin.rs/get',
28+
})
1129
print('GET Request Result:', get_request_result)
1230

13-
post_data = {'username': 'user123', 'password': 'pass456'}
14-
post_request_result = scrappey.post_request('https://httpbin.rs/post', post_data, session['session'])
31+
post_request_result = scrappey.request({
32+
"cmd": "request.post",
33+
"url": "https://httpbin.rs/post",
34+
"postData": "test=test&test2=test2"
35+
})
1536
print('POST Request Result:', post_request_result)
1637

17-
scrappey.destroy_session(session['session'])
18-
print('Session destroyed.')
38+
# JSON request example
39+
post_request_result_json = scrappey.request({
40+
"cmd": "request.post",
41+
"url": "https://backend.scrappey.com/api/auth/login",
42+
"postData": "{\"email\":\"email\",\"password\":\"password\"}",
43+
"customHeaders": {
44+
"content-type": "application/json"
45+
}
46+
})
47+
print('POST Request Result:', post_request_result_json)
48+
49+
sessionDestroyed = scrappey.destroy_session(sessionData)
50+
print('Session destroyed:', sessionDestroyed)
1951
except Exception as error:
2052
print(error)
2153

setup.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
setup(
77
name = 'scrappeycom', # How you named your package folder (MyLib)
88
packages = ['scrappeycom'], # Chose the same as "name"
9-
version = '0.3.3', # Start with a small number and increase it with every change you make
9+
version = '0.3.5', # Start with a small number and increase it with every change you make
1010
license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository
1111
description = 'An API wrapper for Scrappey.com written in Python (cloudflare bypass & solver)', # Give a short description about your library
1212
author = 'dormic97', # Type in your name
@@ -17,8 +17,7 @@
1717
long_description=long_description,
1818
long_description_content_type='text/markdown',
1919
install_requires=[ # I get to this in a second
20-
'requests',
21-
'urllib'
20+
'requests'
2221
],
2322
classifiers=[
2423
'Development Status :: 5 - Production/Stable', # Chose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package

0 commit comments

Comments (0)