Skip to content

Commit c391f79

Browse files
committed
make fmt
1 parent 4a75e63 commit c391f79

File tree

2 files changed

+52
-54
lines changed

2 files changed

+52
-54
lines changed

tests/test_errors.py

Lines changed: 46 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -83,53 +83,52 @@ def make_private_link_response() -> requests.Response:
8383
for x in base_subclass_test_cases]
8484

8585

86-
@pytest.mark.parametrize(
87-
'response, expected_error, expected_message', subclass_test_cases +
88-
[(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
89-
(fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
90-
(fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
91-
(make_private_link_response(), errors.PrivateLinkValidationError,
92-
('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
93-
'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
94-
'endpoint. For more information, see '
95-
'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
96-
),
97-
(fake_valid_response(
98-
'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
99-
'/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
100-
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
101-
'/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
102-
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
103-
'/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
104-
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
105-
'/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
106-
(fake_response(
107-
'GET', 400,
108-
'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
109-
), errors.BadRequest,
110-
'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
111-
),
112-
(fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
113-
'Worker environment not ready'),
114-
(fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
115-
('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
116-
'Please report this issue with the following debugging information to the SDK issue tracker at '
117-
'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
118-
'< 400 Bad Request\n'
119-
'< this is not a real response```')),
120-
(fake_response(
121-
'GET', 404,
122-
json.dumps({
123-
'detail': 'Group with id 1234 is not found',
124-
'status': '404',
125-
'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
126-
})), errors.NotFound, 'None Group with id 1234 is not found'),
127-
(fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
128-
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
129-
),
130-
(fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
131-
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
132-
)])
86+
@pytest.mark.parametrize('response, expected_error, expected_message', subclass_test_cases + [
87+
(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
88+
(fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
89+
(fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
90+
(make_private_link_response(), errors.PrivateLinkValidationError,
91+
('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
92+
'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
93+
'endpoint. For more information, see '
94+
'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
95+
),
96+
(fake_valid_response(
97+
'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
98+
'/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
99+
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
100+
'/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
101+
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
102+
'/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
103+
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
104+
'/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
105+
(fake_response(
106+
'GET', 400,
107+
'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
108+
), errors.BadRequest,
109+
'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'),
110+
(fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
111+
'Worker environment not ready'),
112+
(fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
113+
('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
114+
'Please report this issue with the following debugging information to the SDK issue tracker at '
115+
'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
116+
'< 400 Bad Request\n'
117+
'< this is not a real response```')),
118+
(fake_response(
119+
'GET', 404,
120+
json.dumps({
121+
'detail': 'Group with id 1234 is not found',
122+
'status': '404',
123+
'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
124+
})), errors.NotFound, 'None Group with id 1234 is not found'),
125+
(fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
126+
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
127+
),
128+
(fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
129+
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
130+
)
131+
])
133132
def test_get_api_error(response, expected_error, expected_message):
134133
parser = errors._Parser()
135134
with pytest.raises(errors.DatabricksError) as e:

tests/test_oauth.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,26 +10,25 @@ def test_token_cache_unique_filename_by_host():
1010
common_args = dict(client_id="abc",
1111
redirect_url="http://localhost:8020",
1212
oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
13-
assert TokenCache(host="http://localhost:",
14-
**common_args).filename != TokenCache("https://bar.cloud.databricks.com",
15-
**common_args).filename
13+
assert TokenCache(host="http://localhost:", **common_args).filename != TokenCache(
14+
"https://bar.cloud.databricks.com", **common_args).filename
1615

1716

1817
def test_token_cache_unique_filename_by_client_id():
1918
common_args = dict(host="http://localhost:",
2019
redirect_url="http://localhost:8020",
2120
oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
22-
assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def",
23-
**common_args).filename
21+
assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", **
22+
common_args).filename
2423

2524

2625
def test_token_cache_unique_filename_by_scopes():
2726
common_args = dict(host="http://localhost:",
2827
client_id="abc",
2928
redirect_url="http://localhost:8020",
3029
oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
31-
assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"],
32-
**common_args).filename
30+
assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], **
31+
common_args).filename
3332

3433

3534
def test_account_oidc_endpoints(requests_mock):

0 commit comments

Comments (0)