diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 12779819a..e406a373e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -151,7 +151,7 @@ private ApiClient(Builder builder) { bodyLogger = new BodyLogger(mapper, 1024, debugTruncateBytes); } - private static void setQuery(Request in, I entity) { + public static void setQuery(Request in, I entity) { if (entity == null) { return; } @@ -161,36 +161,18 @@ private static void setQuery(Request in, I entity) { } } - private static void setHeaders(Request in, Map headers) { - if (headers == null) { - return; - } - for (Map.Entry e : headers.entrySet()) { - in.withHeader(e.getKey(), e.getValue()); - } - } - - public Collection getCollection( - String path, I in, Class element, Map headers) { + public Collection getCollection(Request req, Class element) { return withJavaType( - path, - in, - mapper.getTypeFactory().constructCollectionType(Collection.class, element), - headers); + req, mapper.getTypeFactory().constructCollectionType(Collection.class, element)); } - public Map getStringMap(String path, I in, Map headers) { + public Map getStringMap(Request req) { return withJavaType( - path, - in, - mapper.getTypeFactory().constructMapType(Map.class, String.class, String.class), - headers); + req, mapper.getTypeFactory().constructMapType(Map.class, String.class, String.class)); } - protected O withJavaType( - String path, I in, JavaType javaType, Map headers) { + protected O withJavaType(Request request, JavaType javaType) { try { - Request request = prepareRequest("GET", path, in, headers); Response response = getResponse(request); return deserialize(response.getBody(), javaType); } catch (IOException e) { @@ -198,95 +180,6 @@ protected O withJavaType( } } - public O HEAD(String path, Class target, Map headers) { - return HEAD(path, null, target, headers); - } - - public O HEAD(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("HEAD", path, in, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O GET(String path, Class target, Map headers) { - return GET(path, null, target, headers); - } - - public O GET(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("GET", path, in, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O POST(String path, Class target, Map headers) { - try { - return execute(prepareRequest("POST", path, null, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O POST(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("POST", path, in, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O PUT(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("PUT", path, in, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O PATCH(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("PATCH", path, in, headers), target); - } catch 
(IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - public O DELETE(String path, I in, Class target, Map headers) { - try { - return execute(prepareRequest("DELETE", path, in, headers), target); - } catch (IOException e) { - throw new DatabricksException("IO error: " + e.getMessage(), e); - } - } - - private boolean hasBody(String method) { - return !method.equals("GET") && !method.equals("DELETE") && !method.equals("HEAD"); - } - - private Request prepareBaseRequest(String method, String path, I in) - throws JsonProcessingException { - if (in == null || !hasBody(method)) { - return new Request(method, path); - } else if (InputStream.class.isAssignableFrom(in.getClass())) { - InputStream body = (InputStream) in; - return new Request(method, path, body); - } else { - String body = (in instanceof String) ? (String) in : serialize(in); - return new Request(method, path, body); - } - } - - private Request prepareRequest(String method, String path, I in, Map headers) - throws JsonProcessingException { - Request req = prepareBaseRequest(method, path, in); - setQuery(req, in); - setHeaders(req, headers); - return req; - } - /** * Executes HTTP request with retries and converts it to proper POJO * @@ -294,7 +187,7 @@ private Request prepareRequest(String method, String path, I in, Map T execute(Request in, Class target) throws IOException { + public T execute(Request in, Class target) throws IOException { Response out = getResponse(in); if (target == Void.class) { return null; @@ -533,7 +426,7 @@ public void deserialize(Response response, T object) throws IOException { } } - private String serialize(Object body) throws JsonProcessingException { + public String serialize(Object body) throws JsonProcessingException { if (body == null) { return null; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 20e7f883e..2943ce82e 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -586,10 +586,13 @@ private OpenIDConnectEndpoints fetchDefaultOidcEndpoints() throws IOException { .withHttpClient(getHttpClient()) .withGetHostFunc(v -> getHost()) .build(); - return apiClient.GET( - "/oidc/.well-known/oauth-authorization-server", - OpenIDConnectEndpoints.class, - new HashMap<>()); + try { + return apiClient.execute( + new Request("GET", "/oidc/.well-known/oauth-authorization-server"), + OpenIDConnectEndpoints.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/RefreshableTokenSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/RefreshableTokenSource.java index aee64d087..e93f91ae5 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/RefreshableTokenSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/oauth/RefreshableTokenSource.java @@ -4,6 +4,7 @@ import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.http.FormRequest; import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Request; import java.time.LocalDateTime; import java.time.temporal.ChronoUnit; import java.util.Base64; @@ -61,12 +62,11 @@ protected static Token retrieveToken( break; } 
headers.put("Content-Type", "application/x-www-form-urlencoded"); + Request req = new Request("POST", tokenUrl, FormRequest.wrapValuesInList(params)); + req.withHeaders(headers); try { ApiClient apiClient = new ApiClient.Builder().withHttpClient(hc).build(); - - OAuthResponse resp = - apiClient.POST( - tokenUrl, FormRequest.wrapValuesInList(params), OAuthResponse.class, headers); + OAuthResponse resp = apiClient.execute(req, OAuthResponse.class); if (resp.getErrorCode() != null) { throw new IllegalArgumentException(resp.getErrorCode() + ": " + resp.getErrorSummary()); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index e45306647..0e6ec9ff6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.apps; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Apps */ @Generated @@ -18,35 +19,55 @@ public AppsImpl(ApiClient apiClient) { @Override public App create(CreateAppRequest request) { String path = "/api/2.0/apps"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getApp(), App.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getApp())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public App delete(DeleteAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, App.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AppDeployment deploy(CreateAppDeploymentRequest request) { String path = String.format("/api/2.0/apps/%s/deployments", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getAppDeployment(), AppDeployment.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getAppDeployment())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AppDeployment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public App get(GetAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", 
"application/json"); - return apiClient.GET(path, request, App.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -54,86 +75,136 @@ public AppDeployment getDeployment(GetAppDeploymentRequest request) { String path = String.format( "/api/2.0/apps/%s/deployments/%s", request.getAppName(), request.getDeploymentId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AppDeployment.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AppDeployment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetAppPermissionLevelsResponse getPermissionLevels(GetAppPermissionLevelsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s/permissionLevels", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetAppPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetAppPermissionLevelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AppPermissions getPermissions(GetAppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AppPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AppPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListAppsResponse list(ListAppsRequest request) { String path = "/api/2.0/apps"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAppsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAppsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListAppDeploymentsResponse listDeployments(ListAppDeploymentsRequest request) { String path = String.format("/api/2.0/apps/%s/deployments", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAppDeploymentsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAppDeploymentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AppPermissions setPermissions(AppPermissionsRequest request) { 
String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, AppPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AppPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public App start(StartAppRequest request) { String path = String.format("/api/2.0/apps/%s/start", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, App.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public App stop(StopAppRequest request) { String path = String.format("/api/2.0/apps/%s/stop", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, App.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public App update(UpdateAppRequest request) { String path = String.format("/api/2.0/apps/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request.getApp(), App.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getApp())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, App.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AppPermissions updatePermissions(AppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, AppPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AppPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java index b9cbb0c7c..47caf60ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of BillableUsage */ @Generated @@ -19,8 +20,13 @@ public BillableUsageImpl(ApiClient apiClient) { public DownloadResponse download(DownloadRequest request) { String path = String.format("/api/2.0/accounts/%s/usage/download", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "text/plain"); - return apiClient.GET(path, request, DownloadResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "text/plain"); + return apiClient.execute(req, DownloadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java index b291b467e..7bf0119d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Budgets */ @Generated @@ -18,10 +19,15 @@ public BudgetsImpl(ApiClient apiClient) { @Override public CreateBudgetConfigurationResponse create(CreateBudgetConfigurationRequest request) { String path = String.format("/api/2.1/accounts/%s/budgets", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateBudgetConfigurationResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateBudgetConfigurationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -30,9 +36,14 @@ public void delete(DeleteBudgetConfigurationRequest request) { String.format( "/api/2.1/accounts/%s/budgets/%s", apiClient.configuredAccountID(), request.getBudgetId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteBudgetConfigurationResponse.class, headers); + try { + Request req = new Request("DELETE", path); + 
ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteBudgetConfigurationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,17 +52,27 @@ public GetBudgetConfigurationResponse get(GetBudgetConfigurationRequest request) String.format( "/api/2.1/accounts/%s/budgets/%s", apiClient.configuredAccountID(), request.getBudgetId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetBudgetConfigurationResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetBudgetConfigurationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListBudgetConfigurationsResponse list(ListBudgetConfigurationsRequest request) { String path = String.format("/api/2.1/accounts/%s/budgets", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListBudgetConfigurationsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListBudgetConfigurationsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -60,9 +81,14 @@ public UpdateBudgetConfigurationResponse update(UpdateBudgetConfigurationRequest String.format( "/api/2.1/accounts/%s/budgets/%s", apiClient.configuredAccountID(), request.getBudgetId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, UpdateBudgetConfigurationResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateBudgetConfigurationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java index c82c9a16d..dd4e64dcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of LogDelivery */ @Generated @@ -19,10 +20,15 @@ public LogDeliveryImpl(ApiClient apiClient) { public WrappedLogDeliveryConfiguration create(WrappedCreateLogDeliveryConfiguration request) { String path = String.format("/api/2.0/accounts/%s/log-delivery", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - 
headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, WrappedLogDeliveryConfiguration.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WrappedLogDeliveryConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,18 +37,28 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { String.format( "/api/2.0/accounts/%s/log-delivery/%s", apiClient.configuredAccountID(), request.getLogDeliveryConfigurationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, WrappedLogDeliveryConfiguration.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, WrappedLogDeliveryConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public WrappedLogDeliveryConfigurations list(ListLogDeliveryRequest request) { String path = String.format("/api/2.0/accounts/%s/log-delivery", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, WrappedLogDeliveryConfigurations.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, WrappedLogDeliveryConfigurations.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -51,9 +67,14 @@ public void patchStatus(UpdateLogDeliveryConfigurationStatusRequest request) { String.format( "/api/2.0/accounts/%s/log-delivery/%s", apiClient.configuredAccountID(), request.getLogDeliveryConfigurationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, PatchStatusResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PatchStatusResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java index e677e090c..6f6574868 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.billing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation 
of UsageDashboards */ @Generated @@ -18,17 +19,27 @@ public UsageDashboardsImpl(ApiClient apiClient) { @Override public CreateBillingUsageDashboardResponse create(CreateBillingUsageDashboardRequest request) { String path = String.format("/api/2.0/accounts/%s/dashboard", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateBillingUsageDashboardResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateBillingUsageDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetBillingUsageDashboardResponse get(GetBillingUsageDashboardRequest request) { String path = String.format("/api/2.0/accounts/%s/dashboard", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetBillingUsageDashboardResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetBillingUsageDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java index 9918494f4..ad7867175 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountMetastoreAssignments */ @Generated @@ -21,10 +22,15 @@ public void create(AccountsCreateMetastoreAssignment request) { String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, CreateResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, CreateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -33,9 +39,14 @@ public void delete(DeleteAccountMetastoreAssignmentRequest request) { String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); - Map headers = new 
HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -44,9 +55,14 @@ public AccountsMetastoreAssignment get(GetAccountMetastoreAssignmentRequest requ String.format( "/api/2.0/accounts/%s/workspaces/%s/metastore", apiClient.configuredAccountID(), request.getWorkspaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AccountsMetastoreAssignment.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AccountsMetastoreAssignment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -56,9 +72,14 @@ public ListAccountMetastoreAssignmentsResponse list( String.format( "/api/2.0/accounts/%s/metastores/%s/workspaces", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAccountMetastoreAssignmentsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAccountMetastoreAssignmentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -67,9 +88,14 @@ public void update(AccountsUpdateMetastoreAssignment request) { String.format( "/api/2.0/accounts/%s/workspaces/%s/metastores/%s", apiClient.configuredAccountID(), request.getWorkspaceId(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java index 41afd30bd..b8a166770 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountMetastores */ @Generated @@ -18,10 +19,15 @@ public AccountMetastoresImpl(ApiClient apiClient) { @Override public 
AccountsMetastoreInfo create(AccountsCreateMetastore request) { String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, AccountsMetastoreInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AccountsMetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -30,9 +36,14 @@ public void delete(DeleteAccountMetastoreRequest request) { String.format( "/api/2.0/accounts/%s/metastores/%s", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,17 +52,26 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { String.format( "/api/2.0/accounts/%s/metastores/%s", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AccountsMetastoreInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AccountsMetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListMetastoresResponse list() { String path = String.format("/api/2.0/accounts/%s/metastores", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListMetastoresResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListMetastoresResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -60,9 +80,14 @@ public AccountsMetastoreInfo update(AccountsUpdateMetastore request) { String.format( "/api/2.0/accounts/%s/metastores/%s", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, AccountsMetastoreInfo.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AccountsMetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java index e7bb99315..fdb1c75e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountStorageCredentialsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountStorageCredentials */ @Generated @@ -21,10 +22,15 @@ public AccountsStorageCredentialInfo create(AccountsCreateStorageCredential requ String.format( "/api/2.0/accounts/%s/metastores/%s/storage-credentials", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, AccountsStorageCredentialInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AccountsStorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -35,9 +41,14 @@ public void delete(DeleteAccountStorageCredentialRequest request) { apiClient.configuredAccountID(), request.getMetastoreId(), request.getStorageCredentialName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -48,9 +59,14 @@ public AccountsStorageCredentialInfo get(GetAccountStorageCredentialRequest requ apiClient.configuredAccountID(), request.getMetastoreId(), request.getStorageCredentialName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AccountsStorageCredentialInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AccountsStorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -59,9 +75,14 @@ public ListAccountStorageCredentialsResponse list(ListAccountStorageCredentialsR String.format( "/api/2.0/accounts/%s/metastores/%s/storage-credentials", apiClient.configuredAccountID(), request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAccountStorageCredentialsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAccountStorageCredentialsResponse.class); + } catch (IOException e) { + 
throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -72,9 +93,14 @@ public AccountsStorageCredentialInfo update(AccountsUpdateStorageCredential requ apiClient.configuredAccountID(), request.getMetastoreId(), request.getStorageCredentialName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, AccountsStorageCredentialInfo.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AccountsStorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java index 5a7a55677..56ef167fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAllowlistsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ArtifactAllowlists */ @Generated @@ -19,18 +20,28 @@ public ArtifactAllowlistsImpl(ApiClient apiClient) { public ArtifactAllowlistInfo get(GetArtifactAllowlistRequest request) { String path = String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ArtifactAllowlistInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ArtifactAllowlistInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ArtifactAllowlistInfo update(SetArtifactAllowlist request) { String path = String.format("/api/2.1/unity-catalog/artifact-allowlists/%s", request.getArtifactType()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, ArtifactAllowlistInfo.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ArtifactAllowlistInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java index 404904ace..29649052b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Catalogs */ @Generated @@ -18,42 +19,67 @@ public CatalogsImpl(ApiClient apiClient) { @Override public CatalogInfo create(CreateCatalog request) { String path = "/api/2.1/unity-catalog/catalogs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CatalogInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CatalogInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteCatalogRequest request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CatalogInfo get(GetCatalogRequest request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CatalogInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CatalogInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListCatalogsResponse list(ListCatalogsRequest request) { String path = "/api/2.1/unity-catalog/catalogs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCatalogsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCatalogsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CatalogInfo update(UpdateCatalog request) { String path = String.format("/api/2.1/unity-catalog/catalogs/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CatalogInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, 
CatalogInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java index 1908778ac..cbd09dede 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Connections */ @Generated @@ -18,42 +19,67 @@ public ConnectionsImpl(ApiClient apiClient) { @Override public ConnectionInfo create(CreateConnection request) { String path = "/api/2.1/unity-catalog/connections"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ConnectionInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ConnectionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteConnectionRequest request) { String path = String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ConnectionInfo get(GetConnectionRequest request) { String path = String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ConnectionInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ConnectionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListConnectionsResponse list(ListConnectionsRequest request) { String path = "/api/2.1/unity-catalog/connections"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListConnectionsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListConnectionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ConnectionInfo update(UpdateConnection request) { String path = 
String.format("/api/2.1/unity-catalog/connections/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ConnectionInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ConnectionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java index b2aad2644..1557d0944 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Credentials */ @Generated @@ -18,61 +19,96 @@ public CredentialsImpl(ApiClient apiClient) { @Override public CredentialInfo createCredential(CreateCredentialRequest request) { String path = "/api/2.1/unity-catalog/credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CredentialInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteCredential(DeleteCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteCredentialResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteCredentialResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public TemporaryCredentials generateTemporaryServiceCredential( GenerateTemporaryServiceCredentialRequest request) { String path = "/api/2.1/unity-catalog/temporary-service-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, TemporaryCredentials.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, TemporaryCredentials.class); + } catch 
(IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CredentialInfo getCredential(GetCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CredentialInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListCredentialsResponse listCredentials(ListCredentialsRequest request) { String path = "/api/2.1/unity-catalog/credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCredentialsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CredentialInfo updateCredential(UpdateCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/credentials/%s", request.getNameArg()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CredentialInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ValidateCredentialResponse validateCredential(ValidateCredentialRequest request) { String path = "/api/2.1/unity-catalog/validate-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ValidateCredentialResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ValidateCredentialResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java index 0b30c0a78..8c3107d8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; 
+import java.io.IOException; /** Package-local implementation of ExternalLocations */ @Generated @@ -18,42 +19,67 @@ public ExternalLocationsImpl(ApiClient apiClient) { @Override public ExternalLocationInfo create(CreateExternalLocation request) { String path = "/api/2.1/unity-catalog/external-locations"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ExternalLocationInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalLocationInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteExternalLocationRequest request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ExternalLocationInfo get(GetExternalLocationRequest request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ExternalLocationInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ExternalLocationInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListExternalLocationsResponse list(ListExternalLocationsRequest request) { String path = "/api/2.1/unity-catalog/external-locations"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListExternalLocationsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListExternalLocationsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ExternalLocationInfo update(UpdateExternalLocation request) { String path = String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ExternalLocationInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExternalLocationInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
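
Every file below repeats the template visible in ExternalLocationsImpl above: the per-verb helpers (GET, POST, PATCH, PUT, DELETE) and their header maps are replaced by an explicitly assembled Request that is handed to execute, with IOException wrapped into DatabricksException at each call site. As a reading aid, not code in this change, the GET shape distills to the sketch below; it reuses the types from ExternalLocationsImpl and calls only APIs the diff itself introduces (Request, ApiClient.setQuery, withHeader, execute).

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.catalog.ExternalLocationInfo;
    import com.databricks.sdk.service.catalog.GetExternalLocationRequest;
    import java.io.IOException;

    class GetShapeSketch {
      // Mirrors ExternalLocationsImpl.get above: build the Request, serialize the
      // typed request into the query string, set headers, execute, wrap IOException.
      static ExternalLocationInfo get(ApiClient apiClient, GetExternalLocationRequest request) {
        String path =
            String.format("/api/2.1/unity-catalog/external-locations/%s", request.getName());
        try {
          Request req = new Request("GET", path);
          ApiClient.setQuery(req, request); // query parameters come from the request POJO
          req.withHeader("Accept", "application/json"); // headers go straight onto the Request
          return apiClient.execute(req, ExternalLocationInfo.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

diff --git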
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java index 88587432c..422449786 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Functions */ @Generated @@ -18,42 +19,67 @@ public FunctionsImpl(ApiClient apiClient) { @Override public FunctionInfo create(CreateFunctionRequest request) { String path = "/api/2.1/unity-catalog/functions"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, FunctionInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, FunctionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteFunctionRequest request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public FunctionInfo get(GetFunctionRequest request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, FunctionInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, FunctionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListFunctionsResponse list(ListFunctionsRequest request) { String path = "/api/2.1/unity-catalog/functions"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListFunctionsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListFunctionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public FunctionInfo update(UpdateFunction request) { String path = String.format("/api/2.1/unity-catalog/functions/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", 
"application/json"); - return apiClient.PATCH(path, request, FunctionInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, FunctionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java index a4f83e394..1a8219ede 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Grants */ @Generated @@ -21,9 +22,14 @@ public PermissionsList get(GetGrantRequest request) { String.format( "/api/2.1/unity-catalog/permissions/%s/%s", request.getSecurableType(), request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, PermissionsList.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PermissionsList.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,9 +38,14 @@ public EffectivePermissionsList getEffective(GetEffectiveRequest request) { String.format( "/api/2.1/unity-catalog/effective-permissions/%s/%s", request.getSecurableType(), request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, EffectivePermissionsList.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EffectivePermissionsList.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -43,9 +54,14 @@ public PermissionsList update(UpdatePermissions request) { String.format( "/api/2.1/unity-catalog/permissions/%s/%s", request.getSecurableType(), request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, PermissionsList.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PermissionsList.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java index 
c63190749..87e65c5ac 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Metastores */ @Generated @@ -19,86 +20,133 @@ public MetastoresImpl(ApiClient apiClient) { public void assign(CreateMetastoreAssignment request) { String path = String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, AssignResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, AssignResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MetastoreInfo create(CreateMetastore request) { String path = "/api/2.1/unity-catalog/metastores"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, MetastoreInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MetastoreAssignment current() { String path = "/api/2.1/unity-catalog/current-metastore-assignment"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, MetastoreAssignment.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MetastoreAssignment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteMetastoreRequest request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MetastoreInfo get(GetMetastoreRequest request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, MetastoreInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, 
request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListMetastoresResponse list() { String path = "/api/2.1/unity-catalog/metastores"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListMetastoresResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListMetastoresResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetMetastoreSummaryResponse summary() { String path = "/api/2.1/unity-catalog/metastore_summary"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, GetMetastoreSummaryResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetMetastoreSummaryResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void unassign(UnassignRequest request) { String path = String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, UnassignResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, UnassignResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MetastoreInfo update(UpdateMetastore request) { String path = String.format("/api/2.1/unity-catalog/metastores/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, MetastoreInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MetastoreInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void updateAssignment(UpdateMetastoreAssignment request) { String path = String.format("/api/2.1/unity-catalog/workspaces/%s/metastore", request.getWorkspaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateAssignmentResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateAssignmentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
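
MetastoresImpl also covers the no-argument variant: current(), list(), and summary() take no request object, so there is nothing to pass to setQuery and the Request is built from the method and path alone. A minimal sketch mirroring list() above:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.catalog.ListMetastoresResponse;
    import java.io.IOException;

    class NoArgGetSketch {
      // No request POJO, so no setQuery call: just method, path, and headers.
      static ListMetastoresResponse list(ApiClient apiClient) {
        try {
          Request req = new Request("GET", "/api/2.1/unity-catalog/metastores");
          req.withHeader("Accept", "application/json");
          return apiClient.execute(req, ListMetastoresResponse.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java index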
ebb95ea18..b847105e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ModelVersions */ @Generated @@ -21,8 +22,13 @@ public void delete(DeleteModelVersionRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/versions/%s", request.getFullName(), request.getVersion()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,9 +37,14 @@ public ModelVersionInfo get(GetModelVersionRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/versions/%s", request.getFullName(), request.getVersion()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ModelVersionInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ModelVersionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -42,17 +53,27 @@ public ModelVersionInfo getByAlias(GetByAliasRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/aliases/%s", request.getFullName(), request.getAlias()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ModelVersionInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ModelVersionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListModelVersionsResponse list(ListModelVersionsRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s/versions", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListModelVersionsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListModelVersionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -61,9 +82,14 @@ public ModelVersionInfo update(UpdateModelVersionRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/versions/%s", request.getFullName(), request.getVersion()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ModelVersionInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + 
ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ModelVersionInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index a1d482fa6..2121a24ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of OnlineTables */ @Generated @@ -18,25 +19,40 @@ public OnlineTablesImpl(ApiClient apiClient) { @Override public OnlineTable create(CreateOnlineTableRequest request) { String path = "/api/2.0/online-tables"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getTable(), OnlineTable.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getTable())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, OnlineTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteOnlineTableRequest request) { String path = String.format("/api/2.0/online-tables/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public OnlineTable get(GetOnlineTableRequest request) { String path = String.format("/api/2.0/online-tables/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, OnlineTable.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, OnlineTable.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
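
OnlineTablesImpl.create is the one method in this section where body and query come from different objects: the old code posted request.getTable() as the body, and the new code preserves that by serializing the nested table while still running setQuery over the wrapper request. A sketch of just that divergence, mirroring create above:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.catalog.CreateOnlineTableRequest;
    import com.databricks.sdk.service.catalog.OnlineTable;
    import java.io.IOException;

    class BodyFromFieldSketch {
      // Body and query diverge: the JSON body is the nested table, while the
      // query parameters are serialized from the wrapper request.
      static OnlineTable create(ApiClient apiClient, CreateOnlineTableRequest request) {
        try {
          Request req =
              new Request(
                  "POST", "/api/2.0/online-tables", apiClient.serialize(request.getTable()));
          ApiClient.setQuery(req, request);
          req.withHeader("Accept", "application/json");
          req.withHeader("Content-Type", "application/json");
          return apiClient.execute(req, OnlineTable.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java index 221a92b7a..78553ca8e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import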
com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of QualityMonitors */ @Generated @@ -21,32 +22,52 @@ public void cancelRefresh(CancelRefreshRequest request) { String.format( "/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s/cancel", request.getTableName(), request.getRefreshId()); - Map headers = new HashMap<>(); - apiClient.POST(path, null, CancelRefreshResponse.class, headers); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, CancelRefreshResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MonitorInfo create(CreateMonitor request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, MonitorInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MonitorInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteQualityMonitorRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MonitorInfo get(GetQualityMonitorRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, MonitorInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MonitorInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -55,18 +76,28 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) { String.format( "/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s", request.getTableName(), request.getRefreshId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, MonitorRefreshInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MonitorRefreshInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MonitorRefreshListResponse listRefreshes(ListRefreshesRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", 
request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, MonitorRefreshListResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MonitorRefreshListResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -74,27 +105,42 @@ public RegenerateDashboardResponse regenerateDashboard(RegenerateDashboardReques String path = String.format( "/api/2.1/quality-monitoring/tables/%s/monitor/dashboard", request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, RegenerateDashboardResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RegenerateDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MonitorRefreshInfo runRefresh(RunRefreshRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.POST(path, null, MonitorRefreshInfo.class, headers); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MonitorRefreshInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public MonitorInfo update(UpdateMonitor request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, MonitorInfo.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MonitorInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
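
One behavioral nuance in QualityMonitorsImpl deserves a second look in review: cancelRefresh and runRefresh previously passed null as the body argument (apiClient.POST(path, null, MonitorRefreshInfo.class, headers)), while the new code builds a body-less Request and hands the actual request object to setQuery. Both request types appear to carry only path parameters (table name, refresh id), so the query string should stay empty and behavior unchanged, but the null-to-request switch is a real difference in what setQuery sees. The distilled shape:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.catalog.MonitorRefreshInfo;
    import com.databricks.sdk.service.catalog.RunRefreshRequest;
    import java.io.IOException;

    class EmptyBodyPostSketch {
      // Empty-body POST: two-argument Request constructor, no Content-Type header.
      // setQuery now receives the request object where the old call passed null.
      static MonitorRefreshInfo runRefresh(ApiClient apiClient, RunRefreshRequest request) {
        String path =
            String.format(
                "/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName());
        try {
          Request req = new Request("POST", path);
          ApiClient.setQuery(req, request);
          req.withHeader("Accept", "application/json");
          return apiClient.execute(req, MonitorRefreshInfo.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java index 35f626cb5..bc2de4af6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of RegisteredModels */ @Generated @@ -18,17 +19,27 @@ public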
RegisteredModelsImpl(ApiClient apiClient) { @Override public RegisteredModelInfo create(CreateRegisteredModelRequest request) { String path = "/api/2.1/unity-catalog/models"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, RegisteredModelInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RegisteredModelInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -37,24 +48,39 @@ public void deleteAlias(DeleteAliasRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/aliases/%s", request.getFullName(), request.getAlias()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteAliasResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteAliasResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RegisteredModelInfo get(GetRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RegisteredModelInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RegisteredModelInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListRegisteredModelsResponse list(ListRegisteredModelsRequest request) { String path = "/api/2.1/unity-catalog/models"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListRegisteredModelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListRegisteredModelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -63,18 +89,28 @@ public RegisteredModelAlias setAlias(SetRegisteredModelAliasRequest request) { String.format( "/api/2.1/unity-catalog/models/%s/aliases/%s", request.getFullName(), request.getAlias()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, RegisteredModelAlias.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + 
req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RegisteredModelAlias.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RegisteredModelInfo update(UpdateRegisteredModelRequest request) { String path = String.format("/api/2.1/unity-catalog/models/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, RegisteredModelInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RegisteredModelInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java index 3f15812c3..05c2e571e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ResourceQuotas */ @Generated @@ -21,16 +22,26 @@ public GetQuotaResponse getQuota(GetQuotaRequest request) { String.format( "/api/2.1/unity-catalog/resource-quotas/%s/%s/%s", request.getParentSecurableType(), request.getParentFullName(), request.getQuotaName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetQuotaResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetQuotaResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListQuotasResponse listQuotas(ListQuotasRequest request) { String path = "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListQuotasResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListQuotasResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java index ba856d53f..e5470c89b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java @@ -2,9 +2,10 @@ package 
com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Schemas */ @Generated @@ -18,42 +19,67 @@ public SchemasImpl(ApiClient apiClient) { @Override public SchemaInfo create(CreateSchema request) { String path = "/api/2.1/unity-catalog/schemas"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, SchemaInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SchemaInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteSchemaRequest request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public SchemaInfo get(GetSchemaRequest request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, SchemaInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, SchemaInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListSchemasResponse list(ListSchemasRequest request) { String path = "/api/2.1/unity-catalog/schemas"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListSchemasResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListSchemasResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public SchemaInfo update(UpdateSchema request) { String path = String.format("/api/2.1/unity-catalog/schemas/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, SchemaInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SchemaInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
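
Every migrated method in this diff ends with the same four-line catch block. The change itself does not deduplicate it, presumably because these files are emitted per method by the code generator, but a follow-up could. A purely hypothetical helper, not part of this PR, shown only to illustrate what the call sites would collapse to:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import java.io.IOException;

    final class Calls {
      private Calls() {}

      // One place to translate the checked IOException from execute() into the
      // SDK's unchecked DatabricksException.
      static <T> T call(ApiClient apiClient, Request req, Class<T> target) {
        try {
          return apiClient.execute(req, target);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

With such a helper, a method like SchemasImpl.get above would reduce to building the Request and returning Calls.call(apiClient, req, SchemaInfo.class).

diff --git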
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java index 694d3d9df..3f1390c9f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of StorageCredentials */ @Generated @@ -18,51 +19,81 @@ public StorageCredentialsImpl(ApiClient apiClient) { @Override public StorageCredentialInfo create(CreateStorageCredential request) { String path = "/api/2.1/unity-catalog/storage-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, StorageCredentialInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, StorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteStorageCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public StorageCredentialInfo get(GetStorageCredentialRequest request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, StorageCredentialInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, StorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListStorageCredentialsResponse list(ListStorageCredentialsRequest request) { String path = "/api/2.1/unity-catalog/storage-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListStorageCredentialsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListStorageCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public StorageCredentialInfo 
update(UpdateStorageCredential request) { String path = String.format("/api/2.1/unity-catalog/storage-credentials/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, StorageCredentialInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, StorageCredentialInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ValidateStorageCredentialResponse validate(ValidateStorageCredential request) { String path = "/api/2.1/unity-catalog/validate-storage-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ValidateStorageCredentialResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ValidateStorageCredentialResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java index e9eab87e7..d6ade950b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of SystemSchemas */ @Generated @@ -21,9 +22,14 @@ public void disable(DisableRequest request) { String.format( "/api/2.1/unity-catalog/metastores/%s/systemschemas/%s", request.getMetastoreId(), request.getSchemaName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DisableResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DisableResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,9 +38,14 @@ public void enable(EnableRequest request) { String.format( "/api/2.1/unity-catalog/metastores/%s/systemschemas/%s", request.getMetastoreId(), request.getSchemaName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.PUT(path, null, EnableResponse.class, headers); + try { + Request req = new Request("PUT", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, EnableResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + 
e.getMessage(), e); + } } @Override @@ -42,8 +53,13 @@ public ListSystemSchemasResponse list(ListSystemSchemasRequest request) { String path = String.format( "/api/2.1/unity-catalog/metastores/%s/systemschemas", request.getMetastoreId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListSystemSchemasResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListSystemSchemasResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java index cfe70f06d..e026a9c3f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of TableConstraints */ @Generated @@ -18,17 +19,27 @@ public TableConstraintsImpl(ApiClient apiClient) { @Override public TableConstraint create(CreateTableConstraint request) { String path = "/api/2.1/unity-catalog/constraints"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, TableConstraint.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, TableConstraint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteTableConstraintRequest request) { String path = String.format("/api/2.1/unity-catalog/constraints/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
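
Void service methods (delete, disable, enable and similar) keep the same shape as the value-returning ones: they still hand a concrete response class such as DeleteResponse to execute and simply discard the result. Sketch mirroring TableConstraintsImpl.delete above:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.catalog.DeleteResponse;
    import com.databricks.sdk.service.catalog.DeleteTableConstraintRequest;
    import java.io.IOException;

    class VoidCallSketch {
      // Void endpoint: execute() still receives a response class; the returned
      // (typically empty) DeleteResponse is simply ignored.
      static void delete(ApiClient apiClient, DeleteTableConstraintRequest request) {
        String path =
            String.format("/api/2.1/unity-catalog/constraints/%s", request.getFullName());
        try {
          Request req = new Request("DELETE", path);
          ApiClient.setQuery(req, request);
          req.withHeader("Accept", "application/json");
          apiClient.execute(req, DeleteResponse.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java index 2d1d63737..6f5b3304b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated;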
-import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Tables */ @Generated @@ -18,49 +19,79 @@ public TablesImpl(ApiClient apiClient) { @Override public void delete(DeleteTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public TableExistsResponse exists(ExistsRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s/exists", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, TableExistsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, TableExistsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public TableInfo get(GetTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, TableInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, TableInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListTablesResponse list(ListTablesRequest request) { String path = "/api/2.1/unity-catalog/tables"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListTablesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListTablesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListTableSummariesResponse listSummaries(ListSummariesRequest request) { String path = "/api/2.1/unity-catalog/table-summaries"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListTableSummariesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListTableSummariesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(UpdateTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); 
+ ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java index 562476973..8c3fab716 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of TemporaryTableCredentials */ @Generated @@ -19,9 +20,14 @@ public TemporaryTableCredentialsImpl(ApiClient apiClient) { public GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials( GenerateTemporaryTableCredentialRequest request) { String path = "/api/2.0/unity-catalog/temporary-table-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, GenerateTemporaryTableCredentialResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, GenerateTemporaryTableCredentialResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java index fa07ec0a7..00fe08a18 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Volumes */ @Generated @@ -18,41 +19,66 @@ public VolumesImpl(ApiClient apiClient) { @Override public VolumeInfo create(CreateVolumeRequestContent request) { String path = "/api/2.1/unity-catalog/volumes"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, VolumeInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, VolumeInfo.class); + } catch (IOException e) { + throw 
new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteVolumeRequest request) { String path = String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListVolumesResponseContent list(ListVolumesRequest request) { String path = "/api/2.1/unity-catalog/volumes"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListVolumesResponseContent.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListVolumesResponseContent.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public VolumeInfo read(ReadVolumeRequest request) { String path = String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, VolumeInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, VolumeInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public VolumeInfo update(UpdateVolumeRequestContent request) { String path = String.format("/api/2.1/unity-catalog/volumes/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, VolumeInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, VolumeInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java index 8e5f5114b..7516c5686 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of WorkspaceBindings */ @Generated @@ -19,9 +20,14 @@ public WorkspaceBindingsImpl(ApiClient apiClient) { public CurrentWorkspaceBindings get(GetWorkspaceBindingRequest request) { String path = String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); - Map headers = new 
HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CurrentWorkspaceBindings.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CurrentWorkspaceBindings.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -30,19 +36,29 @@ public WorkspaceBindingsResponse getBindings(GetBindingsRequest request) { String.format( "/api/2.1/unity-catalog/bindings/%s/%s", request.getSecurableType(), request.getSecurableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, WorkspaceBindingsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, WorkspaceBindingsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CurrentWorkspaceBindings update(UpdateWorkspaceBindings request) { String path = String.format("/api/2.1/unity-catalog/workspace-bindings/catalogs/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CurrentWorkspaceBindings.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CurrentWorkspaceBindings.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -51,9 +67,14 @@ public WorkspaceBindingsResponse updateBindings(UpdateWorkspaceBindingsParameter String.format( "/api/2.1/unity-catalog/bindings/%s/%s", request.getSecurableType(), request.getSecurableName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, WorkspaceBindingsResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceBindingsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java index fb91ae406..444feb55d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.cleanrooms; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of 
CleanRoomAssets */ @Generated @@ -18,10 +19,15 @@ public CleanRoomAssetsImpl(ApiClient apiClient) { @Override public CleanRoomAsset create(CreateCleanRoomAssetRequest request) { String path = String.format("/api/2.0/clean-rooms/%s/assets", request.getCleanRoomName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getAsset(), CleanRoomAsset.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getAsset())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CleanRoomAsset.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -30,9 +36,14 @@ public void delete(DeleteCleanRoomAssetRequest request) { String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteCleanRoomAssetResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteCleanRoomAssetResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,17 +52,27 @@ public CleanRoomAsset get(GetCleanRoomAssetRequest request) { String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", request.getCleanRoomName(), request.getAssetType(), request.getAssetFullName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CleanRoomAsset.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CleanRoomAsset.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListCleanRoomAssetsResponse list(ListCleanRoomAssetsRequest request) { String path = String.format("/api/2.0/clean-rooms/%s/assets", request.getCleanRoomName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCleanRoomAssetsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCleanRoomAssetsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -60,9 +81,14 @@ public CleanRoomAsset update(UpdateCleanRoomAssetRequest request) { String.format( "/api/2.0/clean-rooms/%s/assets/%s/%s", request.getCleanRoomName(), request.getAssetType(), request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request.getAsset(), CleanRoomAsset.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getAsset())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, 
CleanRoomAsset.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java index 5c703d7e8..e406e6a03 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomTaskRunsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.cleanrooms; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of CleanRoomTaskRuns */ @Generated @@ -18,8 +19,13 @@ public CleanRoomTaskRunsImpl(ApiClient apiClient) { @Override public ListCleanRoomNotebookTaskRunsResponse list(ListCleanRoomNotebookTaskRunsRequest request) { String path = String.format("/api/2.0/clean-rooms/%s/runs", request.getCleanRoomName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCleanRoomNotebookTaskRunsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCleanRoomNotebookTaskRunsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java index 60cc9e296..b0bacf5d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.cleanrooms; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of CleanRooms */ @Generated @@ -18,10 +19,15 @@ public CleanRoomsImpl(ApiClient apiClient) { @Override public CleanRoom create(CreateCleanRoomRequest request) { String path = "/api/2.0/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getCleanRoom(), CleanRoom.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getCleanRoom())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CleanRoom.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -29,43 +35,67 @@ public CreateCleanRoomOutputCatalogResponse createOutputCatalog( CreateCleanRoomOutputCatalogRequest request) { String path = String.format("/api/2.0/clean-rooms/%s/output-catalogs", 
request.getCleanRoomName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST( - path, request.getOutputCatalog(), CreateCleanRoomOutputCatalogResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getOutputCatalog())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateCleanRoomOutputCatalogResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CleanRoom get(GetCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CleanRoom.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CleanRoom.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListCleanRoomsResponse list(ListCleanRoomsRequest request) { String path = "/api/2.0/clean-rooms"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListCleanRoomsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCleanRoomsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CleanRoom update(UpdateCleanRoomRequest request) { String path = String.format("/api/2.0/clean-rooms/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CleanRoom.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CleanRoom.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java index 4d6361229..ec3d88c43 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesImpl.java @@ -2,9 +2,10 
@@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ClusterPolicies */ @Generated @@ -18,36 +19,56 @@ public ClusterPoliciesImpl(ApiClient apiClient) { @Override public CreatePolicyResponse create(CreatePolicy request) { String path = "/api/2.0/policies/clusters/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreatePolicyResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreatePolicyResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeletePolicy request) { String path = "/api/2.0/policies/clusters/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeletePolicyResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeletePolicyResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void edit(EditPolicy request) { String path = "/api/2.0/policies/clusters/edit"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, EditPolicyResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, EditPolicyResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Policy get(GetClusterPolicyRequest request) { String path = "/api/2.0/policies/clusters/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Policy.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Policy.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -57,45 +78,70 @@ public GetClusterPolicyPermissionLevelsResponse getPermissionLevels( String.format( "/api/2.0/permissions/cluster-policies/%s/permissionLevels", request.getClusterPolicyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetClusterPolicyPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return 
apiClient.execute(req, GetClusterPolicyPermissionLevelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPolicyPermissions getPermissions(GetClusterPolicyPermissionsRequest request) { String path = String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ClusterPolicyPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ClusterPolicyPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListPoliciesResponse list(ListClusterPoliciesRequest request) { String path = "/api/2.0/policies/clusters/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListPoliciesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListPoliciesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPolicyPermissions setPermissions(ClusterPolicyPermissionsRequest request) { String path = String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, ClusterPolicyPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ClusterPolicyPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPolicyPermissions updatePermissions(ClusterPolicyPermissionsRequest request) { String path = String.format("/api/2.0/permissions/cluster-policies/%s", request.getClusterPolicyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ClusterPolicyPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ClusterPolicyPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java index 600996ab2..e847a4571 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import 
com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Clusters */ @Generated @@ -18,54 +19,84 @@ public ClustersImpl(ApiClient apiClient) { @Override public void changeOwner(ChangeClusterOwner request) { String path = "/api/2.1/clusters/change-owner"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, ChangeClusterOwnerResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ChangeClusterOwnerResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateClusterResponse create(CreateCluster request) { String path = "/api/2.1/clusters/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteCluster request) { String path = "/api/2.1/clusters/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeleteClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void edit(EditCluster request) { String path = "/api/2.1/clusters/edit"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, EditClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, EditClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetEventsResponse events(GetEvents request) { String path = "/api/2.1/clusters/events"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, GetEventsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + 
req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, GetEventsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterDetails get(GetClusterRequest request) { String path = "/api/2.1/clusters/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ClusterDetails.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ClusterDetails.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -73,129 +104,201 @@ public GetClusterPermissionLevelsResponse getPermissionLevels( GetClusterPermissionLevelsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s/permissionLevels", request.getClusterId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetClusterPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetClusterPermissionLevelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPermissions getPermissions(GetClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ClusterPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ClusterPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListClustersResponse list(ListClustersRequest request) { String path = "/api/2.1/clusters/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListClustersResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListClustersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListNodeTypesResponse listNodeTypes() { String path = "/api/2.1/clusters/list-node-types"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListNodeTypesResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListNodeTypesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListAvailableZonesResponse listZones() { String path = "/api/2.1/clusters/list-zones"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListAvailableZonesResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return 
apiClient.execute(req, ListAvailableZonesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void permanentDelete(PermanentDeleteCluster request) { String path = "/api/2.1/clusters/permanent-delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, PermanentDeleteClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PermanentDeleteClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void pin(PinCluster request) { String path = "/api/2.1/clusters/pin"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, PinClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PinClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void resize(ResizeCluster request) { String path = "/api/2.1/clusters/resize"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, ResizeClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ResizeClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void restart(RestartCluster request) { String path = "/api/2.1/clusters/restart"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, RestartClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, RestartClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPermissions setPermissions(ClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, ClusterPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ClusterPermissions.class); + } catch 
(IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetSparkVersionsResponse sparkVersions() { String path = "/api/2.1/clusters/spark-versions"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, GetSparkVersionsResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetSparkVersionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void start(StartCluster request) { String path = "/api/2.1/clusters/start"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, StartClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, StartClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void unpin(UnpinCluster request) { String path = "/api/2.1/clusters/unpin"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, UnpinClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UnpinClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(UpdateCluster request) { String path = "/api/2.1/clusters/update"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, UpdateClusterResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateClusterResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterPermissions updatePermissions(ClusterPermissionsRequest request) { String path = String.format("/api/2.0/permissions/clusters/%s", request.getClusterId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ClusterPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ClusterPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java index 69c6503e3..3cd62c5cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExecutionImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of CommandExecution */ @Generated @@ -18,52 +19,82 @@ public CommandExecutionImpl(ApiClient apiClient) { @Override public void cancel(CancelCommand request) { String path = "/api/1.2/commands/cancel"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, CancelResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, CancelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CommandStatusResponse commandStatus(CommandStatusRequest request) { String path = "/api/1.2/commands/status"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CommandStatusResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, CommandStatusResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ContextStatusResponse contextStatus(ContextStatusRequest request) { String path = "/api/1.2/contexts/status"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ContextStatusResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ContextStatusResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Created create(CreateContext request) { String path = "/api/1.2/contexts/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Created.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Created.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void destroy(DestroyContext request) { String path = "/api/1.2/contexts/destroy"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DestroyResponse.class, 
headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DestroyResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Created execute(Command request) { String path = "/api/1.2/commands/execute"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Created.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Created.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java index a791cec65..283ea3002 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of GlobalInitScripts */ @Generated @@ -18,40 +19,64 @@ public GlobalInitScriptsImpl(ApiClient apiClient) { @Override public CreateResponse create(GlobalInitScriptCreateRequest request) { String path = "/api/2.0/global-init-scripts"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteGlobalInitScriptRequest request) { String path = String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request) { String path = String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GlobalInitScriptDetailsWithContent.class, headers); + try { + Request req = new Request("GET", path); + 
ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GlobalInitScriptDetailsWithContent.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListGlobalInitScriptsResponse list() { String path = "/api/2.0/global-init-scripts"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListGlobalInitScriptsResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListGlobalInitScriptsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(GlobalInitScriptUpdateRequest request) { String path = String.format("/api/2.0/global-init-scripts/%s", request.getScriptId()); - Map headers = new HashMap<>(); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java index 88a13977f..b80dd7710 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of InstancePools */ @Generated @@ -18,36 +19,56 @@ public InstancePoolsImpl(ApiClient apiClient) { @Override public CreateInstancePoolResponse create(CreateInstancePool request) { String path = "/api/2.0/instance-pools/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateInstancePoolResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateInstancePoolResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteInstancePool request) { String path = "/api/2.0/instance-pools/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteInstancePoolResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", 
"application/json"); + apiClient.execute(req, DeleteInstancePoolResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void edit(EditInstancePool request) { String path = "/api/2.0/instance-pools/edit"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, EditInstancePoolResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, EditInstancePoolResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetInstancePool get(GetInstancePoolRequest request) { String path = "/api/2.0/instance-pools/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetInstancePool.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetInstancePool.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -56,45 +77,69 @@ public GetInstancePoolPermissionLevelsResponse getPermissionLevels( String path = String.format( "/api/2.0/permissions/instance-pools/%s/permissionLevels", request.getInstancePoolId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetInstancePoolPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetInstancePoolPermissionLevelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public InstancePoolPermissions getPermissions(GetInstancePoolPermissionsRequest request) { String path = String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, InstancePoolPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, InstancePoolPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListInstancePools list() { String path = "/api/2.0/instance-pools/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListInstancePools.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListInstancePools.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public InstancePoolPermissions setPermissions(InstancePoolPermissionsRequest request) { String path = String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - 
headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, InstancePoolPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, InstancePoolPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public InstancePoolPermissions updatePermissions(InstancePoolPermissionsRequest request) { String path = String.format("/api/2.0/permissions/instance-pools/%s", request.getInstancePoolId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, InstancePoolPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, InstancePoolPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java index 72801e081..de759151d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of InstanceProfiles */ @Generated @@ -18,35 +19,54 @@ public InstanceProfilesImpl(ApiClient apiClient) { @Override public void add(AddInstanceProfile request) { String path = "/api/2.0/instance-profiles/add"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, AddResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, AddResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void edit(InstanceProfile request) { String path = "/api/2.0/instance-profiles/edit"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, EditResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, EditResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public 
ListInstanceProfilesResponse list() { String path = "/api/2.0/instance-profiles/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListInstanceProfilesResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListInstanceProfilesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void remove(RemoveInstanceProfile request) { String path = "/api/2.0/instance-profiles/remove"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, RemoveResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, RemoveResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java index 35da6cc08..2b8b647f2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Libraries */ @Generated @@ -18,34 +19,53 @@ public LibrariesImpl(ApiClient apiClient) { @Override public ListAllClusterLibraryStatusesResponse allClusterStatuses() { String path = "/api/2.0/libraries/all-cluster-statuses"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListAllClusterLibraryStatusesResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAllClusterLibraryStatusesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ClusterLibraryStatuses clusterStatus(ClusterStatus request) { String path = "/api/2.0/libraries/cluster-status"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ClusterLibraryStatuses.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ClusterLibraryStatuses.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void install(InstallLibraries request) { String path = "/api/2.0/libraries/install"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, InstallLibrariesResponse.class, headers); + try { + Request req = new Request("POST", 
path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, InstallLibrariesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void uninstall(UninstallLibraries request) { String path = "/api/2.0/libraries/uninstall"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, UninstallLibrariesResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UninstallLibrariesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
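The hunks above are representative of the whole migration, so the new GET call shape is distilled once here. A minimal caller-side sketch, assuming an already-configured ApiClient; the wrapper class and method are illustrative only, not part of the SDK:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.service.compute.ClusterLibraryStatuses;
import com.databricks.sdk.service.compute.ClusterStatus;
import java.io.IOException;

// Sketch of the GET pattern: the old apiClient.GET(path, request, Target.class, headers)
// overload becomes an explicit Request built up and handed to execute(...).
class GetWithQuerySketch {
  static ClusterLibraryStatuses clusterStatus(ApiClient apiClient, ClusterStatus request) {
    try {
      Request req = new Request("GET", "/api/2.0/libraries/cluster-status");
      // setQuery (now public static) serializes the request POJO's fields into
      // query-string parameters, since a GET carries no body.
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      return apiClient.execute(req, ClusterLibraryStatuses.class);
    } catch (IOException e) {
      // execute(...) now throws IOException, so every call site wraps it.
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}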
"/api/2.0/policies/clusters/list-compliance"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListClusterCompliancesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListClusterCompliancesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java index 9902a2134..90a79ba2c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamiliesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.compute; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of PolicyFamilies */ @Generated @@ -18,16 +19,26 @@ public PolicyFamiliesImpl(ApiClient apiClient) { @Override public PolicyFamily get(GetPolicyFamilyRequest request) { String path = String.format("/api/2.0/policy-families/%s", request.getPolicyFamilyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, PolicyFamily.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PolicyFamily.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListPolicyFamiliesResponse list(ListPolicyFamiliesRequest request) { String path = "/api/2.0/policy-families"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListPolicyFamiliesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListPolicyFamiliesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index fc0c9236f..ce93fcdb5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Genie */ @Generated @@ -21,10 +22,15 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) String.format( 
"/api/2.0/genie/spaces/%s/conversations/%s/messages", request.getSpaceId(), request.getConversationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, GenieMessage.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, GenieMessage.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -34,9 +40,14 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( String.format( "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/execute-query", request.getSpaceId(), request.getConversationId(), request.getMessageId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.POST(path, null, GenieGetMessageQueryResultResponse.class, headers); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -45,9 +56,14 @@ public GenieMessage getMessage(GenieGetConversationMessageRequest request) { String.format( "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s", request.getSpaceId(), request.getConversationId(), request.getMessageId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GenieMessage.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieMessage.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -57,9 +73,14 @@ public GenieGetMessageQueryResultResponse getMessageQueryResult( String.format( "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/query-result", request.getSpaceId(), request.getConversationId(), request.getMessageId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GenieGetMessageQueryResultResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GenieGetMessageQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -67,9 +88,14 @@ public GenieStartConversationResponse startConversation( GenieStartConversationMessageRequest request) { String path = String.format("/api/2.0/genie/spaces/%s/start-conversation", request.getSpaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, GenieStartConversationResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index 3d9689b2d..ac931a0e7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.dashboards; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Lakeview */ @Generated @@ -18,20 +19,30 @@ public LakeviewImpl(ApiClient apiClient) { @Override public Dashboard create(CreateDashboardRequest request) { String path = "/api/2.0/lakeview/dashboards"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getDashboard(), Dashboard.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getDashboard())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Schedule createSchedule(CreateScheduleRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/schedules", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getSchedule(), Schedule.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getSchedule())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Schedule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -40,10 +51,15 @@ public Subscription createSubscription(CreateSubscriptionRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s/subscriptions", request.getDashboardId(), request.getScheduleId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request.getSubscription(), Subscription.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request.getSubscription())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Subscription.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -52,9 +68,14 @@ public void deleteSchedule(DeleteScheduleRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s", request.getDashboardId(), request.getScheduleId()); - Map<String, String> headers = new
HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteScheduleResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteScheduleResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -63,26 +84,41 @@ public void deleteSubscription(DeleteSubscriptionRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s/subscriptions/%s", request.getDashboardId(), request.getScheduleId(), request.getSubscriptionId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteSubscriptionResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteSubscriptionResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Dashboard get(GetDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Dashboard.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public PublishedDashboard getPublished(GetPublishedDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, PublishedDashboard.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PublishedDashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -91,9 +127,14 @@ public Schedule getSchedule(GetScheduleRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s", request.getDashboardId(), request.getScheduleId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Schedule.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Schedule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -102,26 +143,41 @@ public Subscription getSubscription(GetSubscriptionRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s/subscriptions/%s", request.getDashboardId(), request.getScheduleId(), request.getSubscriptionId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Subscription.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return
apiClient.execute(req, Subscription.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListDashboardsResponse list(ListDashboardsRequest request) { String path = "/api/2.0/lakeview/dashboards"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListDashboardsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDashboardsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListSchedulesResponse listSchedules(ListSchedulesRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/schedules", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListSchedulesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListSchedulesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -130,54 +186,84 @@ public ListSubscriptionsResponse listSubscriptions(ListSubscriptionsRequest requ String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s/subscriptions", request.getDashboardId(), request.getScheduleId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListSubscriptionsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListSubscriptionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Dashboard migrate(MigrateDashboardRequest request) { String path = "/api/2.0/lakeview/dashboards/migrate"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Dashboard.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public PublishedDashboard publish(PublishRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, PublishedDashboard.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PublishedDashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void trash(TrashDashboardRequest
request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, TrashDashboardResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, TrashDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void unpublish(UnpublishDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, UnpublishDashboardResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, UnpublishDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Dashboard update(UpdateDashboardRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request.getDashboard(), Dashboard.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request.getDashboard())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -186,9 +272,14 @@ public Schedule updateSchedule(UpdateScheduleRequest request) { String.format( "/api/2.0/lakeview/dashboards/%s/schedules/%s", request.getDashboardId(), request.getScheduleId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request.getSchedule(), Schedule.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request.getSchedule())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Schedule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
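LakeviewImpl also shows the DELETE shape: no body, so the request POJO contributes only path segments and query parameters, and the void methods still call execute(...) so HTTP errors surface. A minimal sketch, assuming the Lakeview classes above; the wrapper class is illustrative only:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.service.dashboards.TrashDashboardRequest;
import com.databricks.sdk.service.dashboards.TrashDashboardResponse;
import java.io.IOException;

// Sketch of the DELETE pattern: build the path from the request, attach any
// remaining fields as query parameters, and discard the deserialized response.
class DeleteSketch {
  static void trash(ApiClient apiClient, TrashDashboardRequest request) {
    String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId());
    try {
      Request req = new Request("DELETE", path);
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      apiClient.execute(req, TrashDashboardResponse.class); // result intentionally ignored
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}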
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java index 5db9fa8fa..1887e9e8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Dbfs */ @Generated @@ -18,87 +19,137 @@ public DbfsImpl(ApiClient apiClient) { @Override public void addBlock(AddBlock request) { String path = "/api/2.0/dbfs/add-block"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, AddBlockResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, AddBlockResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void close(Close request) { String path = "/api/2.0/dbfs/close"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, CloseResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, CloseResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateResponse create(Create request) { String path = "/api/2.0/dbfs/create"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(Delete request) { String path = "/api/2.0/dbfs/delete"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public FileInfo getStatus(GetStatusRequest request) { String path = "/api/2.0/dbfs/get-status"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, FileInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, FileInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListStatusResponse list(ListDbfsRequest request) { String path = "/api/2.0/dbfs/list"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListStatusResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return
apiClient.execute(req, ListStatusResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void mkdirs(MkDirs request) { String path = "/api/2.0/dbfs/mkdirs"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, MkDirsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, MkDirsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void move(Move request) { String path = "/api/2.0/dbfs/move"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, MoveResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, MoveResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void put(Put request) { String path = "/api/2.0/dbfs/put"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, PutResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PutResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ReadResponse read(ReadDbfsRequest request) { String path = "/api/2.0/dbfs/read"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ReadResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ReadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java index 95adf6bef..508253f77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; import com.databricks.sdk.core.http.Encoding; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Files */ @Generated @@ -22,8 +23,13 @@ public void createDirectory(CreateDirectoryRequest request) { String.format( "/api/2.0/fs/directories%s",
Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); - Map<String, String> headers = new HashMap<>(); - apiClient.PUT(path, null, CreateDirectoryResponse.class, headers); + try { + Request req = new Request("PUT", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, CreateDirectoryResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,8 +37,13 @@ public void delete(DeleteFileRequest request) { String path = String.format( "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); - Map<String, String> headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,8 +52,13 @@ public void deleteDirectory(DeleteDirectoryRequest request) { String.format( "/api/2.0/fs/directories%s", Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); - Map<String, String> headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteDirectoryResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteDirectoryResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -50,9 +66,14 @@ public DownloadResponse download(DownloadRequest request) { String path = String.format( "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/octet-stream"); - return apiClient.GET(path, request, DownloadResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/octet-stream"); + return apiClient.execute(req, DownloadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -61,8 +82,13 @@ public void getDirectoryMetadata(GetDirectoryMetadataRequest request) { String.format( "/api/2.0/fs/directories%s", Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); - Map<String, String> headers = new HashMap<>(); - apiClient.HEAD(path, request, GetDirectoryMetadataResponse.class, headers); + try { + Request req = new Request("HEAD", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, GetDirectoryMetadataResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -70,8 +96,13 @@ public GetMetadataResponse getMetadata(GetMetadataRequest request) { String path = String.format( "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); - Map<String, String> headers = new HashMap<>(); - return apiClient.HEAD(path, request, GetMetadataResponse.class, headers); + try { + Request req = new Request("HEAD", path); + ApiClient.setQuery(req, request); + return apiClient.execute(req, GetMetadataResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -80,9 +111,14 @@ public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest String.format( "/api/2.0/fs/directories%s", -
Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListDirectoryResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListDirectoryResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -90,8 +126,13 @@ public void upload(UploadRequest request) { String path = String.format( "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); - Map<String, String> headers = new HashMap<>(); - headers.put("Content-Type", "application/octet-stream"); - apiClient.PUT(path, request.getContents(), UploadResponse.class, headers); + try { + Request req = new Request("PUT", path, request.getContents()); + ApiClient.setQuery(req, request); + req.withHeader("Content-Type", "application/octet-stream"); + apiClient.execute(req, UploadResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
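FilesImpl covers the non-JSON cases: HEAD requests for metadata, and raw-byte uploads. Request also accepts an InputStream body, so upload streams request.getContents() instead of serializing JSON. A minimal sketch, assuming the files classes above; the wrapper class is illustrative only:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Encoding;
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.service.files.UploadRequest;
import com.databricks.sdk.service.files.UploadResponse;
import java.io.IOException;

// Sketch of the streaming PUT pattern: the InputStream constructor avoids
// buffering the payload in memory, and Content-Type marks it as raw bytes.
class StreamingUploadSketch {
  static void upload(ApiClient apiClient, UploadRequest request) {
    String path =
        String.format(
            "/api/2.0/fs/files%s",
            Encoding.encodeMultiSegmentPathParameter(request.getFilePath()));
    try {
      Request req = new Request("PUT", path, request.getContents()); // InputStream body
      ApiClient.setQuery(req, request);
      req.withHeader("Content-Type", "application/octet-stream");
      apiClient.execute(req, UploadResponse.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}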
headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, RuleSetResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RuleSetResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java index b3292da04..a0911c9c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountAccessControlProxyImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountAccessControlProxy */ @Generated @@ -19,25 +20,40 @@ public AccountAccessControlProxyImpl(ApiClient apiClient) { public GetAssignableRolesForResourceResponse getAssignableRolesForResource( GetAssignableRolesForResourceRequest request) { String path = "/api/2.0/preview/accounts/access-control/assignable-roles"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetAssignableRolesForResourceResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetAssignableRolesForResourceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RuleSetResponse getRuleSet(GetRuleSetRequest request) { String path = "/api/2.0/preview/accounts/access-control/rule-sets"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RuleSetResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RuleSetResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RuleSetResponse updateRuleSet(UpdateRuleSetRequest request) { String path = "/api/2.0/preview/accounts/access-control/rule-sets"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, RuleSetResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RuleSetResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java index 8ef9fae15..a429fe4f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountGroups */ @Generated @@ -19,10 +20,15 @@ public AccountGroupsImpl(ApiClient apiClient) { public Group create(Group request) { String path = String.format("/api/2.0/accounts/%s/scim/v2/Groups", apiClient.configuredAccountID()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Group.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,8 +37,13 @@ public void delete(DeleteAccountGroupRequest request) { String.format( "/api/2.0/accounts/%s/scim/v2/Groups/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,18 +52,28 @@ public Group get(GetAccountGroupRequest request) { String.format( "/api/2.0/accounts/%s/scim/v2/Groups/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Group.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Group.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListGroupsResponse list(ListAccountGroupsRequest request) { String path = String.format("/api/2.0/accounts/%s/scim/v2/Groups", apiClient.configuredAccountID()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListGroupsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListGroupsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -61,10 +82,15 @@ public void patch(PartialUpdate request) { String.format( "/api/2.0/accounts/%s/scim/v2/Groups/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept",
"application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, PatchResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PatchResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -73,9 +99,14 @@ public void update(Group request) { String.format( "/api/2.0/accounts/%s/scim/v2/Groups/%s", apiClient.configuredAccountID(), request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java index de3e4ffb3..fc614cefe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountServicePrincipals */ @Generated @@ -20,10 +21,15 @@ public ServicePrincipal create(ServicePrincipal request) { String path = String.format( "/api/2.0/accounts/%s/scim/v2/ServicePrincipals", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ServicePrincipal.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,8 +38,13 @@ public void delete(DeleteAccountServicePrincipalRequest request) { String.format( "/api/2.0/accounts/%s/scim/v2/ServicePrincipals/%s", apiClient.configuredAccountID(), request.getId()); - Map headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -42,9 +53,14 @@ public ServicePrincipal get(GetAccountServicePrincipalRequest request) { String.format( 
"/api/2.0/accounts/%s/scim/v2/ServicePrincipals/%s", apiClient.configuredAccountID(), request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ServicePrincipal.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ServicePrincipal.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -52,9 +68,14 @@ public ListServicePrincipalResponse list(ListAccountServicePrincipalsRequest req String path = String.format( "/api/2.0/accounts/%s/scim/v2/ServicePrincipals", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListServicePrincipalResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListServicePrincipalResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -63,10 +84,15 @@ public void patch(PartialUpdate request) { String.format( "/api/2.0/accounts/%s/scim/v2/ServicePrincipals/%s", apiClient.configuredAccountID(), request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, PatchResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PatchResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -75,9 +101,14 @@ public void update(ServicePrincipal request) { String.format( "/api/2.0/accounts/%s/scim/v2/ServicePrincipals/%s", apiClient.configuredAccountID(), request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java index a8b1262db..a8e6fa307 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AccountUsers */ @Generated @@ -19,10 
+20,15 @@ public AccountUsersImpl(ApiClient apiClient) { public User create(User request) { String path = String.format("/api/2.0/accounts/%s/scim/v2/Users", apiClient.configuredAccountID()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, User.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,8 +37,13 @@ public void delete(DeleteAccountUserRequest request) { String.format( "/api/2.0/accounts/%s/scim/v2/Users/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -41,18 +52,28 @@ public User get(GetAccountUserRequest request) { String.format( "/api/2.0/accounts/%s/scim/v2/Users/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, User.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListUsersResponse list(ListAccountUsersRequest request) { String path = String.format("/api/2.0/accounts/%s/scim/v2/Users", apiClient.configuredAccountID()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListUsersResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListUsersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -61,10 +82,15 @@ public void patch(PartialUpdate request) { String.format( "/api/2.0/accounts/%s/scim/v2/Users/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, PatchResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PatchResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -73,9 +99,14 @@ public void update(User request) { String.format( "/api/2.0/accounts/%s/scim/v2/Users/%s", apiClient.configuredAccountID(), request.getId()); - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); -
apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserImpl.java index d33b0f8b1..37e2dc04c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/CurrentUserImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of CurrentUser */ @Generated @@ -18,8 +19,12 @@ public CurrentUserImpl(ApiClient apiClient) { @Override public User me() { String path = "/api/2.0/preview/scim/v2/Me"; - Map<String, String> headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, User.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } }
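The same try/catch recurs verbatim in every migrated method above. A hypothetical helper, not part of this change, shows how the generator could centralize the IOException-to-DatabricksException translation:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import java.io.IOException;

// Hypothetical CheckedExecute helper (illustrative only): wraps execute(...) so
// generated methods would shrink to request construction plus one run(...) call.
final class CheckedExecute {
  private CheckedExecute() {}

  static <T> T run(ApiClient apiClient, Request req, Class<T> target) {
    try {
      return apiClient.execute(req, target);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}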
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java
index ac1d74f5b..e4e091671 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Groups */
 @Generated
@@ -18,50 +19,80 @@ public GroupsImpl(ApiClient apiClient) {
   @Override
   public Group create(Group request) {
     String path = "/api/2.0/preview/scim/v2/Groups";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Group.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Group.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteGroupRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public Group get(GetGroupRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, Group.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, Group.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListGroupsResponse list(ListGroupsRequest request) {
     String path = "/api/2.0/preview/scim/v2/Groups";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListGroupsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListGroupsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void patch(PartialUpdate request) {
     String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, PatchResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, PatchResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void update(Group request) {
     String path = String.format("/api/2.0/preview/scim/v2/Groups/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PUT(path, request, UpdateResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java
index e3cca9622..ef88fff0e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of PermissionMigration */
 @Generated
@@ -18,9 +19,14 @@ public PermissionMigrationImpl(ApiClient apiClient) {
   @Override
   public MigratePermissionsResponse migratePermissions(MigratePermissionsRequest request) {
     String path = "/api/2.0/permissionmigration";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, MigratePermissionsResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, MigratePermissionsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
index cd11548dd..af9d0c100 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Permissions */
 @Generated
@@ -21,9 +22,14 @@ public ObjectPermissions get(GetPermissionRequest request) {
     String path =
         String.format(
             "/api/2.0/permissions/%s/%s",
             request.getRequestObjectType(), request.getRequestObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ObjectPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ObjectPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -32,9 +38,14 @@ public GetPermissionLevelsResponse getPermissionLevels(GetPermissionLevelsReques
     String path =
         String.format(
             "/api/2.0/permissions/%s/%s/permissionLevels",
             request.getRequestObjectType(), request.getRequestObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -43,10 +54,15 @@ public ObjectPermissions set(PermissionsRequest request) {
     String path =
         String.format(
             "/api/2.0/permissions/%s/%s",
             request.getRequestObjectType(), request.getRequestObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, ObjectPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ObjectPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -55,9 +71,14 @@ public ObjectPermissions update(PermissionsRequest request) {
     String path =
         String.format(
             "/api/2.0/permissions/%s/%s",
             request.getRequestObjectType(), request.getRequestObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, ObjectPermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ObjectPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java
index b19f3705d..516abd5d2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ServicePrincipals */
 @Generated
@@ -18,50 +19,80 @@ public ServicePrincipalsImpl(ApiClient apiClient) {
   @Override
   public ServicePrincipal create(ServicePrincipal request) {
     String path = "/api/2.0/preview/scim/v2/ServicePrincipals";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ServicePrincipal.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ServicePrincipal.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteServicePrincipalRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ServicePrincipal get(GetServicePrincipalRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ServicePrincipal.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ServicePrincipal.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListServicePrincipalResponse list(ListServicePrincipalsRequest request) { String path = "/api/2.0/preview/scim/v2/ServicePrincipals"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListServicePrincipalResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListServicePrincipalResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void patch(PartialUpdate request) { String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, PatchResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, PatchResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(ServicePrincipal request) { String path = String.format("/api/2.0/preview/scim/v2/ServicePrincipals/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java index f63afd101..309d6bd5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Users */ @Generated @@ -18,84 +19,132 @@ public UsersImpl(ApiClient apiClient) { @Override public User create(User request) { String path = "/api/2.0/preview/scim/v2/Users"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, User.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, User.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + 
+    }
   }

   @Override
   public void delete(DeleteUserRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public User get(GetUserRequest request) {
     String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, User.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, User.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetPasswordPermissionLevelsResponse getPermissionLevels() {
     String path = "/api/2.0/permissions/authorization/passwords/permissionLevels";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, GetPasswordPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPasswordPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public PasswordPermissions getPermissions() {
     String path = "/api/2.0/permissions/authorization/passwords";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, PasswordPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, PasswordPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListUsersResponse list(ListUsersRequest request) {
     String path = "/api/2.0/preview/scim/v2/Users";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListUsersResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListUsersResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void patch(PartialUpdate request) {
     String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, PatchResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, PatchResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public PasswordPermissions setPermissions(PasswordPermissionsRequest request) {
     String path = "/api/2.0/permissions/authorization/passwords";
"/api/2.0/permissions/authorization/passwords"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, PasswordPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PasswordPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(User request) { String path = String.format("/api/2.0/preview/scim/v2/Users/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public PasswordPermissions updatePermissions(PasswordPermissionsRequest request) { String path = "/api/2.0/permissions/authorization/passwords"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, PasswordPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PasswordPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java index 556ac8bdc..d163fe44c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.iam; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of WorkspaceAssignment */ @Generated @@ -21,9 +22,14 @@ public void delete(DeleteWorkspaceAssignmentRequest request) { String.format( "/api/2.0/accounts/%s/workspaces/%s/permissionassignments/principals/%s", apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteWorkspacePermissionAssignmentResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteWorkspacePermissionAssignmentResponse.class); + } catch 
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -32,9 +38,14 @@ public WorkspacePermissions get(GetWorkspaceAssignmentRequest request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/workspaces/%s/permissionassignments/permissions",
             apiClient.configuredAccountID(), request.getWorkspaceId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, WorkspacePermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, WorkspacePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -43,9 +54,14 @@ public PermissionAssignments list(ListWorkspaceAssignmentRequest request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/workspaces/%s/permissionassignments",
             apiClient.configuredAccountID(), request.getWorkspaceId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, PermissionAssignments.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, PermissionAssignments.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -54,9 +70,14 @@ public PermissionAssignment update(UpdateWorkspaceAssignments request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/workspaces/%s/permissionassignments/principals/%s",
             apiClient.configuredAccountID(), request.getWorkspaceId(), request.getPrincipalId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, PermissionAssignment.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, PermissionAssignment.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
index 145ece228..48347babe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.jobs;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Jobs */
 @Generated
@@ -18,173 +19,273 @@ public JobsImpl(ApiClient apiClient) {
   @Override
   public void cancelAllRuns(CancelAllRuns request) {
     String path = "/api/2.1/jobs/runs/cancel-all";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, CancelAllRunsResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, CancelAllRunsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void cancelRun(CancelRun request) {
     String path = "/api/2.1/jobs/runs/cancel";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, CancelRunResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, CancelRunResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public CreateResponse create(CreateJob request) {
     String path = "/api/2.1/jobs/create";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteJob request) {
     String path = "/api/2.1/jobs/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void deleteRun(DeleteRun request) {
     String path = "/api/2.1/jobs/runs/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, DeleteRunResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, DeleteRunResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ExportRunOutput exportRun(ExportRunRequest request) {
     String path = "/api/2.1/jobs/runs/export";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ExportRunOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ExportRunOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public Job get(GetJobRequest request) {
path = "/api/2.1/jobs/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Job.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Job.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetJobPermissionLevelsResponse getPermissionLevels(GetJobPermissionLevelsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s/permissionLevels", request.getJobId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetJobPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetJobPermissionLevelsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public JobPermissions getPermissions(GetJobPermissionsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, JobPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, JobPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Run getRun(GetRunRequest request) { String path = "/api/2.1/jobs/runs/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Run.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Run.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RunOutput getRunOutput(GetRunOutputRequest request) { String path = "/api/2.1/jobs/runs/get-output"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RunOutput.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RunOutput.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListJobsResponse list(ListJobsRequest request) { String path = "/api/2.1/jobs/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListJobsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListJobsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListRunsResponse listRuns(ListRunsRequest request) { String path = "/api/2.1/jobs/runs/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, 
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListRunsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RepairRunResponse repairRun(RepairRun request) {
     String path = "/api/2.1/jobs/runs/repair";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, RepairRunResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RepairRunResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void reset(ResetJob request) {
     String path = "/api/2.1/jobs/reset";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, ResetResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, ResetResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RunNowResponse runNow(RunNow request) {
     String path = "/api/2.1/jobs/run-now";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, RunNowResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RunNowResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public JobPermissions setPermissions(JobPermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, JobPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, JobPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public SubmitRunResponse submit(SubmitRun request) {
     String path = "/api/2.1/jobs/runs/submit";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, SubmitRunResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SubmitRunResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(UpdateJob request) { String path = "/api/2.1/jobs/update"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public JobPermissions updatePermissions(JobPermissionsRequest request) { String path = String.format("/api/2.0/permissions/jobs/%s", request.getJobId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, JobPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, JobPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java index 05d498e66..6319d399d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.jobs; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of PolicyComplianceForJobs */ @Generated @@ -18,25 +19,40 @@ public PolicyComplianceForJobsImpl(ApiClient apiClient) { @Override public EnforcePolicyComplianceResponse enforceCompliance(EnforcePolicyComplianceRequest request) { String path = "/api/2.0/policies/jobs/enforce-compliance"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, EnforcePolicyComplianceResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EnforcePolicyComplianceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetPolicyComplianceResponse getCompliance(GetPolicyComplianceRequest request) { String path = "/api/2.0/policies/jobs/get-compliance"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java
index 05d498e66..6319d399d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PolicyComplianceForJobsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.jobs;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of PolicyComplianceForJobs */
 @Generated
@@ -18,25 +19,40 @@ public PolicyComplianceForJobsImpl(ApiClient apiClient) {
   @Override
   public EnforcePolicyComplianceResponse enforceCompliance(
       EnforcePolicyComplianceRequest request) {
     String path = "/api/2.0/policies/jobs/enforce-compliance";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, EnforcePolicyComplianceResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, EnforcePolicyComplianceResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetPolicyComplianceResponse getCompliance(GetPolicyComplianceRequest request) {
     String path = "/api/2.0/policies/jobs/get-compliance";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPolicyComplianceResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPolicyComplianceResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListJobComplianceForPolicyResponse listCompliance(ListJobComplianceRequest request) {
     String path = "/api/2.0/policies/jobs/list-compliance";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListJobComplianceForPolicyResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListJobComplianceForPolicyResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java
index 98c870847..2a781b223 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ConsumerFulfillments */
 @Generated
@@ -19,9 +20,14 @@ public ConsumerFulfillmentsImpl(ApiClient apiClient) {
   public GetListingContentMetadataResponse get(GetListingContentMetadataRequest request) {
     String path =
         String.format("/api/2.1/marketplace-consumer/listings/%s/content", request.getListingId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetListingContentMetadataResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetListingContentMetadataResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -29,8 +35,13 @@ public ListFulfillmentsResponse list(ListFulfillmentsRequest request) {
     String path =
         String.format(
             "/api/2.1/marketplace-consumer/listings/%s/fulfillments", request.getListingId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListFulfillmentsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListFulfillmentsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
index 6a8f91809..c817eb350 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ConsumerInstallations */
 @Generated
@@ -20,10 +21,15 @@ public Installation create(CreateInstallationRequest request) {
     String path =
         String.format(
             "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Installation.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Installation.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -32,17 +38,27 @@ public void delete(DeleteInstallationRequest request) {
     String path =
         String.format(
             "/api/2.1/marketplace-consumer/listings/%s/installations/%s",
             request.getListingId(), request.getInstallationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteInstallationResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteInstallationResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListAllInstallationsResponse list(ListAllInstallationsRequest request) {
     String path = "/api/2.1/marketplace-consumer/installations";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListAllInstallationsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListAllInstallationsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -50,9 +66,14 @@ public ListInstallationsResponse listListingInstallations(ListInstallationsReque
     String path =
         String.format(
             "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListInstallationsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListInstallationsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -61,9 +82,14 @@ public UpdateInstallationResponse update(UpdateInstallationRequest request) {
     String path =
         String.format(
"/api/2.1/marketplace-consumer/listings/%s/installations/%s", request.getListingId(), request.getInstallationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, UpdateInstallationResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateInstallationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java index 52b059699..c1177e723 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ConsumerListings */ @Generated @@ -18,32 +19,52 @@ public ConsumerListingsImpl(ApiClient apiClient) { @Override public BatchGetListingsResponse batchGet(BatchGetListingsRequest request) { String path = "/api/2.1/marketplace-consumer/listings:batchGet"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, BatchGetListingsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, BatchGetListingsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetListingResponse get(GetListingRequest request) { String path = String.format("/api/2.1/marketplace-consumer/listings/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetListingResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetListingResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListListingsResponse list(ListListingsRequest request) { String path = "/api/2.1/marketplace-consumer/listings"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListListingsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListListingsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public SearchListingsResponse search(SearchListingsRequest request) { String path = 
"/api/2.1/marketplace-consumer/search-listings"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, SearchListingsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, SearchListingsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java index 6f35542a7..be0d25da4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ConsumerPersonalizationRequests */ @Generated @@ -21,10 +22,15 @@ public CreatePersonalizationRequestResponse create(CreatePersonalizationRequest String.format( "/api/2.1/marketplace-consumer/listings/%s/personalization-requests", request.getListingId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreatePersonalizationRequestResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreatePersonalizationRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -33,17 +39,27 @@ public GetPersonalizationRequestResponse get(GetPersonalizationRequestRequest re String.format( "/api/2.1/marketplace-consumer/listings/%s/personalization-requests", request.getListingId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetPersonalizationRequestResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetPersonalizationRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListAllPersonalizationRequestsResponse list( ListAllPersonalizationRequestsRequest request) { String path = "/api/2.1/marketplace-consumer/personalization-requests"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAllPersonalizationRequestsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListAllPersonalizationRequestsResponse.class); + } catch (IOException e) 
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java
index 33e933fa7..f767a09f5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ConsumerProviders */
 @Generated
@@ -18,24 +19,39 @@ public ConsumerProvidersImpl(ApiClient apiClient) {
   @Override
   public BatchGetProvidersResponse batchGet(BatchGetProvidersRequest request) {
     String path = "/api/2.1/marketplace-consumer/providers:batchGet";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, BatchGetProvidersResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, BatchGetProvidersResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetProviderResponse get(GetProviderRequest request) {
     String path = String.format("/api/2.1/marketplace-consumer/providers/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetProviderResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetProviderResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListProvidersResponse list(ListProvidersRequest request) {
     String path = "/api/2.1/marketplace-consumer/providers";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListProvidersResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListProvidersResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
index 19ca56ec5..6baa7bce6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ProviderExchangeFilters */
 @Generated
@@ -18,34 +19,54 @@ public ProviderExchangeFiltersImpl(ApiClient apiClient) {
   @Override
   public CreateExchangeFilterResponse create(CreateExchangeFilterRequest request) {
     String path = "/api/2.0/marketplace-exchange/filters";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateExchangeFilterResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateExchangeFilterResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteExchangeFilterRequest request) {
     String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteExchangeFilterResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteExchangeFilterResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListExchangeFiltersResponse list(ListExchangeFiltersRequest request) {
     String path = "/api/2.0/marketplace-exchange/filters";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListExchangeFiltersResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListExchangeFiltersResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest request) {
     String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, UpdateExchangeFilterResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpdateExchangeFilterResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
index 2c47a19e8..5ac2520b3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
@@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace;

diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
index 2c47a19e8..5ac2520b3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ProviderExchanges */
 @Generated
@@ -18,78 +19,123 @@ public ProviderExchangesImpl(ApiClient apiClient) {
   @Override
   public AddExchangeForListingResponse addListingToExchange(AddExchangeForListingRequest request) {
     String path = "/api/2.0/marketplace-exchange/exchanges-for-listing";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, AddExchangeForListingResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, AddExchangeForListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public CreateExchangeResponse create(CreateExchangeRequest request) {
     String path = "/api/2.0/marketplace-exchange/exchanges";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateExchangeResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateExchangeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteExchangeRequest request) {
     String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteExchangeResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteExchangeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void deleteListingFromExchange(RemoveExchangeForListingRequest request) {
     String path =
         String.format("/api/2.0/marketplace-exchange/exchanges-for-listing/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, RemoveExchangeForListingResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, RemoveExchangeForListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetExchangeResponse get(GetExchangeRequest request) {
     String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetExchangeResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetExchangeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListExchangesResponse list(ListExchangesRequest request) {
     String path = "/api/2.0/marketplace-exchange/exchanges";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListExchangesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListExchangesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListExchangesForListingResponse listExchangesForListing(
       ListExchangesForListingRequest request) {
     String path = "/api/2.0/marketplace-exchange/exchanges-for-listing";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListExchangesForListingResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListExchangesForListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListListingsForExchangeResponse listListingsForExchange(
       ListListingsForExchangeRequest request) {
     String path = "/api/2.0/marketplace-exchange/listings-for-exchange";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListListingsForExchangeResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListListingsForExchangeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public UpdateExchangeResponse update(UpdateExchangeRequest request) {
     String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, UpdateExchangeResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpdateExchangeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
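The void-returning delete methods keep calling execute() with a concrete *Response class even though the result is discarded; the response body is still deserialized before being thrown away, matching the old DELETE helper's behavior. A sketch of that shape, with the exchangeId value invented for illustration:

    // Query parameters still apply to DELETE: the typed request's fields are
    // appended to the URL, but no body and no Content-Type header are sent.
    String exchangeId = "abc123"; // hypothetical
    Request req =
        new Request("DELETE", String.format("/api/2.0/marketplace-exchange/exchanges/%s", exchangeId));
    ApiClient.setQuery(req, request);
    req.withHeader("Accept", "application/json");
    apiClient.execute(req, DeleteExchangeResponse.class); // return value intentionally ignored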
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
index df582e455..e8fe25bac 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ProviderFiles */
 @Generated
@@ -18,33 +19,53 @@ public ProviderFilesImpl(ApiClient apiClient) {
   @Override
   public CreateFileResponse create(CreateFileRequest request) {
     String path = "/api/2.0/marketplace-provider/files";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateFileResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateFileResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteFileRequest request) {
     String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteFileResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteFileResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetFileResponse get(GetFileRequest request) {
     String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetFileResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetFileResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListFilesResponse list(ListFilesRequest request) {
     String path = "/api/2.0/marketplace-provider/files";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListFilesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListFilesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
index ba119793a..ed54330ad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ProviderListings */
 @Generated
@@ -18,42 +19,67 @@ public ProviderListingsImpl(ApiClient apiClient) {
   @Override
   public CreateListingResponse create(CreateListingRequest request) {
     String path = "/api/2.0/marketplace-provider/listing";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateListingResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteListingRequest request) {
     String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteListingResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetListingResponse get(GetListingRequest request) {
     String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetListingResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetListingsResponse list(GetListingsRequest request) {
     String path = "/api/2.0/marketplace-provider/listings";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetListingsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetListingsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public UpdateListingResponse update(UpdateListingRequest request) {
     String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, UpdateListingResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpdateListingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
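Since these files are generated, each method carries its own copy of the try/catch and header boilerplate. A hand-written client built on the same newly-public surface could factor it once; a purely hypothetical helper (not part of this change or the SDK) might look like:

    // Hypothetical utility, shown only to illustrate the new public ApiClient surface.
    static <T> T getJson(ApiClient apiClient, String path, Object request, Class<T> target) {
      try {
        Request req = new Request("GET", path);
        ApiClient.setQuery(req, request); // no-op when request is null
        req.withHeader("Accept", "application/json");
        return apiClient.execute(req, target);
      } catch (IOException e) {
        throw new DatabricksException("IO error: " + e.getMessage(), e);
      }
    }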
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
index ba791c43d..a268b3199 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.marketplace;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ProviderPersonalizationRequests */
 @Generated
@@ -19,9 +20,14 @@ public ProviderPersonalizationRequestsImpl(ApiClient apiClient) {
   public ListAllPersonalizationRequestsResponse list(
       ListAllPersonalizationRequestsRequest request) {
     String path = "/api/2.0/marketplace-provider/personalization-requests";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListAllPersonalizationRequestsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListAllPersonalizationRequestsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -30,9 +36,14 @@ public UpdatePersonalizationRequestResponse update(UpdatePersonalizationRequestR
         String.format(
             "/api/2.0/marketplace-provider/listings/%s/personalization-requests/%s/request-status",
             request.getListingId(), request.getRequestId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, UpdatePersonalizationRequestResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpdatePersonalizationRequestResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
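update() above shows how resource identifiers split between the URL and the rest of the request: getListingId() and getRequestId() are bound into the path by String.format, while the remaining fields travel in the serialized body and, via setQuery, in any query parameters. Substituting hypothetical ids makes the formatted route concrete:

    // Hypothetical ids, purely to show the resulting route.
    String path =
        String.format(
            "/api/2.0/marketplace-provider/listings/%s/personalization-requests/%s/request-status",
            "lst-123", "req-456");
    // -> /api/2.0/marketplace-provider/listings/lst-123/personalization-requests/req-456/request-status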
"/api/2.0/marketplace-provider/analytics_dashboard"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.POST(path, ProviderAnalyticsDashboard.class, headers); + try { + Request req = new Request("POST", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ProviderAnalyticsDashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListProviderAnalyticsDashboardResponse get() { String path = "/api/2.0/marketplace-provider/analytics_dashboard"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListProviderAnalyticsDashboardResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProviderAnalyticsDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetLatestVersionProviderAnalyticsDashboardResponse getLatestVersion() { String path = "/api/2.0/marketplace-provider/analytics_dashboard/latest"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, GetLatestVersionProviderAnalyticsDashboardResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetLatestVersionProviderAnalyticsDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -45,9 +58,14 @@ public UpdateProviderAnalyticsDashboardResponse update( UpdateProviderAnalyticsDashboardRequest request) { String path = String.format("/api/2.0/marketplace-provider/analytics_dashboard/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, UpdateProviderAnalyticsDashboardResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateProviderAnalyticsDashboardResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java index d25b538bc..618decdf9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.marketplace; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ProviderProviders */ @Generated @@ -18,42 +19,67 @@ public ProviderProvidersImpl(ApiClient apiClient) { @Override public CreateProviderResponse create(CreateProviderRequest request) { String 
path = "/api/2.0/marketplace-provider/provider"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateProviderResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateProviderResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteProviderResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteProviderResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetProviderResponse get(GetProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetProviderResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetProviderResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListProvidersResponse list(ListProvidersRequest request) { String path = "/api/2.0/marketplace-provider/providers"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListProvidersResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProvidersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public UpdateProviderResponse update(UpdateProviderRequest request) { String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, UpdateProviderResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateProviderResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java index 64bb0f4e2..d6c119d10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Experiments */ @Generated @@ -18,79 +19,124 @@ public ExperimentsImpl(ApiClient apiClient) { @Override public CreateExperimentResponse createExperiment(CreateExperiment request) { String path = "/api/2.0/mlflow/experiments/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateExperimentResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateRunResponse createRun(CreateRun request) { String path = "/api/2.0/mlflow/runs/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateRunResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateRunResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteExperiment(DeleteExperiment request) { String path = "/api/2.0/mlflow/experiments/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteExperimentResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeleteExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteRun(DeleteRun request) { String path = "/api/2.0/mlflow/runs/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteRunResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeleteRunResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public DeleteRunsResponse deleteRuns(DeleteRuns request) { String path = "/api/2.0/mlflow/databricks/runs/delete-runs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - 
headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, DeleteRunsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DeleteRunsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteTag(DeleteTag request) { String path = "/api/2.0/mlflow/runs/delete-tag"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, DeleteTagResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, DeleteTagResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetExperimentResponse getByName(GetByNameRequest request) { String path = "/api/2.0/mlflow/experiments/get-by-name"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetExperimentResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetExperimentResponse getExperiment(GetExperimentRequest request) { String path = "/api/2.0/mlflow/experiments/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetExperimentResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetMetricHistoryResponse getHistory(GetHistoryRequest request) { String path = "/api/2.0/mlflow/metrics/get-history"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetMetricHistoryResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetMetricHistoryResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -99,184 +145,289 @@ public GetExperimentPermissionLevelsResponse getPermissionLevels( String path = String.format( "/api/2.0/permissions/experiments/%s/permissionLevels", request.getExperimentId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetExperimentPermissionLevelsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetExperimentPermissionLevelsResponse.class); + } catch (IOException 
e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ExperimentPermissions getPermissions(GetExperimentPermissionsRequest request) { String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ExperimentPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ExperimentPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetRunResponse getRun(GetRunRequest request) { String path = "/api/2.0/mlflow/runs/get"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetRunResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetRunResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListArtifactsResponse listArtifacts(ListArtifactsRequest request) { String path = "/api/2.0/mlflow/artifacts/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListArtifactsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListArtifactsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListExperimentsResponse listExperiments(ListExperimentsRequest request) { String path = "/api/2.0/mlflow/experiments/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListExperimentsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListExperimentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void logBatch(LogBatch request) { String path = "/api/2.0/mlflow/runs/log-batch"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, LogBatchResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogBatchResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void logInputs(LogInputs request) { String path = "/api/2.0/mlflow/runs/log-inputs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, LogInputsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", 
"application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogInputsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void logMetric(LogMetric request) { String path = "/api/2.0/mlflow/runs/log-metric"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, LogMetricResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogMetricResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void logModel(LogModel request) { String path = "/api/2.0/mlflow/runs/log-model"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, LogModelResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void logParam(LogParam request) { String path = "/api/2.0/mlflow/runs/log-parameter"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, LogParamResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, LogParamResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void restoreExperiment(RestoreExperiment request) { String path = "/api/2.0/mlflow/experiments/restore"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, RestoreExperimentResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, RestoreExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void restoreRun(RestoreRun request) { String path = "/api/2.0/mlflow/runs/restore"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, RestoreRunResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, RestoreRunResponse.class); + } catch (IOException e) { + throw new 
DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RestoreRunsResponse restoreRuns(RestoreRuns request) { String path = "/api/2.0/mlflow/databricks/runs/restore-runs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, RestoreRunsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RestoreRunsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public SearchExperimentsResponse searchExperiments(SearchExperiments request) { String path = "/api/2.0/mlflow/experiments/search"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, SearchExperimentsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SearchExperimentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public SearchRunsResponse searchRuns(SearchRuns request) { String path = "/api/2.0/mlflow/runs/search"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, SearchRunsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, SearchRunsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void setExperimentTag(SetExperimentTag request) { String path = "/api/2.0/mlflow/experiments/set-experiment-tag"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, SetExperimentTagResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, SetExperimentTagResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ExperimentPermissions setPermissions(ExperimentPermissionsRequest request) { String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, ExperimentPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return 
apiClient.execute(req, ExperimentPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void setTag(SetTag request) { String path = "/api/2.0/mlflow/runs/set-tag"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, SetTagResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, SetTagResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void updateExperiment(UpdateExperiment request) { String path = "/api/2.0/mlflow/experiments/update"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, UpdateExperimentResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateExperimentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ExperimentPermissions updatePermissions(ExperimentPermissionsRequest request) { String path = String.format("/api/2.0/permissions/experiments/%s", request.getExperimentId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ExperimentPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ExperimentPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public UpdateRunResponse updateRun(UpdateRun request) { String path = "/api/2.0/mlflow/runs/update"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, UpdateRunResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, UpdateRunResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java index 5de9c8aee..357684e9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.ml; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import 
com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ModelRegistry */ @Generated @@ -19,145 +20,230 @@ public ModelRegistryImpl(ApiClient apiClient) { public ApproveTransitionRequestResponse approveTransitionRequest( ApproveTransitionRequest request) { String path = "/api/2.0/mlflow/transition-requests/approve"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, ApproveTransitionRequestResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ApproveTransitionRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateCommentResponse createComment(CreateComment request) { String path = "/api/2.0/mlflow/comments/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateCommentResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateCommentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateModelResponse createModel(CreateModelRequest request) { String path = "/api/2.0/mlflow/registered-models/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateModelResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateModelVersionResponse createModelVersion(CreateModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateModelVersionResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateModelVersionResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateTransitionRequestResponse createTransitionRequest(CreateTransitionRequest request) { String path = "/api/2.0/mlflow/transition-requests/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", 
"application/json"); - return apiClient.POST(path, request, CreateTransitionRequestResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateTransitionRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public CreateWebhookResponse createWebhook(CreateRegistryWebhook request) { String path = "/api/2.0/mlflow/registry-webhooks/create"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateWebhookResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateWebhookResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteComment(DeleteCommentRequest request) { String path = "/api/2.0/mlflow/comments/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteCommentResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteCommentResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteModel(DeleteModelRequest request) { String path = "/api/2.0/mlflow/registered-models/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteModelResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteModelResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteModelTag(DeleteModelTagRequest request) { String path = "/api/2.0/mlflow/registered-models/delete-tag"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteModelTagResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteModelTagResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void deleteModelVersion(DeleteModelVersionRequest request) { String path = "/api/2.0/mlflow/model-versions/delete"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteModelVersionResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteModelVersionResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void 
deleteModelVersionTag(DeleteModelVersionTagRequest request) {
     String path = "/api/2.0/mlflow/model-versions/delete-tag";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteModelVersionTagResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteModelVersionTagResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void deleteTransitionRequest(DeleteTransitionRequestRequest request) {
     String path = "/api/2.0/mlflow/transition-requests/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteTransitionRequestResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteTransitionRequestResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void deleteWebhook(DeleteWebhookRequest request) {
     String path = "/api/2.0/mlflow/registry-webhooks/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteWebhookResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteWebhookResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetLatestVersionsResponse getLatestVersions(GetLatestVersionsRequest request) {
     String path = "/api/2.0/mlflow/registered-models/get-latest-versions";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, GetLatestVersionsResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, GetLatestVersionsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetModelResponse getModel(GetModelRequest request) {
     String path = "/api/2.0/mlflow/databricks/registered-models/get";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetModelResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetModelResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetModelVersionResponse getModelVersion(GetModelVersionRequest request) {
     String path = "/api/2.0/mlflow/model-versions/get";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetModelVersionResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetModelVersionResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetModelVersionDownloadUriResponse getModelVersionDownloadUri(
       GetModelVersionDownloadUriRequest request) {
     String path = "/api/2.0/mlflow/model-versions/get-download-uri";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetModelVersionDownloadUriResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetModelVersionDownloadUriResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -167,168 +253,263 @@ public GetRegisteredModelPermissionLevelsResponse getPermissionLevels(
         String.format(
             "/api/2.0/permissions/registered-models/%s/permissionLevels",
             request.getRegisteredModelId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetRegisteredModelPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetRegisteredModelPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RegisteredModelPermissions getPermissions(GetRegisteredModelPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, RegisteredModelPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, RegisteredModelPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListModelsResponse listModels(ListModelsRequest request) {
     String path = "/api/2.0/mlflow/registered-models/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListModelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListModelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListTransitionRequestsResponse listTransitionRequests(
       ListTransitionRequestsRequest request) {
     String path = "/api/2.0/mlflow/transition-requests/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListTransitionRequestsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListTransitionRequestsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListRegistryWebhooks listWebhooks(ListWebhooksRequest request) {
     String path = "/api/2.0/mlflow/registry-webhooks/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListRegistryWebhooks.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListRegistryWebhooks.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RejectTransitionRequestResponse rejectTransitionRequest(RejectTransitionRequest request) {
     String path = "/api/2.0/mlflow/transition-requests/reject";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, RejectTransitionRequestResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RejectTransitionRequestResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RenameModelResponse renameModel(RenameModelRequest request) {
     String path = "/api/2.0/mlflow/registered-models/rename";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, RenameModelResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RenameModelResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public SearchModelVersionsResponse searchModelVersions(SearchModelVersionsRequest request) {
     String path = "/api/2.0/mlflow/model-versions/search";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, SearchModelVersionsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, SearchModelVersionsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public SearchModelsResponse searchModels(SearchModelsRequest request) {
     String path = "/api/2.0/mlflow/registered-models/search";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, SearchModelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, SearchModelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void setModelTag(SetModelTagRequest request) {
     String path = "/api/2.0/mlflow/registered-models/set-tag";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, SetModelTagResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, SetModelTagResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void setModelVersionTag(SetModelVersionTagRequest request) {
     String path = "/api/2.0/mlflow/model-versions/set-tag";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, SetModelVersionTagResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, SetModelVersionTagResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RegisteredModelPermissions setPermissions(RegisteredModelPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, RegisteredModelPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RegisteredModelPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public TestRegistryWebhookResponse testRegistryWebhook(TestRegistryWebhookRequest request) {
     String path = "/api/2.0/mlflow/registry-webhooks/test";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, TestRegistryWebhookResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, TestRegistryWebhookResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public TransitionStageResponse transitionStage(TransitionModelVersionStageDatabricks request) {
     String path = "/api/2.0/mlflow/databricks/model-versions/transition-stage";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, TransitionStageResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, TransitionStageResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public UpdateCommentResponse updateComment(UpdateComment request) {
     String path = "/api/2.0/mlflow/comments/update";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, UpdateCommentResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpdateCommentResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void updateModel(UpdateModelRequest request) {
     String path = "/api/2.0/mlflow/registered-models/update";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateModelResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateModelResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void updateModelVersion(UpdateModelVersionRequest request) {
     String path = "/api/2.0/mlflow/model-versions/update";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateModelVersionResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateModelVersionResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RegisteredModelPermissions updatePermissions(RegisteredModelPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/registered-models/%s", request.getRegisteredModelId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, RegisteredModelPermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RegisteredModelPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void updateWebhook(UpdateRegistryWebhook request) {
     String path = "/api/2.0/mlflow/registry-webhooks/update";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateWebhookResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateWebhookResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
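Every method in MlflowImpl above migrates the same way: the per-verb helpers (GET/POST/PATCH/PUT/DELETE plus a mutable headers map) give way to an explicit Request and a single execute call. A minimal sketch of the new pattern for a JSON POST, assuming only the ApiClient, Request, and DatabricksException signatures visible in this diff; ExampleRequest and ExampleResponse are hypothetical placeholder POJOs, not SDK types:

    // Hedged sketch of the call pattern used throughout this diff.
    // ExampleRequest/ExampleResponse are made-up POJOs for illustration.
    ExampleResponse postExample(ApiClient apiClient, ExampleRequest request) {
      String path = "/api/2.0/example"; // hypothetical endpoint
      try {
        // Body is serialized eagerly; query params and headers attach to the Request.
        Request req = new Request("POST", path, apiClient.serialize(request));
        ApiClient.setQuery(req, request);
        req.withHeader("Accept", "application/json");
        req.withHeader("Content-Type", "application/json");
        return apiClient.execute(req, ExampleResponse.class);
      } catch (IOException e) {
        // execute throws a checked IOException; each call site rewraps it unchecked.
        throw new DatabricksException("IO error: " + e.getMessage(), e);
      }
    }
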
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
index 1572ff64b..e61b618f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of AccountFederationPolicy */
 @Generated
@@ -19,10 +20,15 @@ public AccountFederationPolicyImpl(ApiClient apiClient) {
   public FederationPolicy create(CreateAccountFederationPolicyRequest request) {
     String path =
         String.format("/api/2.0/accounts/%s/federationPolicies", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request.getPolicy(), FederationPolicy.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request.getPolicy()));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -31,9 +37,14 @@ public void delete(DeleteAccountFederationPolicyRequest request) {
         String.format(
             "/api/2.0/accounts/%s/federationPolicies/%s",
             apiClient.configuredAccountID(), request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -42,18 +53,28 @@ public FederationPolicy get(GetAccountFederationPolicyRequest request) {
         String.format(
             "/api/2.0/accounts/%s/federationPolicies/%s",
             apiClient.configuredAccountID(), request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, FederationPolicy.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListFederationPoliciesResponse list(ListAccountFederationPoliciesRequest request) {
     String path =
         String.format("/api/2.0/accounts/%s/federationPolicies", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListFederationPoliciesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListFederationPoliciesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -62,9 +83,14 @@ public FederationPolicy update(UpdateAccountFederationPolicyRequest request) {
         String.format(
             "/api/2.0/accounts/%s/federationPolicies/%s",
             apiClient.configuredAccountID(), request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request.getPolicy(), FederationPolicy.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy()));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
index 3439785ba..3aa7acba1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of CustomAppIntegration */
 @Generated
@@ -20,10 +21,15 @@ public CreateCustomAppIntegrationOutput create(CreateCustomAppIntegration reques
     String path =
         String.format(
             "/api/2.0/accounts/%s/oauth2/custom-app-integrations", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateCustomAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateCustomAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -32,9 +38,14 @@ public void delete(DeleteCustomAppIntegrationRequest request) {
         String.format(
             "/api/2.0/accounts/%s/oauth2/custom-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteCustomAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteCustomAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -43,9 +54,14 @@ public GetCustomAppIntegrationOutput get(GetCustomAppIntegrationRequest request)
         String.format(
             "/api/2.0/accounts/%s/oauth2/custom-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetCustomAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetCustomAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -53,9 +69,14 @@ public GetCustomAppIntegrationsOutput list(ListCustomAppIntegrationsRequest requ
     String path =
         String.format(
             "/api/2.0/accounts/%s/oauth2/custom-app-integrations", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetCustomAppIntegrationsOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetCustomAppIntegrationsOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -64,9 +85,14 @@ public void update(UpdateCustomAppIntegration request) {
         String.format(
             "/api/2.0/accounts/%s/oauth2/custom-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateCustomAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateCustomAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java
index e16624d7d..0e53cf20a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OAuthPublishedAppsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of OAuthPublishedApps */
 @Generated
@@ -20,8 +21,13 @@ public GetPublishedAppsOutput list(ListOAuthPublishedAppsRequest request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-apps", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPublishedAppsOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPublishedAppsOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
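For the read-only verbs the Request carries no body; setQuery copies the request POJO's fields into query parameters instead. A sketch of a bare GET against the published-apps listing above, under the signatures shown in this diff ("123" is a made-up account ID, and the paging field names are an assumption about what the request POJO carries):

    Request req = new Request("GET", "/api/2.0/accounts/123/oauth2/published-apps");
    ApiClient.setQuery(req, request); // paging fields, if set, end up as e.g. ?page_size=...&page_token=...
    req.withHeader("Accept", "application/json");
    GetPublishedAppsOutput out = apiClient.execute(req, GetPublishedAppsOutput.class);
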
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
index ca48f2962..e2592779a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of PublishedAppIntegration */
 @Generated
@@ -21,10 +22,15 @@ public CreatePublishedAppIntegrationOutput create(CreatePublishedAppIntegration
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-app-integrations",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreatePublishedAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreatePublishedAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -33,9 +39,14 @@ public void delete(DeletePublishedAppIntegrationRequest request) {
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeletePublishedAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeletePublishedAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -44,9 +55,14 @@ public GetPublishedAppIntegrationOutput get(GetPublishedAppIntegrationRequest re
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPublishedAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPublishedAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -55,9 +71,14 @@ public GetPublishedAppIntegrationsOutput list(ListPublishedAppIntegrationsReques
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-app-integrations",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPublishedAppIntegrationsOutput.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPublishedAppIntegrationsOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -66,9 +87,14 @@ public void update(UpdatePublishedAppIntegration request) {
         String.format(
             "/api/2.0/accounts/%s/oauth2/published-app-integrations/%s",
             apiClient.configuredAccountID(), request.getIntegrationId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdatePublishedAppIntegrationOutput.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdatePublishedAppIntegrationOutput.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
index 4e3570397..322518bc9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ServicePrincipalFederationPolicy */
 @Generated
@@ -21,10 +22,15 @@ public FederationPolicy create(CreateServicePrincipalFederationPolicyRequest req
         String.format(
             "/api/2.0/accounts/%s/servicePrincipals/%s/federationPolicies",
             apiClient.configuredAccountID(), request.getServicePrincipalId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request.getPolicy(), FederationPolicy.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request.getPolicy()));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -35,9 +41,14 @@ public void delete(DeleteServicePrincipalFederationPolicyRequest request) {
             apiClient.configuredAccountID(),
             request.getServicePrincipalId(),
             request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -48,9 +59,14 @@ public FederationPolicy get(GetServicePrincipalFederationPolicyRequest request)
             apiClient.configuredAccountID(),
             request.getServicePrincipalId(),
             request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, FederationPolicy.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -60,9 +76,14 @@ public ListFederationPoliciesResponse list(
         String.format(
             "/api/2.0/accounts/%s/servicePrincipals/%s/federationPolicies",
             apiClient.configuredAccountID(), request.getServicePrincipalId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListFederationPoliciesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListFederationPoliciesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -73,9 +94,14 @@ public FederationPolicy update(UpdateServicePrincipalFederationPolicyRequest req
             apiClient.configuredAccountID(),
             request.getServicePrincipalId(),
             request.getPolicyId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request.getPolicy(), FederationPolicy.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy()));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, FederationPolicy.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
index 48584fed4..dedcff20f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.oauth2;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of ServicePrincipalSecrets */
 @Generated
@@ -21,9 +22,14 @@ public CreateServicePrincipalSecretResponse create(CreateServicePrincipalSecretR
         String.format(
             "/api/2.0/accounts/%s/servicePrincipals/%s/credentials/secrets",
             apiClient.configuredAccountID(), request.getServicePrincipalId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.POST(path, null, CreateServicePrincipalSecretResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, CreateServicePrincipalSecretResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -34,8 +40,13 @@ public void delete(DeleteServicePrincipalSecretRequest request) {
             apiClient.configuredAccountID(),
             request.getServicePrincipalId(),
             request.getSecretId());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -44,8 +55,13 @@ public ListServicePrincipalSecretsResponse list(ListServicePrincipalSecretsReque
         String.format(
             "/api/2.0/accounts/%s/servicePrincipals/%s/credentials/secrets",
             apiClient.configuredAccountID(), request.getServicePrincipalId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListServicePrincipalSecretsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListServicePrincipalSecretsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
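ServicePrincipalSecretsImpl.create is the one spot above where a POST carries no payload: the old helper was invoked with a null body, and the replacement simply uses the body-less Request constructor. A sketch of the distinction, assuming only the two Request constructors used throughout this diff:

    // Body-less POST (old form: apiClient.POST(path, null, ...)).
    Request bare = new Request("POST", path);
    // POST with a JSON body (old form: apiClient.POST(path, request, ...)).
    Request withBody = new Request("POST", path, apiClient.serialize(request));
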
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
index 7c302ddbb..91077477d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.pipelines;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Pipelines */
 @Generated
@@ -18,26 +19,41 @@ public PipelinesImpl(ApiClient apiClient) {
   @Override
   public CreatePipelineResponse create(CreatePipeline request) {
     String path = "/api/2.0/pipelines";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreatePipelineResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreatePipelineResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeletePipelineRequest request) {
     String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeletePipelineResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeletePipelineResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetPipelineResponse get(GetPipelineRequest request) {
     String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPipelineResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPipelineResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -46,17 +62,27 @@ public GetPipelinePermissionLevelsResponse getPermissionLevels(
     String path =
         String.format(
             "/api/2.0/permissions/pipelines/%s/permissionLevels", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetPipelinePermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetPipelinePermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public PipelinePermissions getPermissions(GetPipelinePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, PipelinePermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, PipelinePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -64,76 +90,121 @@ public GetUpdateResponse getUpdate(GetUpdateRequest request) {
     String path =
         String.format(
             "/api/2.0/pipelines/%s/updates/%s", request.getPipelineId(), request.getUpdateId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetUpdateResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetUpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListPipelineEventsResponse listPipelineEvents(ListPipelineEventsRequest request) {
     String path = String.format("/api/2.0/pipelines/%s/events", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListPipelineEventsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListPipelineEventsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListPipelinesResponse listPipelines(ListPipelinesRequest request) {
     String path = "/api/2.0/pipelines";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListPipelinesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListPipelinesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListUpdatesResponse listUpdates(ListUpdatesRequest request) {
     String path = String.format("/api/2.0/pipelines/%s/updates", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListUpdatesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListUpdatesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public PipelinePermissions setPermissions(PipelinePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, PipelinePermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, PipelinePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public StartUpdateResponse startUpdate(StartUpdate request) {
     String path = String.format("/api/2.0/pipelines/%s/updates", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, StartUpdateResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, StartUpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void stop(StopRequest request) {
     String path = String.format("/api/2.0/pipelines/%s/stop", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.POST(path, null, StopPipelineResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, StopPipelineResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void update(EditPipeline request) {
     String path = String.format("/api/2.0/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PUT(path, request, EditPipelineResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, EditPipelineResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public PipelinePermissions updatePermissions(PipelinePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, PipelinePermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, PipelinePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
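Although every call site in PipelinesImpl now wraps a checked IOException, the surface seen by SDK users is unchanged: they still observe only the unchecked DatabricksException. A hypothetical caller, with a made-up pipeline ID and the fluent setter assumed from the generated request classes:

    try {
      GetPipelineRequest getReq = new GetPipelineRequest().setPipelineId("1234-abcd");
      GetPipelineResponse pipeline = pipelines.get(getReq); // pipelines: a PipelinesImpl
    } catch (DatabricksException e) {
      // Same exception type callers caught before this refactor.
    }
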
new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Credential.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Collection list() { String path = String.format("/api/2.0/accounts/%s/credentials", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, Credential.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, Credential.class); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java index 2d989c0a0..8e103d747 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/EncryptionKeysImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of EncryptionKeys */ @Generated @@ -21,10 +22,15 @@ public CustomerManagedKey create(CreateCustomerManagedKeyRequest request) { String path = String.format( "/api/2.0/accounts/%s/customer-managed-keys", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CustomerManagedKey.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CustomerManagedKey.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -33,9 +39,14 @@ public void delete(DeleteEncryptionKeyRequest request) { String.format( "/api/2.0/accounts/%s/customer-managed-keys/%s", apiClient.configuredAccountID(), request.getCustomerManagedKeyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -44,9 +55,14 @@ public CustomerManagedKey get(GetEncryptionKeyRequest request) { String.format( "/api/2.0/accounts/%s/customer-managed-keys/%s", apiClient.configuredAccountID(), request.getCustomerManagedKeyId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CustomerManagedKey.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return 
apiClient.execute(req, CustomerManagedKey.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -54,8 +70,8 @@ public Collection list() { String path = String.format( "/api/2.0/accounts/%s/customer-managed-keys", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, CustomerManagedKey.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, CustomerManagedKey.class); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java index 6ef7eb0c6..cdd5f594b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/NetworksImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of Networks */ @Generated @@ -19,10 +20,15 @@ public NetworksImpl(ApiClient apiClient) { @Override public Network create(CreateNetworkRequest request) { String path = String.format("/api/2.0/accounts/%s/networks", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Network.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Network.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,9 +37,14 @@ public void delete(DeleteNetworkRequest request) { String.format( "/api/2.0/accounts/%s/networks/%s", apiClient.configuredAccountID(), request.getNetworkId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -42,16 +53,21 @@ public Network get(GetNetworkRequest request) { String.format( "/api/2.0/accounts/%s/networks/%s", apiClient.configuredAccountID(), request.getNetworkId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Network.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Network.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Collection list() { String path = 
String.format("/api/2.0/accounts/%s/networks", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, Network.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, Network.class); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java index 16b1d94e3..5fd0babfc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/PrivateAccessImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of PrivateAccess */ @Generated @@ -21,10 +22,15 @@ public PrivateAccessSettings create(UpsertPrivateAccessSettingsRequest request) String path = String.format( "/api/2.0/accounts/%s/private-access-settings", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, PrivateAccessSettings.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PrivateAccessSettings.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -33,9 +39,14 @@ public void delete(DeletePrivateAccesRequest request) { String.format( "/api/2.0/accounts/%s/private-access-settings/%s", apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -44,9 +55,14 @@ public PrivateAccessSettings get(GetPrivateAccesRequest request) { String.format( "/api/2.0/accounts/%s/private-access-settings/%s", apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, PrivateAccessSettings.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PrivateAccessSettings.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -54,9 +70,9 @@ public Collection list() { String path = String.format( "/api/2.0/accounts/%s/private-access-settings", 
apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, PrivateAccessSettings.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, PrivateAccessSettings.class); } @Override @@ -65,9 +81,14 @@ public void replace(UpsertPrivateAccessSettingsRequest request) { String.format( "/api/2.0/accounts/%s/private-access-settings/%s", apiClient.configuredAccountID(), request.getPrivateAccessSettingsId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, ReplaceResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ReplaceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java index 57b28d9b2..3489de525 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/StorageImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of Storage */ @Generated @@ -21,10 +22,15 @@ public StorageConfiguration create(CreateStorageConfigurationRequest request) { String path = String.format( "/api/2.0/accounts/%s/storage-configurations", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, StorageConfiguration.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, StorageConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -33,9 +39,14 @@ public void delete(DeleteStorageRequest request) { String.format( "/api/2.0/accounts/%s/storage-configurations/%s", apiClient.configuredAccountID(), request.getStorageConfigurationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -44,9 +55,14 @@ public StorageConfiguration get(GetStorageRequest request) { String.format( 
"/api/2.0/accounts/%s/storage-configurations/%s", apiClient.configuredAccountID(), request.getStorageConfigurationId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, StorageConfiguration.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, StorageConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -54,8 +70,8 @@ public Collection list() { String path = String.format( "/api/2.0/accounts/%s/storage-configurations", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, StorageConfiguration.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, StorageConfiguration.class); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java index 4a4bf0623..68a709bc6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java @@ -2,10 +2,11 @@ package com.databricks.sdk.service.provisioning; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of VpcEndpoints */ @Generated @@ -20,10 +21,15 @@ public VpcEndpointsImpl(ApiClient apiClient) { public VpcEndpoint create(CreateVpcEndpointRequest request) { String path = String.format("/api/2.0/accounts/%s/vpc-endpoints", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, VpcEndpoint.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, VpcEndpoint.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,9 +38,14 @@ public void delete(DeleteVpcEndpointRequest request) { String.format( "/api/2.0/accounts/%s/vpc-endpoints/%s", apiClient.configuredAccountID(), request.getVpcEndpointId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -43,17 +54,22 @@ public VpcEndpoint get(GetVpcEndpointRequest request) { String.format( "/api/2.0/accounts/%s/vpc-endpoints/%s", apiClient.configuredAccountID(), 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
index 4a4bf0623..68a709bc6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpointsImpl.java
@@ -2,10 +2,11 @@
 package com.databricks.sdk.service.provisioning;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
+import java.io.IOException;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
 
 /** Package-local implementation of VpcEndpoints */
 @Generated
@@ -20,10 +21,15 @@ public VpcEndpointsImpl(ApiClient apiClient) {
   public VpcEndpoint create(CreateVpcEndpointRequest request) {
     String path =
         String.format("/api/2.0/accounts/%s/vpc-endpoints", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, VpcEndpoint.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, VpcEndpoint.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -32,9 +38,14 @@ public void delete(DeleteVpcEndpointRequest request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/vpc-endpoints/%s",
             apiClient.configuredAccountID(), request.getVpcEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -43,17 +54,22 @@ public VpcEndpoint get(GetVpcEndpointRequest request) {
     String path =
         String.format(
             "/api/2.0/accounts/%s/vpc-endpoints/%s",
             apiClient.configuredAccountID(), request.getVpcEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, VpcEndpoint.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, VpcEndpoint.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public Collection<VpcEndpoint> list() {
     String path =
         String.format("/api/2.0/accounts/%s/vpc-endpoints", apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.getCollection(path, null, VpcEndpoint.class, headers);
+    Request req = new Request("GET", path);
+    req.withHeader("Accept", "application/json");
+    return apiClient.getCollection(req, VpcEndpoint.class);
   }
 }
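Note: GET and DELETE stay bodiless, so they use the two-argument Request constructor, and the request POJO contributes only query parameters through the now-public ApiClient.setQuery. A sketch with the VpcEndpointsImpl types above; the wrapper class is hypothetical:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.provisioning.DeleteResponse;
    import com.databricks.sdk.service.provisioning.DeleteVpcEndpointRequest;
    import java.io.IOException;

    class DeleteVpcEndpointExample {
      // Bodiless DELETE: no serialize call; setQuery is the only use of the request.
      static void delete(ApiClient apiClient, String path, DeleteVpcEndpointRequest request) {
        try {
          Request req = new Request("DELETE", path);
          ApiClient.setQuery(req, request);
          req.withHeader("Accept", "application/json");
          apiClient.execute(req, DeleteResponse.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }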
Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Workspace.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Collection list() { String path = String.format("/api/2.0/accounts/%s/workspaces", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, Workspace.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, Workspace.class); } @Override @@ -61,9 +77,14 @@ public void update(UpdateWorkspaceRequest request) { String.format( "/api/2.0/accounts/%s/workspaces/%s", apiClient.configuredAccountID(), request.getWorkspaceId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java index b1bf77ab9..2dabe61d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsDataPlaneImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.serving; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of ServingEndpointsDataPlane */ @Generated @@ -18,9 +19,14 @@ public ServingEndpointsDataPlaneImpl(ApiClient apiClient) { @Override public QueryEndpointResponse query(QueryEndpointInput request) { String path = String.format("/serving-endpoints/%s/invocations", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, QueryEndpointResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, QueryEndpointResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java index 5e518c6f0..1f868b724 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java +++ 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
index 5e518c6f0..1f868b724 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
@@ -2,10 +2,11 @@
 package com.databricks.sdk.service.serving;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
+import java.io.IOException;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
 
 /** Package-local implementation of ServingEndpoints */
 @Generated
@@ -22,50 +23,80 @@ public BuildLogsResponse buildLogs(BuildLogsRequest request) {
         String.format(
             "/api/2.0/serving-endpoints/%s/served-models/%s/build-logs",
             request.getName(), request.getServedModelName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, BuildLogsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, BuildLogsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ServingEndpointDetailed create(CreateServingEndpoint request) {
     String path = "/api/2.0/serving-endpoints";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ServingEndpointDetailed.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ServingEndpointDetailed.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteServingEndpointRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ExportMetricsResponse exportMetrics(ExportMetricsRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/metrics", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "text/plain");
-    return apiClient.GET(path, request, ExportMetricsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "text/plain");
+      return apiClient.execute(req, ExportMetricsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ServingEndpointDetailed get(GetServingEndpointRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ServingEndpointDetailed.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ServingEndpointDetailed.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void getOpenApi(GetOpenApiRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/openapi", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.GET(path, request, GetOpenApiResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, GetOpenApiResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -75,26 +106,40 @@ public GetServingEndpointPermissionLevelsResponse getPermissionLevels(
         String.format(
             "/api/2.0/permissions/serving-endpoints/%s/permissionLevels",
             request.getServingEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetServingEndpointPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetServingEndpointPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ServingEndpointPermissions getPermissions(GetServingEndpointPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ServingEndpointPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ServingEndpointPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListEndpointsResponse list() {
     String path = "/api/2.0/serving-endpoints";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, ListEndpointsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListEndpointsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -103,73 +148,108 @@ public ServerLogsResponse logs(LogsRequest request) {
     String path =
         String.format(
             "/api/2.0/serving-endpoints/%s/served-models/%s/logs",
             request.getName(), request.getServedModelName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ServerLogsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ServerLogsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public Collection<EndpointTag> patch(PatchServingEndpointTags request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/tags", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.getCollection(path, null, EndpointTag.class, headers);
+    Request req = new Request("GET", path);
+    req.withHeader("Accept", "application/json");
+    req.withHeader("Content-Type", "application/json");
+    return apiClient.getCollection(req, EndpointTag.class);
   }
 
   @Override
   public PutResponse put(PutRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/rate-limits", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, PutResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, PutResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/ai-gateway", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, PutAiGatewayResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, PutAiGatewayResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public QueryEndpointResponse query(QueryEndpointInput request) {
     String path = String.format("/serving-endpoints/%s/invocations", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, QueryEndpointResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, QueryEndpointResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ServingEndpointPermissions setPermissions(ServingEndpointPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, ServingEndpointPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ServingEndpointPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ServingEndpointDetailed updateConfig(EndpointCoreConfigInput request) {
     String path = String.format("/api/2.0/serving-endpoints/%s/config", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, ServingEndpointDetailed.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ServingEndpointDetailed.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
  }
 
   @Override
   public ServingEndpointPermissions updatePermissions(ServingEndpointPermissionsRequest request) {
     String path =
         String.format("/api/2.0/permissions/serving-endpoints/%s", request.getServingEndpointId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, ServingEndpointPermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ServingEndpointPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
"application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -42,18 +53,27 @@ public GetIpAccessListResponse get(GetAccountIpAccessListRequest request) { String.format( "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetIpAccessListResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetIpAccessListResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetIpAccessListsResponse list() { String path = String.format("/api/2.0/accounts/%s/ip-access-lists", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, GetIpAccessListsResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetIpAccessListsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -62,10 +82,15 @@ public void replace(ReplaceIpAccessList request) { String.format( "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, ReplaceResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ReplaceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -74,9 +99,14 @@ public void update(UpdateIpAccessList request) { String.format( "/api/2.0/accounts/%s/ip-access-lists/%s", apiClient.configuredAccountID(), request.getIpAccessListId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java index bca27cc40..460c056e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingAccessPolicyImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import 
com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AibiDashboardEmbeddingAccessPolicy */ @Generated @@ -19,28 +20,42 @@ public AibiDashboardEmbeddingAccessPolicyImpl(ApiClient apiClient) { public DeleteAibiDashboardEmbeddingAccessPolicySettingResponse delete( DeleteAibiDashboardEmbeddingAccessPolicySettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE( - path, request, DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AibiDashboardEmbeddingAccessPolicySetting get( GetAibiDashboardEmbeddingAccessPolicySettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AibiDashboardEmbeddingAccessPolicySetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AibiDashboardEmbeddingAccessPolicySetting update( UpdateAibiDashboardEmbeddingAccessPolicySettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, AibiDashboardEmbeddingAccessPolicySetting.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AibiDashboardEmbeddingAccessPolicySetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java index c26b1f5e8..983226018 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AibiDashboardEmbeddingApprovedDomainsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import 
java.util.Map; +import java.io.IOException; /** Package-local implementation of AibiDashboardEmbeddingApprovedDomains */ @Generated @@ -20,30 +21,43 @@ public AibiDashboardEmbeddingApprovedDomainsImpl(ApiClient apiClient) { public DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse delete( DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE( - path, request, DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute( + req, DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AibiDashboardEmbeddingApprovedDomainsSetting get( GetAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET( - path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, AibiDashboardEmbeddingApprovedDomainsSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public AibiDashboardEmbeddingApprovedDomainsSetting update( UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest request) { String path = "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH( - path, request, AibiDashboardEmbeddingApprovedDomainsSetting.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, AibiDashboardEmbeddingApprovedDomainsSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java index 9771064ad..381e6963e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of AutomaticClusterUpdate */ @Generated @@ -18,17 +19,27 @@ public AutomaticClusterUpdateImpl(ApiClient apiClient) { @Override public 
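Note: the workspace-level settings endpoints that follow differ from the provisioning ones only in their fixed ".../names/default" paths; there are no path parameters, so the request object feeds setQuery alone. A compact sketch of the shared get shape, using the AutomaticClusterUpdate types from the next file; the wrapper class is hypothetical:

    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksException;
    import com.databricks.sdk.core.http.Request;
    import com.databricks.sdk.service.settings.AutomaticClusterUpdateSetting;
    import com.databricks.sdk.service.settings.GetAutomaticClusterUpdateSettingRequest;
    import java.io.IOException;

    class GetSettingExample {
      // Singleton setting: the path is fixed, so only setQuery varies per request.
      static AutomaticClusterUpdateSetting get(
          ApiClient apiClient, GetAutomaticClusterUpdateSettingRequest request) {
        String path = "/api/2.0/settings/types/automatic_cluster_update/names/default";
        try {
          Request req = new Request("GET", path);
          ApiClient.setQuery(req, request);
          req.withHeader("Accept", "application/json");
          return apiClient.execute(req, AutomaticClusterUpdateSetting.class);
        } catch (IOException e) {
          throw new DatabricksException("IO error: " + e.getMessage(), e);
        }
      }
    }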
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java
index 9771064ad..381e6963e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of AutomaticClusterUpdate */
 @Generated
@@ -18,17 +19,27 @@ public AutomaticClusterUpdateImpl(ApiClient apiClient) {
   @Override
   public AutomaticClusterUpdateSetting get(GetAutomaticClusterUpdateSettingRequest request) {
     String path = "/api/2.0/settings/types/automatic_cluster_update/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, AutomaticClusterUpdateSetting.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, AutomaticClusterUpdateSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public AutomaticClusterUpdateSetting update(UpdateAutomaticClusterUpdateSettingRequest request) {
     String path = "/api/2.0/settings/types/automatic_cluster_update/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, AutomaticClusterUpdateSetting.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, AutomaticClusterUpdateSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java
index de288ccff..59531da21 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ComplianceSecurityProfileImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of ComplianceSecurityProfile */
 @Generated
@@ -18,18 +19,28 @@ public ComplianceSecurityProfileImpl(ApiClient apiClient) {
   @Override
   public ComplianceSecurityProfileSetting get(GetComplianceSecurityProfileSettingRequest request) {
     String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ComplianceSecurityProfileSetting.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ComplianceSecurityProfileSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ComplianceSecurityProfileSetting update(
       UpdateComplianceSecurityProfileSettingRequest request) {
     String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, ComplianceSecurityProfileSetting.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ComplianceSecurityProfileSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java
index 420e18c9a..23639b484 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of CredentialsManager */
 @Generated
@@ -18,9 +19,14 @@ public CredentialsManagerImpl(ApiClient apiClient) {
   @Override
   public ExchangeTokenResponse exchangeToken(ExchangeTokenRequest request) {
     String path = "/api/2.0/credentials-manager/exchange-tokens/token";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ExchangeTokenResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ExchangeTokenResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java
index ee1e5cbb8..c30f6ef62 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of CspEnablementAccount */
 @Generated
@@ -21,9 +22,14 @@ public CspEnablementAccountSetting get(GetCspEnablementAccountSettingRequest req
         String.format(
             "/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, CspEnablementAccountSetting.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, CspEnablementAccountSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -32,9 +38,14 @@ public CspEnablementAccountSetting update(UpdateCspEnablementAccountSettingReque
         String.format(
             "/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, CspEnablementAccountSetting.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CspEnablementAccountSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java
index 6912944f9..aefab63c0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of DefaultNamespace */
 @Generated
@@ -19,25 +20,40 @@ public DeleteDefaultNamespaceSettingResponse delete(
       DeleteDefaultNamespaceSettingRequest request) {
     String path = "/api/2.0/settings/types/default_namespace_ws/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.DELETE(path, request, DeleteDefaultNamespaceSettingResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DeleteDefaultNamespaceSettingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public DefaultNamespaceSetting get(GetDefaultNamespaceSettingRequest request) {
     String path = "/api/2.0/settings/types/default_namespace_ws/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, DefaultNamespaceSetting.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DefaultNamespaceSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public DefaultNamespaceSetting update(UpdateDefaultNamespaceSettingRequest request) {
     String path = "/api/2.0/settings/types/default_namespace_ws/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, DefaultNamespaceSetting.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, DefaultNamespaceSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java
index 2c7380c97..341d1925f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of DisableLegacyAccess */
 @Generated
@@ -18,25 +19,40 @@ public DisableLegacyAccessImpl(ApiClient apiClient) {
   @Override
   public DeleteDisableLegacyAccessResponse delete(DeleteDisableLegacyAccessRequest request) {
     String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.DELETE(path, request, DeleteDisableLegacyAccessResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DeleteDisableLegacyAccessResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public DisableLegacyAccess get(GetDisableLegacyAccessRequest request) {
     String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, DisableLegacyAccess.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DisableLegacyAccess.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public DisableLegacyAccess update(UpdateDisableLegacyAccessRequest request) {
     String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, DisableLegacyAccess.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, DisableLegacyAccess.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of DisableLegacyDbfs */ @Generated @@ -18,25 +19,40 @@ public DisableLegacyDbfsImpl(ApiClient apiClient) { @Override public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest request) { String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, DeleteDisableLegacyDbfsResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteDisableLegacyDbfsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public DisableLegacyDbfs get(GetDisableLegacyDbfsRequest request) { String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, DisableLegacyDbfs.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DisableLegacyDbfs.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest request) { String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, DisableLegacyDbfs.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DisableLegacyDbfs.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java index e039d940b..d97523df1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of DisableLegacyFeatures */ @Generated @@ -21,9 +22,14 @@ public 
DeleteDisableLegacyFeaturesResponse delete(DeleteDisableLegacyFeaturesReq String.format( "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, DeleteDisableLegacyFeaturesResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteDisableLegacyFeaturesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,9 +38,14 @@ public DisableLegacyFeatures get(GetDisableLegacyFeaturesRequest request) { String.format( "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, DisableLegacyFeatures.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DisableLegacyFeatures.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -43,9 +54,14 @@ public DisableLegacyFeatures update(UpdateDisableLegacyFeaturesRequest request) String.format( "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, DisableLegacyFeatures.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, DisableLegacyFeatures.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java index d6d8ede2b..c1d90c461 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoringImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of EnhancedSecurityMonitoring */ @Generated @@ -19,18 +20,28 @@ public EnhancedSecurityMonitoringImpl(ApiClient apiClient) { public EnhancedSecurityMonitoringSetting get( GetEnhancedSecurityMonitoringSettingRequest request) { String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, EnhancedSecurityMonitoringSetting.class, headers); + try { + Request req = new 
Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EnhancedSecurityMonitoringSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public EnhancedSecurityMonitoringSetting update( UpdateEnhancedSecurityMonitoringSettingRequest request) { String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, EnhancedSecurityMonitoringSetting.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EnhancedSecurityMonitoringSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java index 6e3fb4adc..16a15ef85 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of EsmEnablementAccount */ @Generated @@ -21,9 +22,14 @@ public EsmEnablementAccountSetting get(GetEsmEnablementAccountSettingRequest req String.format( "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, EsmEnablementAccountSetting.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, EsmEnablementAccountSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -32,9 +38,14 @@ public EsmEnablementAccountSetting update(UpdateEsmEnablementAccountSettingReque String.format( "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, EsmEnablementAccountSetting.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, EsmEnablementAccountSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
index 7b4b956f3..d0cc96208 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of IpAccessLists */
 @Generated
@@ -18,51 +19,80 @@ public IpAccessListsImpl(ApiClient apiClient) {
   @Override
   public CreateIpAccessListResponse create(CreateIpAccessList request) {
     String path = "/api/2.0/ip-access-lists";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateIpAccessListResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateIpAccessListResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteIpAccessListRequest request) {
     String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public FetchIpAccessListResponse get(GetIpAccessListRequest request) {
     String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, FetchIpAccessListResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, FetchIpAccessListResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListIpAccessListResponse list() {
     String path = "/api/2.0/ip-access-lists";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, ListIpAccessListResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListIpAccessListResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void replace(ReplaceIpAccessList request) {
     String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PUT(path, request, ReplaceResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, ReplaceResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void update(UpdateIpAccessList request) {
     String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
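[Editor's note, not part of the patch] The body-carrying verbs (POST, PUT, PATCH; see IpAccessListsImpl.create above) differ from GET/DELETE only in the third Request constructor argument: the request POJO is serialized to JSON up front through the now-public apiClient.serialize. Jackson's JsonProcessingException extends IOException, so serialization failures land in the same catch block as transport errors. A hedged sketch, with CreateBar, Bar, and the /api/2.0/bar path as hypothetical stand-ins:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.DatabricksException;
import com.databricks.sdk.core.http.Request;
import java.io.IOException;

// Hypothetical body/response POJOs for illustration only.
class CreateBar {}

class Bar {}

class BarImpl {
  private final ApiClient apiClient;

  BarImpl(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  Bar create(CreateBar request) {
    try {
      // serialize(request) may throw JsonProcessingException, an IOException
      // subtype, so it is handled by the same catch clause as the HTTP call.
      Request req = new Request("POST", "/api/2.0/bar", apiClient.serialize(request));
      ApiClient.setQuery(req, request);
      req.withHeader("Accept", "application/json");
      req.withHeader("Content-Type", "application/json");
      return apiClient.execute(req, Bar.class);
    } catch (IOException e) {
      throw new DatabricksException("IO error: " + e.getMessage(), e);
    }
  }
}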
headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, ReplaceResponse.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ReplaceResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(UpdateIpAccessList request) { String path = String.format("/api/2.0/ip-access-lists/%s", request.getIpAccessListId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java index 6f9aea242..966a6bc80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of NetworkConnectivity */ @Generated @@ -21,10 +22,15 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, NetworkConnectivityConfiguration.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, NetworkConnectivityConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -34,10 +40,15 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule( String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules", apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, NccAzurePrivateEndpointRule.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", 
"application/json"); + return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -47,9 +58,14 @@ public void deleteNetworkConnectivityConfiguration( String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s", apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteNetworkConnectivityConfigurationResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteNetworkConnectivityConfigurationResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -61,9 +77,14 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId(), request.getPrivateEndpointRuleId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, NccAzurePrivateEndpointRule.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -73,9 +94,14 @@ public NetworkConnectivityConfiguration getNetworkConnectivityConfiguration( String.format( "/api/2.0/accounts/%s/network-connectivity-configs/%s", apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, NetworkConnectivityConfiguration.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, NetworkConnectivityConfiguration.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -86,9 +112,14 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(GetPrivateEndpointRule apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId(), request.getPrivateEndpointRuleId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, NccAzurePrivateEndpointRule.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, NccAzurePrivateEndpointRule.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -97,10 +128,14 @@ public ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConf String path = String.format( "/api/2.0/accounts/%s/network-connectivity-configs", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET( - path, request, ListNetworkConnectivityConfigurationsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return 
+      return apiClient.execute(req, ListNetworkConnectivityConfigurationsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -110,8 +145,13 @@ public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
         String.format(
             "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules",
             apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListNccAzurePrivateEndpointRulesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListNccAzurePrivateEndpointRulesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
index 2ad8ba634..498afdf6b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NotificationDestinationsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of NotificationDestinations */
 @Generated
@@ -18,42 +19,67 @@ public NotificationDestinationsImpl(ApiClient apiClient) {
   @Override
   public NotificationDestination create(CreateNotificationDestinationRequest request) {
     String path = "/api/2.0/notification-destinations";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, NotificationDestination.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, NotificationDestination.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteNotificationDestinationRequest request) {
     String path = String.format("/api/2.0/notification-destinations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, Empty.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, Empty.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public NotificationDestination get(GetNotificationDestinationRequest request) {
     String path = String.format("/api/2.0/notification-destinations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, NotificationDestination.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, NotificationDestination.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListNotificationDestinationsResponse list(ListNotificationDestinationsRequest request) {
     String path = "/api/2.0/notification-destinations";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListNotificationDestinationsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListNotificationDestinationsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public NotificationDestination update(UpdateNotificationDestinationRequest request) {
     String path = String.format("/api/2.0/notification-destinations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, NotificationDestination.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, NotificationDestination.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java
index 1e45ec4c6..f584b80af 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of PersonalCompute */
 @Generated
@@ -21,9 +22,14 @@ public DeletePersonalComputeSettingResponse delete(DeletePersonalComputeSettingR
         String.format(
             "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.DELETE(path, request, DeletePersonalComputeSettingResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, DeletePersonalComputeSettingResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
@@ -32,9 +38,14 @@ public PersonalComputeSetting get(GetPersonalComputeSettingRequest request) {
         String.format(
             "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default",
             apiClient.configuredAccountID());
-    Map<String, String> headers = new HashMap<>();
"application/json"); - return apiClient.GET(path, request, PersonalComputeSetting.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, PersonalComputeSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -43,9 +54,14 @@ public PersonalComputeSetting update(UpdatePersonalComputeSettingRequest request String.format( "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, PersonalComputeSetting.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, PersonalComputeSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java index 10cafe064..69e5aa56a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.settings; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of RestrictWorkspaceAdmins */ @Generated @@ -19,27 +20,41 @@ public RestrictWorkspaceAdminsImpl(ApiClient apiClient) { public DeleteRestrictWorkspaceAdminsSettingResponse delete( DeleteRestrictWorkspaceAdminsSettingRequest request) { String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE( - path, request, DeleteRestrictWorkspaceAdminsSettingResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, DeleteRestrictWorkspaceAdminsSettingResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RestrictWorkspaceAdminsSetting get(GetRestrictWorkspaceAdminsSettingRequest request) { String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RestrictWorkspaceAdminsSetting.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RestrictWorkspaceAdminsSetting.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public 
   public RestrictWorkspaceAdminsSetting update(
       UpdateRestrictWorkspaceAdminsSettingRequest request) {
     String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, RestrictWorkspaceAdminsSetting.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RestrictWorkspaceAdminsSetting.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
index c0bb29014..3eec231b5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of TokenManagement */
 @Generated
@@ -18,67 +19,105 @@ public TokenManagementImpl(ApiClient apiClient) {
   @Override
   public CreateOboTokenResponse createOboToken(CreateOboTokenRequest request) {
     String path = "/api/2.0/token-management/on-behalf-of/tokens";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateOboTokenResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateOboTokenResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteTokenManagementRequest request) {
     String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetTokenResponse get(GetTokenManagementRequest request) {
     String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetTokenResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetTokenResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetTokenPermissionLevelsResponse getPermissionLevels() {
     String path = "/api/2.0/permissions/authorization/tokens/permissionLevels";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, GetTokenPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetTokenPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public TokenPermissions getPermissions() {
     String path = "/api/2.0/permissions/authorization/tokens";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, TokenPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, TokenPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListTokensResponse list(ListTokenManagementRequest request) {
     String path = "/api/2.0/token-management/tokens";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListTokensResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListTokensResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public TokenPermissions setPermissions(TokenPermissionsRequest request) {
     String path = "/api/2.0/permissions/authorization/tokens";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, TokenPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, TokenPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public TokenPermissions updatePermissions(TokenPermissionsRequest request) {
     String path = "/api/2.0/permissions/authorization/tokens";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, TokenPermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, TokenPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
index 48da0c28b..ed323fd5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Tokens */
 @Generated
@@ -18,26 +19,40 @@ public TokensImpl(ApiClient apiClient) {
   @Override
   public CreateTokenResponse create(CreateTokenRequest request) {
     String path = "/api/2.0/token/create";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateTokenResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateTokenResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(RevokeTokenRequest request) {
     String path = "/api/2.0/token/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, RevokeTokenResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, RevokeTokenResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListPublicTokensResponse list() {
     String path = "/api/2.0/token/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, ListPublicTokensResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListPublicTokensResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
index 7f56ddd26..b0f3313e7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfImpl.java
@@ -2,8 +2,10 @@
 package com.databricks.sdk.service.settings;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
+import java.io.IOException;
 import java.util.Map;

 /** Package-local implementation of WorkspaceConf */
@@ -18,16 +20,21 @@ public WorkspaceConfImpl(ApiClient apiClient) {
   @Override
   public Map<String, String> getStatus(GetStatusRequest request) {
     String path = "/api/2.0/workspace-conf";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.getStringMap(path, request, headers);
+    Request req = new Request("GET", path);
+    req.withHeader("Accept", "application/json");
+    return apiClient.getStringMap(req);
   }

   @Override
   public void setStatus(Map<String, String> request) {
     String path = "/api/2.0/workspace-conf";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, SetStatusResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, SetStatusResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
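[Editor's note, not part of the patch] WorkspaceConfImpl.getStatus above is the one migrated call site that goes through getStringMap rather than execute, and it compiles without a try/catch, which suggests the helper wraps IO failures internally rather than declaring a checked IOException. Note also that the patched getStatus does not call setQuery, so fields of GetStatusRequest are not attached as query parameters; the sketch below mirrors the patch as written rather than correcting it.

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.http.Request;
import java.util.Map;

class ConfReader {
  private final ApiClient apiClient;

  ConfReader(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  Map<String, String> readAll() {
    // No try/catch: getStringMap resolves Map<String, String> via Jackson's
    // type factory and does not declare a checked IOException.
    Request req = new Request("GET", "/api/2.0/workspace-conf");
    req.withHeader("Accept", "application/json");
    return apiClient.getStringMap(req);
  }
}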
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java
index 550f2518d..9d49090a6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sharing;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Providers */
 @Generated
@@ -18,50 +19,80 @@ public ProvidersImpl(ApiClient apiClient) {
   @Override
   public ProviderInfo create(CreateProvider request) {
     String path = "/api/2.1/unity-catalog/providers";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ProviderInfo.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ProviderInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteProviderRequest request) {
     String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ProviderInfo get(GetProviderRequest request) {
     String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ProviderInfo.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ProviderInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListProvidersResponse list(ListProvidersRequest request) {
     String path = "/api/2.1/unity-catalog/providers";
-    Map<String, String> headers = new HashMap<>();
"application/json"); - return apiClient.GET(path, request, ListProvidersResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProvidersResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListProviderSharesResponse listShares(ListSharesRequest request) { String path = String.format("/api/2.1/unity-catalog/providers/%s/shares", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListProviderSharesResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListProviderSharesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ProviderInfo update(UpdateProvider request) { String path = String.format("/api/2.1/unity-catalog/providers/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, ProviderInfo.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, ProviderInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java index 29f06d698..59ca4edbf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientActivationImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of RecipientActivation */ @Generated @@ -21,9 +22,14 @@ public void getActivationUrlInfo(GetActivationUrlInfoRequest request) { String.format( "/api/2.1/unity-catalog/public/data_sharing_activation_info/%s", request.getActivationUrl()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.GET(path, request, GetActivationUrlInfoResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, GetActivationUrlInfoResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -31,8 +37,13 @@ public RetrieveTokenResponse retrieveToken(RetrieveTokenRequest request) { String path = String.format( "/api/2.1/unity-catalog/public/data_sharing_activation/%s", request.getActivationUrl()); - Map headers = new HashMap<>(); - headers.put("Accept", 
"application/json"); - return apiClient.GET(path, request, RetrieveTokenResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RetrieveTokenResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java index b877a49ba..8b70cc8a6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.sharing; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Recipients */ @Generated @@ -18,61 +19,96 @@ public RecipientsImpl(ApiClient apiClient) { @Override public RecipientInfo create(CreateRecipient request) { String path = "/api/2.1/unity-catalog/recipients"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, RecipientInfo.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, RecipientInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteRecipientRequest request) { String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public RecipientInfo get(GetRecipientRequest request) { String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RecipientInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, RecipientInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListRecipientsResponse list(ListRecipientsRequest request) { String path = "/api/2.1/unity-catalog/recipients"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListRecipientsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return 
+      return apiClient.execute(req, ListRecipientsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public RecipientInfo rotateToken(RotateRecipientToken request) {
     String path =
         String.format("/api/2.1/unity-catalog/recipients/%s/rotate-token", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, RecipientInfo.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RecipientInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public GetRecipientSharePermissionsResponse sharePermissions(SharePermissionsRequest request) {
     String path =
         String.format("/api/2.1/unity-catalog/recipients/%s/share-permissions", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetRecipientSharePermissionsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetRecipientSharePermissionsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void update(UpdateRecipient request) {
     String path = String.format("/api/2.1/unity-catalog/recipients/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java
index cb5211ab9..d32ff58cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sharing;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of Shares */
 @Generated
@@ -18,61 +19,95 @@ public SharesImpl(ApiClient apiClient) {
   @Override
   public ShareInfo create(CreateShare request) {
     String path = "/api/2.1/unity-catalog/shares";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ShareInfo.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ShareInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteShareRequest request) {
     String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ShareInfo get(GetShareRequest request) {
     String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ShareInfo.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ShareInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ListSharesResponse list(ListSharesRequest request) {
     String path = "/api/2.1/unity-catalog/shares";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListSharesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListSharesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public com.databricks.sdk.service.catalog.PermissionsList sharePermissions(
       SharePermissionsRequest request) {
     String path = String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(
-        path, request, com.databricks.sdk.service.catalog.PermissionsList.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, com.databricks.sdk.service.catalog.PermissionsList.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public ShareInfo update(UpdateShare request) {
     String path = String.format("/api/2.1/unity-catalog/shares/%s", request.getName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, ShareInfo.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ShareInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void updatePermissions(UpdateSharePermissions request) {
     String path =
String.format("/api/2.1/unity-catalog/shares/%s/permissions", request.getName()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdatePermissionsResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdatePermissionsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java index 360e063ac..33a394bf3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Alerts */ @Generated @@ -18,42 +19,67 @@ public AlertsImpl(ApiClient apiClient) { @Override public Alert create(CreateAlertRequest request) { String path = "/api/2.0/sql/alerts"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Alert.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Alert.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(TrashAlertRequest request) { String path = String.format("/api/2.0/sql/alerts/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, Empty.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Empty.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Alert get(GetAlertRequest request) { String path = String.format("/api/2.0/sql/alerts/%s", request.getId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Alert.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Alert.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListAlertsResponse list(ListAlertsRequest request) { String path = "/api/2.0/sql/alerts"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListAlertsResponse.class, headers); + try { + Request req = new Request("GET", path); + 
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListAlertsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public Alert update(UpdateAlertRequest request) {
     String path = String.format("/api/2.0/sql/alerts/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, Alert.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Alert.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java
index 92a0fef56..286783571 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java
@@ -2,10 +2,11 @@
 package com.databricks.sdk.service.sql;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
+import java.io.IOException;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;

 /** Package-local implementation of AlertsLegacy */
 @Generated
@@ -19,42 +20,62 @@ public AlertsLegacyImpl(ApiClient apiClient) {
   @Override
   public LegacyAlert create(CreateAlert request) {
     String path = "/api/2.0/preview/sql/alerts";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, LegacyAlert.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LegacyAlert.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteAlertsLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public LegacyAlert get(GetAlertsLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, LegacyAlert.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, LegacyAlert.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public Collection<LegacyAlert> list() {
     String path = "/api/2.0/preview/sql/alerts";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.getCollection(path, null, LegacyAlert.class, headers);
+    Request req = new Request("GET", path);
+    req.withHeader("Accept", "application/json");
+    return apiClient.getCollection(req, LegacyAlert.class);
   }

   @Override
   public void update(EditAlert request) {
     String path = String.format("/api/2.0/preview/sql/alerts/%s", request.getAlertId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PUT(path, request, UpdateResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java
index 1e9fb42fb..55287384f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardWidgetsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;

 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;

 /** Package-local implementation of DashboardWidgets */
 @Generated
@@ -18,26 +19,41 @@ public DashboardWidgetsImpl(ApiClient apiClient) {
   @Override
   public Widget create(CreateWidget request) {
     String path = "/api/2.0/preview/sql/widgets";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Widget.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Widget.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public void delete(DeleteDashboardWidgetRequest request) {
     String path = String.format("/api/2.0/preview/sql/widgets/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }

   @Override
   public Widget update(CreateWidget request) {
     String path = String.format("/api/2.0/preview/sql/widgets/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Widget.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Widget.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
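[Editor's note, not part of the patch] AlertsLegacyImpl.list above shows the remaining helper, getCollection, which builds a Jackson collection type for the element class so callers get a typed Collection back without casts; like getStringMap it is used without a try/catch. A hedged sketch, assuming the generic signature getCollection(Request, Class<T>) that the surrounding call sites imply; Item and its path are hypothetical:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.http.Request;
import java.util.Collection;

// Hypothetical element POJO for illustration only.
class Item {}

class ItemLister {
  private final ApiClient apiClient;

  ItemLister(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  Collection<Item> listAll() {
    // getCollection deserializes the JSON array body into Collection<Item>.
    Request req = new Request("GET", "/api/2.0/items"); // hypothetical path
    req.withHeader("Accept", "application/json");
    return apiClient.getCollection(req, Item.class);
  }
}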
headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Widget.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Widget.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java index 5c79b8353..fb45272bc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Dashboards */ @Generated @@ -18,51 +19,81 @@ public DashboardsImpl(ApiClient apiClient) { @Override public Dashboard create(DashboardPostContent request) { String path = "/api/2.0/preview/sql/dashboards"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Dashboard.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteDashboardRequest request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", request.getDashboardId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Dashboard get(GetDashboardRequest request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", request.getDashboardId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, Dashboard.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListResponse list(ListDashboardsRequest request) { String path = "/api/2.0/preview/sql/dashboards"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", 
"application/json"); + return apiClient.execute(req, ListResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void restore(RestoreDashboardRequest request) { String path = String.format("/api/2.0/preview/sql/dashboards/trash/%s", request.getDashboardId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.POST(path, null, RestoreResponse.class, headers); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, RestoreResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public Dashboard update(DashboardEditContent request) { String path = String.format("/api/2.0/preview/sql/dashboards/%s", request.getDashboardId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, Dashboard.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, Dashboard.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesImpl.java index 13a373ff9..03b4f1e82 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DataSourcesImpl.java @@ -2,10 +2,9 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of DataSources */ @Generated @@ -19,8 +18,8 @@ public DataSourcesImpl(ApiClient apiClient) { @Override public Collection list() { String path = "/api/2.0/preview/sql/data_sources"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.getCollection(path, null, DataSource.class, headers); + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.getCollection(req, DataSource.class); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java index a49470dbc..858e01493 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.sql; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of DbsqlPermissions */ @Generated @@ -21,9 +22,14 @@ public GetResponse 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java
index a49470dbc..858e01493 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DbsqlPermissionsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of DbsqlPermissions */
 @Generated
@@ -21,9 +22,14 @@ public GetResponse get(GetDbsqlPermissionRequest request) {
         String.format(
             "/api/2.0/preview/sql/permissions/%s/%s",
             request.getObjectType(), request.getObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -32,10 +38,15 @@ public SetResponse set(SetRequest request) {
         String.format(
             "/api/2.0/preview/sql/permissions/%s/%s",
             request.getObjectType(), request.getObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, SetResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, SetResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -44,9 +55,14 @@ public Success transferOwnership(TransferOwnershipRequest request) {
         String.format(
             "/api/2.0/preview/sql/permissions/%s/%s/transfer",
             request.getObjectType(), request.getObjectId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Success.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Success.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java
index 02fd619ca..27f88fa53 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of Queries */
 @Generated
@@ -18,51 +19,81 @@ public QueriesImpl(ApiClient apiClient) {
 
   @Override
   public Query create(CreateQueryRequest request) {
     String path = "/api/2.0/sql/queries";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Query.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Query.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(TrashQueryRequest request) {
     String path = String.format("/api/2.0/sql/queries/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, Empty.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, Empty.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public Query get(GetQueryRequest request) {
     String path = String.format("/api/2.0/sql/queries/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, Query.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, Query.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListQueryObjectsResponse list(ListQueriesRequest request) {
     String path = "/api/2.0/sql/queries";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListQueryObjectsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListQueryObjectsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListVisualizationsForQueryResponse listVisualizations(
       ListVisualizationsForQueryRequest request) {
     String path = String.format("/api/2.0/sql/queries/%s/visualizations", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListVisualizationsForQueryResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListVisualizationsForQueryResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public Query update(UpdateQueryRequest request) {
     String path = String.format("/api/2.0/sql/queries/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, Query.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Query.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java
index 88a90b8ff..cce4fb4eb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesLegacyImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of QueriesLegacy */
 @Generated
@@ -18,50 +19,80 @@ public QueriesLegacyImpl(ApiClient apiClient) {
 
   @Override
   public LegacyQuery create(QueryPostContent request) {
     String path = "/api/2.0/preview/sql/queries";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, LegacyQuery.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LegacyQuery.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteQueriesLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public LegacyQuery get(GetQueriesLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, LegacyQuery.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, LegacyQuery.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
  }
 
   @Override
   public QueryList list(ListQueriesLegacyRequest request) {
     String path = "/api/2.0/preview/sql/queries";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, QueryList.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, QueryList.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void restore(RestoreQueriesLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/queries/trash/%s", request.getQueryId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.POST(path, null, RestoreResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, RestoreResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public LegacyQuery update(QueryEditContent request) {
     String path = String.format("/api/2.0/preview/sql/queries/%s", request.getQueryId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, LegacyQuery.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LegacyQuery.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java
index 1a07ce143..52f2deb6a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryHistoryImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of QueryHistory */
 @Generated
@@ -18,8 +19,13 @@ public QueryHistoryImpl(ApiClient apiClient) {
 
   @Override
   public ListQueriesResponse list(ListQueryHistoryRequest request) {
     String path = "/api/2.0/sql/history/queries";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListQueriesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListQueriesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java
index 2189a2d00..0dacc2b8d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of QueryVisualizations */
 @Generated
@@ -18,26 +19,41 @@ public QueryVisualizationsImpl(ApiClient apiClient) {
 
   @Override
   public Visualization create(CreateVisualizationRequest request) {
     String path = "/api/2.0/sql/visualizations";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, Visualization.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Visualization.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteVisualizationRequest request) {
     String path = String.format("/api/2.0/sql/visualizations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, Empty.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, Empty.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public Visualization update(UpdateVisualizationRequest request) {
     String path = String.format("/api/2.0/sql/visualizations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, Visualization.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, Visualization.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java
index 714e22132..e62786295 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsLegacyImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of QueryVisualizationsLegacy */
 @Generated
@@ -18,26 +19,41 @@ public QueryVisualizationsLegacyImpl(ApiClient apiClient) {
 
   @Override
   public LegacyVisualization create(CreateQueryVisualizationsLegacyRequest request) {
     String path = "/api/2.0/preview/sql/visualizations";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, LegacyVisualization.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LegacyVisualization.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteQueryVisualizationsLegacyRequest request) {
     String path = String.format("/api/2.0/preview/sql/visualizations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public LegacyVisualization update(LegacyVisualization request) {
     String path = String.format("/api/2.0/preview/sql/visualizations/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, LegacyVisualization.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, LegacyVisualization.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java
index 7af4a2adc..e6b49df84 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of StatementExecution */
 @Generated
@@ -18,25 +19,40 @@ public StatementExecutionImpl(ApiClient apiClient) {
 
   @Override
   public void cancelExecution(CancelExecutionRequest request) {
     String path = String.format("/api/2.0/sql/statements/%s/cancel", request.getStatementId());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.POST(path, null, CancelExecutionResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, CancelExecutionResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public StatementResponse executeStatement(ExecuteStatementRequest request) {
     String path = "/api/2.0/sql/statements/";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, StatementResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, StatementResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public StatementResponse getStatement(GetStatementRequest request) {
     String path = String.format("/api/2.0/sql/statements/%s", request.getStatementId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, StatementResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, StatementResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -45,8 +61,13 @@ public ResultData getStatementResultChunkN(GetStatementResultChunkNRequest reque
         String.format(
             "/api/2.0/sql/statements/%s/result/chunks/%s",
             request.getStatementId(), request.getChunkIndex());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ResultData.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ResultData.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java
index a6a8a8169..d1fb0fda2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.sql;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of Warehouses */
 @Generated
@@ -18,35 +19,55 @@ public WarehousesImpl(ApiClient apiClient) {
 
   @Override
   public CreateWarehouseResponse create(CreateWarehouseRequest request) {
     String path = "/api/2.0/sql/warehouses";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteWarehouseRequest request) {
     String path = String.format("/api/2.0/sql/warehouses/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void edit(EditWarehouseRequest request) {
     String path = String.format("/api/2.0/sql/warehouses/%s/edit", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, EditWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, EditWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public GetWarehouseResponse get(GetWarehouseRequest request) {
     String path = String.format("/api/2.0/sql/warehouses/%s", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -55,75 +76,119 @@ public GetWarehousePermissionLevelsResponse getPermissionLevels(
     String path =
         String.format(
             "/api/2.0/permissions/warehouses/%s/permissionLevels", request.getWarehouseId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetWarehousePermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetWarehousePermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public WarehousePermissions getPermissions(GetWarehousePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, WarehousePermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, WarehousePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public GetWorkspaceWarehouseConfigResponse getWorkspaceWarehouseConfig() {
     String path = "/api/2.0/sql/config/warehouses";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, GetWorkspaceWarehouseConfigResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetWorkspaceWarehouseConfigResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListWarehousesResponse list(ListWarehousesRequest request) {
     String path = "/api/2.0/sql/warehouses";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListWarehousesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListWarehousesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public WarehousePermissions setPermissions(WarehousePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, WarehousePermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, WarehousePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void setWorkspaceWarehouseConfig(SetWorkspaceWarehouseConfigRequest request) {
     String path = "/api/2.0/sql/config/warehouses";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PUT(path, request, SetWorkspaceWarehouseConfigResponse.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, SetWorkspaceWarehouseConfigResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void start(StartRequest request) {
     String path = String.format("/api/2.0/sql/warehouses/%s/start", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.POST(path, null, StartWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, StartWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void stop(StopRequest request) {
     String path = String.format("/api/2.0/sql/warehouses/%s/stop", request.getId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.POST(path, null, StopWarehouseResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, StopWarehouseResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public WarehousePermissions updatePermissions(WarehousePermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/warehouses/%s", request.getWarehouseId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, WarehousePermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, WarehousePermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
index 4073e3840..c4641d5fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.vectorsearch;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of VectorSearchEndpoints */
 @Generated
@@ -18,32 +19,52 @@ public VectorSearchEndpointsImpl(ApiClient apiClient) {
 
   @Override
   public EndpointInfo createEndpoint(CreateEndpoint request) {
     String path = "/api/2.0/vector-search/endpoints";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, EndpointInfo.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, EndpointInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void deleteEndpoint(DeleteEndpointRequest request) {
     String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteEndpointResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteEndpointResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public EndpointInfo getEndpoint(GetEndpointRequest request) {
     String path = String.format("/api/2.0/vector-search/endpoints/%s", request.getEndpointName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, EndpointInfo.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, EndpointInfo.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListEndpointResponse listEndpoints(ListEndpointsRequest request) {
     String path = "/api/2.0/vector-search/endpoints";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListEndpointResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListEndpointResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java
index 15429adcb..b49d5b65f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.vectorsearch;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of VectorSearchIndexes */
 @Generated
@@ -18,87 +19,137 @@ public VectorSearchIndexesImpl(ApiClient apiClient) {
 
   @Override
   public CreateVectorIndexResponse createIndex(CreateVectorIndexRequest request) {
     String path = "/api/2.0/vector-search/indexes";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public DeleteDataVectorIndexResponse deleteDataVectorIndex(
       DeleteDataVectorIndexRequest request) {
     String path =
         String.format("/api/2.0/vector-search/indexes/%s/delete-data", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, DeleteDataVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, DeleteDataVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void deleteIndex(DeleteIndexRequest request) {
     String path = String.format("/api/2.0/vector-search/indexes/%s", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.DELETE(path, request, DeleteIndexResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, DeleteIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public VectorIndex getIndex(GetIndexRequest request) {
     String path = String.format("/api/2.0/vector-search/indexes/%s", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, VectorIndex.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, VectorIndex.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListVectorIndexesResponse listIndexes(ListIndexesRequest request) {
     String path = "/api/2.0/vector-search/indexes";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListVectorIndexesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListVectorIndexesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public QueryVectorIndexResponse queryIndex(QueryVectorIndexRequest request) {
     String path = String.format("/api/2.0/vector-search/indexes/%s/query", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, QueryVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, QueryVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public QueryVectorIndexResponse queryNextPage(QueryVectorIndexNextPageRequest request) {
     String path =
         String.format("/api/2.0/vector-search/indexes/%s/query-next-page", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, QueryVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, QueryVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ScanVectorIndexResponse scanIndex(ScanVectorIndexRequest request) {
     String path = String.format("/api/2.0/vector-search/indexes/%s/scan", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, ScanVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, ScanVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void syncIndex(SyncIndexRequest request) {
     String path = String.format("/api/2.0/vector-search/indexes/%s/sync", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    apiClient.POST(path, null, SyncIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path);
+      ApiClient.setQuery(req, request);
+      apiClient.execute(req, SyncIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public UpsertDataVectorIndexResponse upsertDataVectorIndex(
      UpsertDataVectorIndexRequest request) {
     String path =
         String.format("/api/2.0/vector-search/indexes/%s/upsert-data", request.getIndexName());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, UpsertDataVectorIndexResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, UpsertDataVectorIndexResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java index 8837469e3..64260be1e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of GitCredentials */ @Generated @@ -18,42 +19,66 @@ public GitCredentialsImpl(ApiClient apiClient) { @Override public CreateCredentialsResponse create(CreateCredentialsRequest request) { String path = "/api/2.0/git-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateCredentialsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteCredentialsResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetCredentialsResponse get(GetCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetCredentialsResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListCredentialsResponse list() { String path = "/api/2.0/git-credentials"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, ListCredentialsResponse.class, headers); + try { + Request req = new Request("GET", path); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void update(UpdateCredentialsRequest request) { String path = String.format("/api/2.0/git-credentials/%s", request.getCredentialId()); - Map headers = new HashMap<>(); - headers.put("Accept", 
"application/json"); - headers.put("Content-Type", "application/json"); - apiClient.PATCH(path, request, UpdateCredentialsResponse.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, UpdateCredentialsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java index 03def8f93..aa7b08443 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java @@ -2,9 +2,10 @@ package com.databricks.sdk.service.workspace; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; /** Package-local implementation of Repos */ @Generated @@ -18,26 +19,41 @@ public ReposImpl(ApiClient apiClient) { @Override public CreateRepoResponse create(CreateRepoRequest request) { String path = "/api/2.0/repos"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.POST(path, request, CreateRepoResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, CreateRepoResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void delete(DeleteRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - apiClient.DELETE(path, request, DeleteRepoResponse.class, headers); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, DeleteRepoResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public GetRepoResponse get(GetRepoRequest request) { String path = String.format("/api/2.0/repos/%s", request.getRepoId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, GetRepoResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, GetRepoResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -45,51 +61,81 @@ public GetRepoPermissionLevelsResponse getPermissionLevels( GetRepoPermissionLevelsRequest request) { String path = String.format("/api/2.0/permissions/repos/%s/permissionLevels", request.getRepoId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
index 03def8f93..aa7b08443 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.workspace;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of Repos */
 @Generated
@@ -18,26 +19,41 @@ public ReposImpl(ApiClient apiClient) {
   @Override
   public CreateRepoResponse create(CreateRepoRequest request) {
     String path = "/api/2.0/repos";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.POST(path, request, CreateRepoResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, CreateRepoResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void delete(DeleteRepoRequest request) {
     String path = String.format("/api/2.0/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    apiClient.DELETE(path, request, DeleteRepoResponse.class, headers);
+    try {
+      Request req = new Request("DELETE", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      apiClient.execute(req, DeleteRepoResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public GetRepoResponse get(GetRepoRequest request) {
     String path = String.format("/api/2.0/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetRepoResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetRepoResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
@@ -45,51 +61,81 @@ public GetRepoPermissionLevelsResponse getPermissionLevels(
       GetRepoPermissionLevelsRequest request) {
     String path =
         String.format("/api/2.0/permissions/repos/%s/permissionLevels", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetRepoPermissionLevelsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetRepoPermissionLevelsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public RepoPermissions getPermissions(GetRepoPermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, RepoPermissions.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, RepoPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListReposResponse list(ListReposRequest request) {
     String path = "/api/2.0/repos";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListReposResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListReposResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public RepoPermissions setPermissions(RepoPermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PUT(path, request, RepoPermissions.class, headers);
+    try {
+      Request req = new Request("PUT", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RepoPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void update(UpdateRepoRequest request) {
     String path = String.format("/api/2.0/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.PATCH(path, request, UpdateRepoResponse.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, UpdateRepoResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public RepoPermissions updatePermissions(RepoPermissionsRequest request) {
     String path = String.format("/api/2.0/permissions/repos/%s", request.getRepoId());
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    return apiClient.PATCH(path, request, RepoPermissions.class, headers);
+    try {
+      Request req = new Request("PATCH", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      return apiClient.execute(req, RepoPermissions.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
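Note: the regenerated implementations remain straightforward to unit-test against a mocked ApiClient, in the same style as the test updates at the end of this diff. A hedged sketch (this test class is not part of the diff; the repo ID and the fluent setRepoId call are illustrative, and the class must live in the same package because ReposImpl is package-local):

package com.databricks.sdk.service.workspace;

import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.core.http.Request;
import java.io.IOException;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

public class ReposImplTest {
  @Test
  public void testGetBuildsExpectedRequest() throws IOException {
    ApiClient apiClient = Mockito.mock(ApiClient.class);
    when(apiClient.execute(any(), any())).thenReturn(null);

    new ReposImpl(apiClient).get(new GetRepoRequest().setRepoId(42L));

    // ReposImpl.get formats the ID into the path and issues a bodyless GET.
    verify(apiClient)
        .execute(
            argThat(
                (Request req) ->
                    req.getMethod().equals("GET") && req.getUrl().equals("/api/2.0/repos/42")),
            eq(GetRepoResponse.class));
  }
}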
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
index eb7d47a25..78a15a19c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsImpl.java
@@ -2,9 +2,10 @@
 package com.databricks.sdk.service.workspace;
 
 import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
 import com.databricks.sdk.support.Generated;
-import java.util.HashMap;
-import java.util.Map;
+import java.io.IOException;
 
 /** Package-local implementation of Secrets */
 @Generated
@@ -18,94 +19,148 @@ public SecretsImpl(ApiClient apiClient) {
   @Override
   public void createScope(CreateScope request) {
     String path = "/api/2.0/secrets/scopes/create";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, CreateScopeResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, CreateScopeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void deleteAcl(DeleteAcl request) {
     String path = "/api/2.0/secrets/acls/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, DeleteAclResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, DeleteAclResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void deleteScope(DeleteScope request) {
     String path = "/api/2.0/secrets/scopes/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, DeleteScopeResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, DeleteScopeResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void deleteSecret(DeleteSecret request) {
     String path = "/api/2.0/secrets/delete";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, DeleteSecretResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, DeleteSecretResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public AclItem getAcl(GetAclRequest request) {
     String path = "/api/2.0/secrets/acls/get";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, AclItem.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, AclItem.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public GetSecretResponse getSecret(GetSecretRequest request) {
     String path = "/api/2.0/secrets/get";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, GetSecretResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, GetSecretResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListAclsResponse listAcls(ListAclsRequest request) {
     String path = "/api/2.0/secrets/acls/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListAclsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListAclsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListScopesResponse listScopes() {
     String path = "/api/2.0/secrets/scopes/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, ListScopesResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListScopesResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public ListSecretsResponse listSecrets(ListSecretsRequest request) {
     String path = "/api/2.0/secrets/list";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    return apiClient.GET(path, request, ListSecretsResponse.class, headers);
+    try {
+      Request req = new Request("GET", path);
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      return apiClient.execute(req, ListSecretsResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 
   @Override
   public void putAcl(PutAcl request) {
     String path = "/api/2.0/secrets/acls/put";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, PutAclResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, PutAclResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
  }
 
   @Override
   public void putSecret(PutSecret request) {
     String path = "/api/2.0/secrets/put";
-    Map<String, String> headers = new HashMap<>();
-    headers.put("Accept", "application/json");
-    headers.put("Content-Type", "application/json");
-    apiClient.POST(path, request, PutSecretResponse.class, headers);
+    try {
+      Request req = new Request("POST", path, apiClient.serialize(request));
+      ApiClient.setQuery(req, request);
+      req.withHeader("Accept", "application/json");
+      req.withHeader("Content-Type", "application/json");
+      apiClient.execute(req, PutSecretResponse.class);
+    } catch (IOException e) {
+      throw new DatabricksException("IO error: " + e.getMessage(), e);
+    }
   }
 }
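Note: the SecretsImpl changes make the two request shapes easy to compare. POST methods serialize the POJO as a JSON body and set Content-Type, while GET methods (and listScopes, which takes no request at all and therefore skips setQuery) only mirror the POJO into query parameters. The bodyless counterpart to the hypothetical JsonCall sketch above would look like this (again an illustration, not SDK code):

static <I, O> O bodyless(ApiClient client, String verb, String path, I query, Class<O> target) {
  try {
    Request req = new Request(verb, path); // GET/DELETE: no body, nothing to serialize
    ApiClient.setQuery(req, query); // the request POJO travels only as query parameters
    req.withHeader("Accept", "application/json"); // no Content-Type without a body
    return client.execute(req, target);
  } catch (IOException e) {
    throw new DatabricksException("IO error: " + e.getMessage(), e);
  }
}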
"/api/2.0/permissions/%s/%s", request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, WorkspaceObjectPermissions.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, WorkspaceObjectPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ObjectInfo getStatus(GetStatusRequest request) { String path = "/api/2.0/workspace/get-status"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ObjectInfo.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ObjectInfo.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void importContent(Import request) { String path = "/api/2.0/workspace/import"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, ImportResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, ImportResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public ListResponse list(ListWorkspaceRequest request) { String path = "/api/2.0/workspace/list"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ListResponse.class, headers); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override public void mkdirs(Mkdirs request) { String path = "/api/2.0/workspace/mkdirs"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, MkdirsResponse.class, headers); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + apiClient.execute(req, MkdirsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -95,10 +136,15 @@ public WorkspaceObjectPermissions setPermissions(WorkspaceObjectPermissionsReque String.format( "/api/2.0/permissions/%s/%s", request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PUT(path, request, WorkspaceObjectPermissions.class, headers); + try { + Request req = new Request("PUT", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + 
req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceObjectPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } @Override @@ -107,9 +153,14 @@ public WorkspaceObjectPermissions updatePermissions(WorkspaceObjectPermissionsRe String.format( "/api/2.0/permissions/%s/%s", request.getWorkspaceObjectType(), request.getWorkspaceObjectId()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, WorkspaceObjectPermissions.class, headers); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, WorkspaceObjectPermissions.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java index 10a75a223..a9470c215 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.UnknownHostException; @@ -62,12 +63,15 @@ private ApiClient getApiClient(Request request, List responses } private void runApiClientTest( - ApiClient client, Request request, Class clazz, T expectedResponse) { + ApiClient client, Request request, Class clazz, T expectedResponse) + throws IOException { T response; if (request.getMethod().equals(Request.GET)) { - response = client.GET(request.getUri().getPath(), clazz, Collections.emptyMap()); + response = client.execute(new Request("GET", request.getUri().getPath()), clazz); } else if (request.getMethod().equals(Request.POST)) { - response = client.POST(request.getUri().getPath(), request, clazz, Collections.emptyMap()); + response = + client.execute( + new Request("POST", request.getUri().getPath(), client.serialize(request)), clazz); } else { throw new IllegalArgumentException("Unsupported method: " + request.getMethod()); } @@ -78,13 +82,15 @@ private void runApiClientTest( Request request, List responses, Class clazz, - T expectedResponse) { + T expectedResponse) + throws IOException { ApiClient client = getApiClient(request, responses); runApiClientTest(client, request, clazz, expectedResponse); } private void runFailingApiClientTest( - Request request, List responses, Class clazz, String expectedMessage) { + Request request, List responses, Class clazz, String expectedMessage) + throws IOException { DatabricksException exception = runFailingApiClientTest(request, responses, clazz, DatabricksException.class); assertEquals(exception.getMessage(), expectedMessage); @@ -96,11 +102,14 @@ private T runFailingApiClientTest( if (request.getMethod().equals(Request.GET)) { return assertThrows( exceptionClass, - () -> client.GET(request.getUri().getPath(), clazz, Collections.emptyMap())); + () -> client.execute(new Request("GET", request.getUri().getPath()), clazz)); } else 
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java
index 10a75a223..a9470c215 100644
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java
@@ -11,6 +11,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.UnknownHostException;
@@ -62,12 +63,15 @@ private ApiClient getApiClient(Request request, List responses
   }
 
   private <T> void runApiClientTest(
-      ApiClient client, Request request, Class<T> clazz, T expectedResponse) {
+      ApiClient client, Request request, Class<T> clazz, T expectedResponse)
+      throws IOException {
     T response;
     if (request.getMethod().equals(Request.GET)) {
-      response = client.GET(request.getUri().getPath(), clazz, Collections.emptyMap());
+      response = client.execute(new Request("GET", request.getUri().getPath()), clazz);
     } else if (request.getMethod().equals(Request.POST)) {
-      response = client.POST(request.getUri().getPath(), request, clazz, Collections.emptyMap());
+      response =
+          client.execute(
+              new Request("POST", request.getUri().getPath(), client.serialize(request)), clazz);
     } else {
       throw new IllegalArgumentException("Unsupported method: " + request.getMethod());
     }
@@ -78,13 +82,15 @@ private void runApiClientTest(
       Request request,
       List responses,
       Class<T> clazz,
-      T expectedResponse) {
+      T expectedResponse)
+      throws IOException {
     ApiClient client = getApiClient(request, responses);
     runApiClientTest(client, request, clazz, expectedResponse);
   }
 
   private void runFailingApiClientTest(
-      Request request, List responses, Class clazz, String expectedMessage) {
+      Request request, List responses, Class clazz, String expectedMessage)
+      throws IOException {
     DatabricksException exception =
         runFailingApiClientTest(request, responses, clazz, DatabricksException.class);
     assertEquals(exception.getMessage(), expectedMessage);
@@ -96,11 +102,14 @@ private T runFailingApiClientTest(
     if (request.getMethod().equals(Request.GET)) {
       return assertThrows(
           exceptionClass,
-          () -> client.GET(request.getUri().getPath(), clazz, Collections.emptyMap()));
+          () -> client.execute(new Request("GET", request.getUri().getPath()), clazz));
     } else if (request.getMethod().equals(Request.POST)) {
       return assertThrows(
           exceptionClass,
-          () -> client.POST(request.getUri().getPath(), request, clazz, Collections.emptyMap()));
+          () ->
+              client.execute(
+                  new Request("POST", request.getUri().getPath(), client.serialize(request)),
+                  clazz));
     } else {
       throw new IllegalArgumentException("Unsupported method: " + request.getMethod());
     }
@@ -173,7 +182,7 @@ private SuccessfulResponse getTransientError(Request req, int statusCode, String
   }
 
   @Test
-  void happyPath() {
+  void happyPath() throws IOException {
     Request req = getBasicRequest();
     runApiClientTest(
         req,
@@ -183,7 +192,7 @@ void happyPath() {
   }
 
   @Test
-  void unknownKey() {
+  void unknownKey() throws IOException {
     Request req = getBasicRequest();
     runApiClientTest(
         req,
@@ -193,7 +202,7 @@ void unknownKey() {
   }
 
   @Test
-  void retry429() {
+  void retry429() throws IOException {
     Request req = getBasicRequest();
     runApiClientTest(
         req,
@@ -206,7 +215,7 @@ void retry429() {
   }
 
   @Test
-  void failAfterTooManyRetries() {
+  void failAfterTooManyRetries() throws IOException {
     Request req = getBasicRequest();
     runFailingApiClientTest(
         req,
@@ -221,7 +230,7 @@ void failAfterTooManyRetries() {
   }
 
   @Test
-  void checkExponentialBackoffForRetry() {
+  void checkExponentialBackoffForRetry() throws IOException {
     Request req = getBasicRequest();
     ApiClient client =
         getApiClient(req, Collections.singletonList(getTooManyRequestsResponse(req)));
@@ -234,7 +243,7 @@ void checkExponentialBackoffForRetry() {
   }
 
   @Test
-  void failIdempotentRequestAfterTooManyRetries() throws JsonProcessingException {
+  void failIdempotentRequestAfterTooManyRetries() throws IOException {
     Request req = getExampleIdempotentRequest();
 
     runFailingApiClientTest(
@@ -260,7 +269,7 @@ void failIdempotentRequestAfterTooManyRetries() {
   }
 
   @Test
-  void retryDatabricksApi12RetriableError() throws JsonProcessingException {
+  void retryDatabricksApi12RetriableError() throws IOException {
     Request req = getBasicRequest();
 
     runApiClientTest(
@@ -322,7 +331,7 @@ void errorDetails() throws JsonProcessingException {
   }
 
   @Test
-  void retryDatabricksRetriableError() throws JsonProcessingException {
+  void retryDatabricksRetriableError() throws IOException {
     Request req = getBasicRequest();
 
     runApiClientTest(
@@ -346,7 +355,7 @@ void retryDatabricksRetriableError() {
   }
 
   @Test
-  void retryUnknownHostException() {
+  void retryUnknownHostException() throws IOException {
     Request req = getBasicRequest();
 
     runApiClientTest(
@@ -379,7 +388,7 @@ public HeaderFactory configure(DatabricksConfig config) {
   }
 
   @Test
-  void populateHostFromCredentialProvider() {
+  void populateHostFromCredentialProvider() throws IOException {
     Request req = getBasicRequest();
     DatabricksConfig config =
         new DatabricksConfig()
@@ -413,8 +422,8 @@ void privateLinkRedirectBecomesPrivateLinkValidationError() throws MalformedURLE
         assertThrows(
             PrivateLinkValidationError.class,
            () ->
-                client.GET(
-                    req.getUri().getPath(), MyEndpointResponse.class, Collections.emptyMap()));
+                client.execute(
+                    new Request("GET", req.getUri().getPath()), MyEndpointResponse.class));
     assertTrue(e.getMessage().contains("AWS PrivateLink"));
   }
 }
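Note: with the verb helpers removed, code that used ApiClient directly follows the same Request-then-execute path these tests now exercise, and must handle the IOException that execute declares. A minimal sketch (the endpoint path and the MyStatus POJO are hypothetical):

public class MyStatus {
  @JsonProperty("status")
  public String status;
}

Request req = new Request("GET", "/api/2.0/hypothetical/endpoint");
req.withHeader("Accept", "application/json");
try {
  MyStatus out = apiClient.execute(req, MyStatus.class); // response body deserialized via Jackson
} catch (IOException e) {
  throw new DatabricksException("IO error: " + e.getMessage(), e);
}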
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/apps/AppsImplTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/apps/AppsImplTest.java
new file mode 100644
index 000000000..7413c6c4c
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/apps/AppsImplTest.java
@@ -0,0 +1,44 @@
+package com.databricks.sdk.service.apps;
+
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.http.Request;
+import java.io.IOException;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+public class AppsImplTest {
+  @Test
+  public void testCreateAppIncludesNoComputeParameter() throws IOException {
+    ApiClient apiClient = Mockito.mock(ApiClient.class);
+    String expectedPath = "/api/2.0/apps";
+    when(apiClient.execute(any(), any())).thenReturn(null);
+    when(apiClient.serialize(any())).thenReturn("");
+
+    AppsService apps = new AppsImpl(apiClient);
+    apps.create(new CreateAppRequest().setNoCompute(true));
+
+    verify(apiClient)
+        .execute(
+            argThat(
+                (Request req) -> {
+                  if (!req.getMethod().equals("POST")) {
+                    return false;
+                  }
+                  if (!req.getUrl().equals(expectedPath)) {
+                    return false;
+                  }
+                  if (!req.getQuery().containsKey("no_compute")) {
+                    return false;
+                  }
+                  if (!req.getQuery().get("no_compute").get(0).equals("true")) {
+                    return false;
+                  }
+                  return true;
+                }),
+            eq(App.class));
+  }
+}
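Note: AppsImplTest is the one new test that pins down query-parameter propagation: setNoCompute(true) on the request POJO must surface as no_compute=true on the Request, and req.getQuery() exposes parameters as a map from name to a list of values. Condensed to its essence (an illustration of what the matcher above asserts, not code from the diff):

Request req = new Request("POST", "/api/2.0/apps");
ApiClient.setQuery(req, new CreateAppRequest().setNoCompute(true));
// req.getQuery().get("no_compute") is now the single-element list ["true"]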
"/api/2.1/jobs/list"; - when(apiClient.GET(eq(expectedPath), any(), any(), any())).thenReturn(null); + when(apiClient.execute(any(), any())).thenReturn(null); JobsService jobs = new JobsImpl(apiClient); jobs.list(new ListJobsRequest()); - verify(apiClient).GET(eq(expectedPath), any(), any(), any()); + verify(apiClient) + .execute( + argThat( + (Request req) -> { + if (!req.getMethod().equals("GET")) { + return false; + } + if (!req.getUrl().equals(expectedPath)) { + return false; + } + return true; + }), + eq(ListJobsResponse.class)); } @Test - public void testJobsUpdateUsesApi2_1() { + public void testJobsUpdateUsesApi2_1() throws IOException { ApiClient apiClient = Mockito.mock(ApiClient.class); String expectedPath = "/api/2.1/jobs/update"; - when(apiClient.POST(eq(expectedPath), any(), any(), any())).thenReturn(null); + when(apiClient.execute(any(), any())).thenReturn(null); JobsService jobs = new JobsImpl(apiClient); jobs.update(new UpdateJob()); - verify(apiClient).POST(eq(expectedPath), any(), any(), any()); + verify(apiClient) + .execute( + argThat( + (Request req) -> { + if (!req.getMethod().equals("POST")) { + return false; + } + if (!req.getUrl().equals(expectedPath)) { + return false; + } + return true; + }), + eq(UpdateResponse.class)); } @Test - public void testJobsResetUsesApi2_1() { + public void testJobsResetUsesApi2_1() throws IOException { ApiClient apiClient = Mockito.mock(ApiClient.class); String expectedPath = "/api/2.1/jobs/reset"; - when(apiClient.POST(eq(expectedPath), any(), any(), any())).thenReturn(null); + when(apiClient.execute(any(), any())).thenReturn(null); + when(apiClient.serialize(any())).thenReturn(""); JobsService jobs = new JobsImpl(apiClient); jobs.reset(new ResetJob()); - verify(apiClient).POST(eq(expectedPath), any(), any(), any()); + verify(apiClient) + .execute( + argThat( + (Request req) -> { + if (!req.getMethod().equals("POST")) { + return false; + } + if (!req.getUrl().equals(expectedPath)) { + return false; + } + return true; + }), + eq(ResetResponse.class)); } @Test - public void testJobsListRunsUsesApi2_1() { + public void testJobsListRunsUsesApi2_1() throws IOException { ApiClient apiClient = Mockito.mock(ApiClient.class); String expectedPath = "/api/2.1/jobs/runs/list"; - when(apiClient.GET(eq(expectedPath), any(), any(), any())).thenReturn(null); + when(apiClient.execute(any(), any())).thenReturn(null); JobsService jobs = new JobsImpl(apiClient); jobs.listRuns(new ListRunsRequest()); - verify(apiClient).GET(eq(expectedPath), any(), any(), any()); + verify(apiClient) + .execute( + argThat( + (Request req) -> { + if (!req.getMethod().equals("GET")) { + return false; + } + if (!req.getUrl().equals(expectedPath)) { + return false; + } + return true; + }), + eq(ListRunsResponse.class)); } }