diff --git a/internal/tiger/api/client.go b/internal/tiger/api/client.go
index c0073974..86614a45 100644
--- a/internal/tiger/api/client.go
+++ b/internal/tiger/api/client.go
@@ -129,6 +129,27 @@ type ClientInterface interface {
 	// PostProjectsProjectIdServicesServiceIdEnablePooler request
 	PostProjectsProjectIdServicesServiceIdEnablePooler(ctx context.Context, projectId ProjectId, serviceId ServiceId, reqEditors ...RequestEditorFn) (*http.Response, error)
 
+	// GetProjectsProjectIdServicesServiceIdFileimports request
+	GetProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId ProjectId, serviceId ServiceId, params *GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// PostProjectsProjectIdServicesServiceIdFileimportsWithBody request with any body
+	PostProjectsProjectIdServicesServiceIdFileimportsWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	PostProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody request with any body
+	PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// GetProjectsProjectIdServicesServiceIdFileimportsImportId request
+	GetProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody request with any body
+	PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	PatchProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, body PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
+
 	// PostProjectsProjectIdServicesServiceIdForkServiceWithBody request with any body
 	PostProjectsProjectIdServicesServiceIdForkServiceWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
 
@@ -395,6 +416,102 @@ func (c *Client) PostProjectsProjectIdServicesServiceIdEnablePooler(ctx context.
return c.Client.Do(req) } +func (c *Client) GetProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId ProjectId, serviceId ServiceId, params *GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetProjectsProjectIdServicesServiceIdFileimportsRequest(c.Server, projectId, serviceId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostProjectsProjectIdServicesServiceIdFileimportsWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostProjectsProjectIdServicesServiceIdFileimportsRequestWithBody(c.Server, projectId, serviceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostProjectsProjectIdServicesServiceIdFileimportsRequest(c.Server, projectId, serviceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequestWithBody(c.Server, projectId, serviceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequest(c.Server, projectId, serviceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetProjectsProjectIdServicesServiceIdFileimportsImportIdRequest(c.Server, projectId, serviceId, importId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, 
contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequestWithBody(c.Server, projectId, serviceId, importId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, body PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequest(c.Server, projectId, serviceId, importId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) PostProjectsProjectIdServicesServiceIdForkServiceWithBody(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(c.Server, projectId, serviceId, contentType, body) if err != nil { @@ -1212,19 +1329,8 @@ func NewPostProjectsProjectIdServicesServiceIdEnablePoolerRequest(server string, return req, nil } -// NewPostProjectsProjectIdServicesServiceIdForkServiceRequest calls the generic PostProjectsProjectIdServicesServiceIdForkService builder with application/json body -func NewPostProjectsProjectIdServicesServiceIdForkServiceRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdForkServiceJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) -} - -// NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdForkService with any type of body -func NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { +// NewGetProjectsProjectIdServicesServiceIdFileimportsRequest generates requests for GetProjectsProjectIdServicesServiceIdFileimports +func NewGetProjectsProjectIdServicesServiceIdFileimportsRequest(server string, projectId ProjectId, serviceId ServiceId, params *GetProjectsProjectIdServicesServiceIdFileimportsParams) (*http.Request, error) { var err error var pathParam0 string @@ -1246,7 +1352,7 @@ func NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/forkService", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/projects/%s/services/%s/fileimports", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1256,47 +1362,138 @@ func NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } + if params != nil { + queryValues := queryURL.Query() - req.Header.Add("Content-Type", contentType) + if params.First != nil { - return req, nil -} + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "first", runtime.ParamLocationQuery, *params.First); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } -// NewGetProjectsProjectIdServicesServiceIdReplicaSetsRequest generates requests for GetProjectsProjectIdServicesServiceIdReplicaSets -func NewGetProjectsProjectIdServicesServiceIdReplicaSetsRequest(server string, projectId ProjectId, serviceId ServiceId) (*http.Request, error) { - var err error + } - var pathParam0 string + if params.Last != nil { - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) - if err != nil { - return nil, err - } + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "last", runtime.ParamLocationQuery, *params.Last); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } - var pathParam1 string + } - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) - if err != nil { - return nil, err - } + if params.After != nil { - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "after", runtime.ParamLocationQuery, *params.After); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + if params.Before != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "before", runtime.ParamLocationQuery, *params.Before); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.LabelSelector != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "label_selector", runtime.ParamLocationQuery, *params.LabelSelector); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.States != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "states", runtime.ParamLocationQuery, *params.States); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.S3KeyPrefix != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "s3_key_prefix", runtime.ParamLocationQuery, *params.S3KeyPrefix); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SourceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "source_type", runtime.ParamLocationQuery, *params.SourceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() } req, err := http.NewRequest("GET", queryURL.String(), nil) @@ -1307,19 +1504,19 @@ func NewGetProjectsProjectIdServicesServiceIdReplicaSetsRequest(server string, p return req, nil } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSets builder with application/json body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdReplicaSetsJSONRequestBody) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdFileimportsRequest calls the generic PostProjectsProjectIdServicesServiceIdFileimports builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdFileimportsRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) + return NewPostProjectsProjectIdServicesServiceIdFileimportsRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody generates requests for 
PostProjectsProjectIdServicesServiceIdReplicaSets with any type of body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdFileimportsRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdFileimports with any type of body +func NewPostProjectsProjectIdServicesServiceIdFileimportsRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -1341,7 +1538,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets", pathParam0, pathParam1) + operationPath := fmt.Sprintf("/projects/%s/services/%s/fileimports", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -1361,8 +1558,19 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server return req, nil } -// NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest generates requests for DeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetId -func NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequest calls the generic PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) +} + +// NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl with any type of body +func NewPostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -1379,19 +1587,12 @@ func NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest(s return nil, err } - var pathParam2 string - - pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) - if err != nil { - return nil, err - } - serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s", pathParam0, pathParam1, pathParam2) + operationPath := fmt.Sprintf("/projects/%s/services/%s/fileimports/presigned-url", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1401,16 +1602,18 @@ func NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest(s return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } + req.Header.Add("Content-Type", contentType) + return req, nil } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePoolerRequest generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePooler -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePoolerRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { +// NewGetProjectsProjectIdServicesServiceIdFileimportsImportIdRequest generates requests for GetProjectsProjectIdServicesServiceIdFileimportsImportId +func NewGetProjectsProjectIdServicesServiceIdFileimportsImportIdRequest(server string, projectId ProjectId, serviceId ServiceId, importId FileImportId) (*http.Request, error) { var err error var pathParam0 string @@ -1429,7 +1632,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePool var pathParam2 string - pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "import_id", runtime.ParamLocationPath, importId) if err != nil { return nil, err } @@ -1439,7 +1642,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePool return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/disablePooler", pathParam0, pathParam1, pathParam2) + operationPath := fmt.Sprintf("/projects/%s/services/%s/fileimports/%s", pathParam0, pathParam1, pathParam2) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1449,7 +1652,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePool return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), nil) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } @@ -1457,8 +1660,19 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePool return req, nil } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoolerRequest generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePooler -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoolerRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { +// NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequest calls the generic PatchProjectsProjectIdServicesServiceIdFileimportsImportId builder with application/json body +func NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequest(server string, projectId ProjectId, serviceId ServiceId, importId FileImportId, body PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequestWithBody(server, projectId, serviceId, importId, "application/json", bodyReader) +} + +// NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequestWithBody generates requests for PatchProjectsProjectIdServicesServiceIdFileimportsImportId with any type of body +func NewPatchProjectsProjectIdServicesServiceIdFileimportsImportIdRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, importId FileImportId, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -1477,7 +1691,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoole var pathParam2 string - pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "import_id", runtime.ParamLocationPath, importId) if err != nil { return nil, err } @@ -1487,7 +1701,7 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoole return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/enablePooler", pathParam0, pathParam1, pathParam2) + operationPath := fmt.Sprintf("/projects/%s/services/%s/fileimports/%s", pathParam0, pathParam1, pathParam2) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1497,27 +1711,29 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoole return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), nil) + req, err := http.NewRequest("PATCH", queryURL.String(), body) if err != nil { return nil, err } + req.Header.Add("Content-Type", contentType) + return req, nil } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResize builder with application/json body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, body PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeJSONRequestBody) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdForkServiceRequest calls the generic PostProjectsProjectIdServicesServiceIdForkService builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdForkServiceRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdForkServiceJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody(server, projectId, serviceId, replicaSetId, "application/json", bodyReader) + return NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResize with any type of body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, contentType string, body io.Reader) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdForkService with any type of body +func NewPostProjectsProjectIdServicesServiceIdForkServiceRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string @@ -1534,19 +1750,12 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeReque return nil, err } - var pathParam2 string - - pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) - if err != nil { - return nil, err - } - serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/resize", pathParam0, pathParam1, pathParam2) + operationPath := fmt.Sprintf("/projects/%s/services/%s/forkService", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1566,19 +1775,8 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeReque return req, nil } -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironment builder with application/json body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, body PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody(server, projectId, serviceId, replicaSetId, "application/json", bodyReader) -} - -// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironment with any type of body -func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, contentType string, body io.Reader) (*http.Request, error) { +// NewGetProjectsProjectIdServicesServiceIdReplicaSetsRequest generates requests for GetProjectsProjectIdServicesServiceIdReplicaSets +func NewGetProjectsProjectIdServicesServiceIdReplicaSetsRequest(server string, projectId ProjectId, serviceId ServiceId) (*http.Request, error) { var err error var pathParam0 string @@ -1595,19 +1793,12 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironm return nil, err } - var pathParam2 string - - pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) - if err != nil { - return nil, err - } - serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/setEnvironment", pathParam0, pathParam1, pathParam2) + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -1617,18 +1808,336 @@ func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironm return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewPostProjectsProjectIdServicesServiceIdResizeRequest calls the generic PostProjectsProjectIdServicesServiceIdResize builder with application/json body -func NewPostProjectsProjectIdServicesServiceIdResizeRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdResizeJSONRequestBody) (*http.Request, error) { +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSets builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdReplicaSetsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server, projectId, serviceId, "application/json", bodyReader) +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdReplicaSets with any type of body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest generates requests for DeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetId +func NewDeleteProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePoolerRequest generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePooler +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdDisablePoolerRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/disablePooler", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoolerRequest generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePooler +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdEnablePoolerRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/enablePooler", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResize builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, body PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody(server, projectId, serviceId, replicaSetId, "application/json", bodyReader) +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResize with any type of body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdResizeRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + if err != nil { + return nil, err + } + + serverURL, err 
:= url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/resize", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequest calls the generic PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironment builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequest(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, body PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody(server, projectId, serviceId, replicaSetId, "application/json", bodyReader) +} + +// NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody generates requests for PostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironment with any type of body +func NewPostProjectsProjectIdServicesServiceIdReplicaSetsReplicaSetIdSetEnvironmentRequestWithBody(server string, projectId ProjectId, serviceId ServiceId, replicaSetId ReplicaSetId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "project_id", runtime.ParamLocationPath, projectId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "service_id", runtime.ParamLocationPath, serviceId) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "replica_set_id", runtime.ParamLocationPath, replicaSetId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/projects/%s/services/%s/replicaSets/%s/setEnvironment", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostProjectsProjectIdServicesServiceIdResizeRequest calls the generic PostProjectsProjectIdServicesServiceIdResize builder with application/json body +func NewPostProjectsProjectIdServicesServiceIdResizeRequest(server string, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdResizeJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { @@ -2334,6 +2843,27 @@ type ClientWithResponsesInterface interface { // PostProjectsProjectIdServicesServiceIdEnablePoolerWithResponse request PostProjectsProjectIdServicesServiceIdEnablePoolerWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdEnablePoolerResponse, error) + // GetProjectsProjectIdServicesServiceIdFileimportsWithResponse request + GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, params *GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...RequestEditorFn) (*GetProjectsProjectIdServicesServiceIdFileimportsResponse, error) + + // PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse request with any body + PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) + + PostProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) + + // PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse request with any body + PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) + + PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) + + // GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse request + GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, reqEditors ...RequestEditorFn) (*GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) + + // PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse request with any body + PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, contentType string, body io.Reader, reqEditors 
...RequestEditorFn) (*PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) + + PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, body PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) + // PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse request with any body PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdForkServiceResponse, error) @@ -2420,15 +2950,129 @@ type ClientWithResponsesInterface interface { PostProjectsProjectIdVpcsVpcIdRenameWithResponse(ctx context.Context, projectId ProjectId, vpcId VPCId, body PostProjectsProjectIdVpcsVpcIdRenameJSONRequestBody, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdVpcsVpcIdRenameResponse, error) } -type PostAnalyticsIdentifyResponse struct { +type PostAnalyticsIdentifyResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AnalyticsResponse + JSON4XX *ClientError +} + +// Status returns HTTPResponse.Status +func (r PostAnalyticsIdentifyResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAnalyticsIdentifyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAnalyticsTrackResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *AnalyticsResponse + JSON4XX *ClientError +} + +// Status returns HTTPResponse.Status +func (r PostAnalyticsTrackResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAnalyticsTrackResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetProjectsProjectIdServicesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]Service + JSON4XX *ClientError +} + +// Status returns HTTPResponse.Status +func (r GetProjectsProjectIdServicesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetProjectsProjectIdServicesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostProjectsProjectIdServicesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON202 *Service + JSON4XX *ClientError +} + +// Status returns HTTPResponse.Status +func (r PostProjectsProjectIdServicesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostProjectsProjectIdServicesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteProjectsProjectIdServicesServiceIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON4XX *ClientError +} + +// Status returns HTTPResponse.Status +func (r DeleteProjectsProjectIdServicesServiceIdResponse) 
Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteProjectsProjectIdServicesServiceIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetProjectsProjectIdServicesServiceIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *AnalyticsResponse + JSON200 *Service JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostAnalyticsIdentifyResponse) Status() string { +func (r GetProjectsProjectIdServicesServiceIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2436,22 +3080,22 @@ func (r PostAnalyticsIdentifyResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostAnalyticsIdentifyResponse) StatusCode() int { +func (r GetProjectsProjectIdServicesServiceIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostAnalyticsTrackResponse struct { +type PostProjectsProjectIdServicesServiceIdAttachToVPCResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *AnalyticsResponse + JSON202 *SuccessMessage JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostAnalyticsTrackResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdAttachToVPCResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2459,22 +3103,22 @@ func (r PostAnalyticsTrackResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostAnalyticsTrackResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdAttachToVPCResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetProjectsProjectIdServicesResponse struct { +type PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *[]Service + JSON202 *SuccessMessage JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r GetProjectsProjectIdServicesResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2482,22 +3126,22 @@ func (r GetProjectsProjectIdServicesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetProjectsProjectIdServicesResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostProjectsProjectIdServicesResponse struct { +type PostProjectsProjectIdServicesServiceIdDisablePoolerResponse struct { Body []byte HTTPResponse *http.Response - JSON202 *Service + JSON200 *SuccessMessage JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostProjectsProjectIdServicesResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdDisablePoolerResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2505,21 +3149,22 @@ func (r PostProjectsProjectIdServicesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostProjectsProjectIdServicesResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdDisablePoolerResponse) StatusCode() int { if r.HTTPResponse != nil { return 
r.HTTPResponse.StatusCode } return 0 } -type DeleteProjectsProjectIdServicesServiceIdResponse struct { +type PostProjectsProjectIdServicesServiceIdEnablePoolerResponse struct { Body []byte HTTPResponse *http.Response + JSON200 *SuccessMessage JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r DeleteProjectsProjectIdServicesServiceIdResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdEnablePoolerResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2527,22 +3172,22 @@ func (r DeleteProjectsProjectIdServicesServiceIdResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteProjectsProjectIdServicesServiceIdResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdEnablePoolerResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetProjectsProjectIdServicesServiceIdResponse struct { +type GetProjectsProjectIdServicesServiceIdFileimportsResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *Service + JSON200 *ListFileImportResponse JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r GetProjectsProjectIdServicesServiceIdResponse) Status() string { +func (r GetProjectsProjectIdServicesServiceIdFileimportsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2550,22 +3195,25 @@ func (r GetProjectsProjectIdServicesServiceIdResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetProjectsProjectIdServicesServiceIdResponse) StatusCode() int { +func (r GetProjectsProjectIdServicesServiceIdFileimportsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostProjectsProjectIdServicesServiceIdAttachToVPCResponse struct { +type PostProjectsProjectIdServicesServiceIdFileimportsResponse struct { Body []byte HTTPResponse *http.Response - JSON202 *SuccessMessage - JSON4XX *ClientError + JSON201 *struct { + // Error Error message if the creation failed. 
+ Error *string `json:"error,omitempty"` + } + JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostProjectsProjectIdServicesServiceIdAttachToVPCResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdFileimportsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2573,22 +3221,22 @@ func (r PostProjectsProjectIdServicesServiceIdAttachToVPCResponse) Status() stri } // StatusCode returns HTTPResponse.StatusCode -func (r PostProjectsProjectIdServicesServiceIdAttachToVPCResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdFileimportsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse struct { +type PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse struct { Body []byte HTTPResponse *http.Response - JSON202 *SuccessMessage + JSON200 *GeneratePresignedURLResponse JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse) Status() string { +func (r PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2596,22 +3244,24 @@ func (r PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse) Status() st } // StatusCode returns HTTPResponse.StatusCode -func (r PostProjectsProjectIdServicesServiceIdDetachFromVPCResponse) StatusCode() int { +func (r PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostProjectsProjectIdServicesServiceIdDisablePoolerResponse struct { +type GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SuccessMessage - JSON4XX *ClientError + JSON200 *struct { + FileImport FileImport `json:"file_import"` + } + JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostProjectsProjectIdServicesServiceIdDisablePoolerResponse) Status() string { +func (r GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2619,22 +3269,25 @@ func (r PostProjectsProjectIdServicesServiceIdDisablePoolerResponse) Status() st } // StatusCode returns HTTPResponse.StatusCode -func (r PostProjectsProjectIdServicesServiceIdDisablePoolerResponse) StatusCode() int { +func (r GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostProjectsProjectIdServicesServiceIdEnablePoolerResponse struct { +type PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SuccessMessage - JSON4XX *ClientError + JSON200 *struct { + // Error Error message if the update failed. 
+ Error *string `json:"error,omitempty"` + } + JSON4XX *ClientError } // Status returns HTTPResponse.Status -func (r PostProjectsProjectIdServicesServiceIdEnablePoolerResponse) Status() string { +func (r PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -2642,7 +3295,7 @@ func (r PostProjectsProjectIdServicesServiceIdEnablePoolerResponse) Status() str } // StatusCode returns HTTPResponse.StatusCode -func (r PostProjectsProjectIdServicesServiceIdEnablePoolerResponse) StatusCode() int { +func (r PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -3256,6 +3909,75 @@ func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdEnablePooler return ParsePostProjectsProjectIdServicesServiceIdEnablePoolerResponse(rsp) } +// GetProjectsProjectIdServicesServiceIdFileimportsWithResponse request returning *GetProjectsProjectIdServicesServiceIdFileimportsResponse +func (c *ClientWithResponses) GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, params *GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...RequestEditorFn) (*GetProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + rsp, err := c.GetProjectsProjectIdServicesServiceIdFileimports(ctx, projectId, serviceId, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetProjectsProjectIdServicesServiceIdFileimportsResponse(rsp) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse request with arbitrary body returning *PostProjectsProjectIdServicesServiceIdFileimportsResponse +func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + rsp, err := c.PostProjectsProjectIdServicesServiceIdFileimportsWithBody(ctx, projectId, serviceId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostProjectsProjectIdServicesServiceIdFileimportsResponse(rsp) +} + +func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + rsp, err := c.PostProjectsProjectIdServicesServiceIdFileimports(ctx, projectId, serviceId, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParsePostProjectsProjectIdServicesServiceIdFileimportsResponse(rsp) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse request with arbitrary body returning *PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse +func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) { + rsp, err := c.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody(ctx, projectId, serviceId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse(rsp) +} + +func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, body PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) { + rsp, err := c.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl(ctx, projectId, serviceId, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse(rsp) +} + +// GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse request returning *GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse +func (c *ClientWithResponses) GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, reqEditors ...RequestEditorFn) (*GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + rsp, err := c.GetProjectsProjectIdServicesServiceIdFileimportsImportId(ctx, projectId, serviceId, importId, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse(rsp) +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse request with arbitrary body returning *PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse +func (c *ClientWithResponses) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + rsp, err := c.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody(ctx, projectId, serviceId, importId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParsePatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse(rsp) +} + +func (c *ClientWithResponses) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, importId FileImportId, body PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + rsp, err := c.PatchProjectsProjectIdServicesServiceIdFileimportsImportId(ctx, projectId, serviceId, importId, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse(rsp) +} + // PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse request with arbitrary body returning *PostProjectsProjectIdServicesServiceIdForkServiceResponse func (c *ClientWithResponses) PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse(ctx context.Context, projectId ProjectId, serviceId ServiceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostProjectsProjectIdServicesServiceIdForkServiceResponse, error) { rsp, err := c.PostProjectsProjectIdServicesServiceIdForkServiceWithBody(ctx, projectId, serviceId, contentType, body, reqEditors...) @@ -3856,6 +4578,179 @@ func ParsePostProjectsProjectIdServicesServiceIdEnablePoolerResponse(rsp *http.R return response, nil } +// ParseGetProjectsProjectIdServicesServiceIdFileimportsResponse parses an HTTP response from a GetProjectsProjectIdServicesServiceIdFileimportsWithResponse call +func ParseGetProjectsProjectIdServicesServiceIdFileimportsResponse(rsp *http.Response) (*GetProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetProjectsProjectIdServicesServiceIdFileimportsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ListFileImportResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest ClientError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + } + + return response, nil +} + +// ParsePostProjectsProjectIdServicesServiceIdFileimportsResponse parses an HTTP response from a PostProjectsProjectIdServicesServiceIdFileimportsWithResponse call +func ParsePostProjectsProjectIdServicesServiceIdFileimportsResponse(rsp *http.Response) (*PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PostProjectsProjectIdServicesServiceIdFileimportsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: + var dest struct { + // Error Error message if the creation failed. 
+ Error *string `json:"error,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON201 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest ClientError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + } + + return response, nil +} + +// ParsePostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse parses an HTTP response from a PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse call +func ParsePostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse(rsp *http.Response) (*PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest GeneratePresignedURLResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest ClientError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + } + + return response, nil +} + +// ParseGetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse parses an HTTP response from a GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse call +func ParseGetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse(rsp *http.Response) (*GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + FileImport FileImport `json:"file_import"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest ClientError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + } + + return response, nil +} + +// ParsePatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse parses an HTTP response from a PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse call +func ParsePatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse(rsp *http.Response) (*PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Error Error message if the update failed. 
+ Error *string `json:"error,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest ClientError + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + } + + return response, nil +} + // ParsePostProjectsProjectIdServicesServiceIdForkServiceResponse parses an HTTP response from a PostProjectsProjectIdServicesServiceIdForkServiceWithResponse call func ParsePostProjectsProjectIdServicesServiceIdForkServiceResponse(rsp *http.Response) (*PostProjectsProjectIdServicesServiceIdForkServiceResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) diff --git a/internal/tiger/api/mocks/mock_client.go b/internal/tiger/api/mocks/mock_client.go index 33b0f492..49a66cb1 100644 --- a/internal/tiger/api/mocks/mock_client.go +++ b/internal/tiger/api/mocks/mock_client.go @@ -202,6 +202,46 @@ func (mr *MockClientInterfaceMockRecorder) GetProjectsProjectIdServicesServiceId return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdServicesServiceId", reflect.TypeOf((*MockClientInterface)(nil).GetProjectsProjectIdServicesServiceId), varargs...) } +// GetProjectsProjectIdServicesServiceIdFileimports mocks base method. +func (m *MockClientInterface) GetProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, params *api.GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, params} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetProjectsProjectIdServicesServiceIdFileimports", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProjectsProjectIdServicesServiceIdFileimports indicates an expected call of GetProjectsProjectIdServicesServiceIdFileimports. +func (mr *MockClientInterfaceMockRecorder) GetProjectsProjectIdServicesServiceIdFileimports(ctx, projectId, serviceId, params any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, params}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdServicesServiceIdFileimports", reflect.TypeOf((*MockClientInterface)(nil).GetProjectsProjectIdServicesServiceIdFileimports), varargs...) +} + +// GetProjectsProjectIdServicesServiceIdFileimportsImportId mocks base method. +func (m *MockClientInterface) GetProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetProjectsProjectIdServicesServiceIdFileimportsImportId", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProjectsProjectIdServicesServiceIdFileimportsImportId indicates an expected call of GetProjectsProjectIdServicesServiceIdFileimportsImportId. 
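// Usage sketch (illustrative only): the typed ...FileimportsWithResponse wrappers and
// parse helpers added above are normally consumed as shown below. NewClientWithResponses
// and the bearer-token RequestEditorFn are assumed to follow the standard oapi-codegen
// client surface; the server URL, token, and IDs are placeholders.
package example

import (
	"context"
	"fmt"
	"net/http"

	"github.com/timescale/tiger-cli/internal/tiger/api"
)

func listFileImports(ctx context.Context, server, token, projectID, serviceID string) error {
	client, err := api.NewClientWithResponses(server)
	if err != nil {
		return err
	}
	first := 20
	params := &api.GetProjectsProjectIdServicesServiceIdFileimportsParams{First: &first}
	resp, err := client.GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(
		ctx, api.ProjectId(projectID), api.ServiceId(serviceID), params,
		func(ctx context.Context, req *http.Request) error {
			// Attach credentials; the header scheme is an assumption, not part of the patch.
			req.Header.Set("Authorization", "Bearer "+token)
			return nil
		},
	)
	if err != nil {
		return err
	}
	if resp.JSON200 == nil {
		return fmt.Errorf("listing file imports failed: %s", resp.Status())
	}
	for _, fi := range resp.JSON200.FileImports {
		fmt.Printf("%s\t%s\n", fi.Id, fi.State.State)
	}
	return nil
}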
+func (mr *MockClientInterfaceMockRecorder) GetProjectsProjectIdServicesServiceIdFileimportsImportId(ctx, projectId, serviceId, importId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdServicesServiceIdFileimportsImportId", reflect.TypeOf((*MockClientInterface)(nil).GetProjectsProjectIdServicesServiceIdFileimportsImportId), varargs...) +} + // GetProjectsProjectIdServicesServiceIdReplicaSets mocks base method. func (m *MockClientInterface) GetProjectsProjectIdServicesServiceIdReplicaSets(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -302,6 +342,46 @@ func (mr *MockClientInterfaceMockRecorder) GetProjectsProjectIdVpcsVpcIdPeerings return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdVpcsVpcIdPeeringsPeeringId", reflect.TypeOf((*MockClientInterface)(nil).GetProjectsProjectIdVpcsVpcIdPeeringsPeeringId), varargs...) } +// PatchProjectsProjectIdServicesServiceIdFileimportsImportId mocks base method. +func (m *MockClientInterface) PatchProjectsProjectIdServicesServiceIdFileimportsImportId(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, body api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PatchProjectsProjectIdServicesServiceIdFileimportsImportId", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportId indicates an expected call of PatchProjectsProjectIdServicesServiceIdFileimportsImportId. +func (mr *MockClientInterfaceMockRecorder) PatchProjectsProjectIdServicesServiceIdFileimportsImportId(ctx, projectId, serviceId, importId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PatchProjectsProjectIdServicesServiceIdFileimportsImportId", reflect.TypeOf((*MockClientInterface)(nil).PatchProjectsProjectIdServicesServiceIdFileimportsImportId), varargs...) +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody mocks base method. +func (m *MockClientInterface) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody indicates an expected call of PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody. 
+func (mr *MockClientInterfaceMockRecorder) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody(ctx, projectId, serviceId, importId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody", reflect.TypeOf((*MockClientInterface)(nil).PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBody), varargs...) +} + // PostAnalyticsIdentify mocks base method. func (m *MockClientInterface) PostAnalyticsIdentify(ctx context.Context, body api.PostAnalyticsIdentifyJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -522,6 +602,86 @@ func (mr *MockClientInterfaceMockRecorder) PostProjectsProjectIdServicesServiceI return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdEnablePooler", reflect.TypeOf((*MockClientInterface)(nil).PostProjectsProjectIdServicesServiceIdEnablePooler), varargs...) } +// PostProjectsProjectIdServicesServiceIdFileimports mocks base method. +func (m *MockClientInterface) PostProjectsProjectIdServicesServiceIdFileimports(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, body api.PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimports", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimports indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimports. +func (mr *MockClientInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimports(ctx, projectId, serviceId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimports", reflect.TypeOf((*MockClientInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimports), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl mocks base method. +func (m *MockClientInterface) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, body api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl. 
+func (mr *MockClientInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl(ctx, projectId, serviceId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl", reflect.TypeOf((*MockClientInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody mocks base method. +func (m *MockClientInterface) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody. +func (mr *MockClientInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody(ctx, projectId, serviceId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody", reflect.TypeOf((*MockClientInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBody), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithBody mocks base method. +func (m *MockClientInterface) PostProjectsProjectIdServicesServiceIdFileimportsWithBody(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*http.Response, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsWithBody", varargs...) + ret0, _ := ret[0].(*http.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithBody indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsWithBody. +func (mr *MockClientInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsWithBody(ctx, projectId, serviceId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsWithBody", reflect.TypeOf((*MockClientInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsWithBody), varargs...) +} + // PostProjectsProjectIdServicesServiceIdForkService mocks base method. 
func (m *MockClientInterface) PostProjectsProjectIdServicesServiceIdForkService(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, body api.PostProjectsProjectIdServicesServiceIdForkServiceJSONRequestBody, reqEditors ...api.RequestEditorFn) (*http.Response, error) { m.ctrl.T.Helper() @@ -1126,6 +1286,46 @@ func (mr *MockClientWithResponsesInterfaceMockRecorder) DeleteProjectsProjectIdV return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteProjectsProjectIdVpcsVpcIdWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).DeleteProjectsProjectIdVpcsVpcIdWithResponse), varargs...) } +// GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, reqEditors ...api.RequestEditorFn) (*api.GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse", varargs...) + ret0, _ := ret[0].(*api.GetProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse indicates an expected call of GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx, projectId, serviceId, importId any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse), varargs...) +} + +// GetProjectsProjectIdServicesServiceIdFileimportsWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, params *api.GetProjectsProjectIdServicesServiceIdFileimportsParams, reqEditors ...api.RequestEditorFn) (*api.GetProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, params} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetProjectsProjectIdServicesServiceIdFileimportsWithResponse", varargs...) + ret0, _ := ret[0].(*api.GetProjectsProjectIdServicesServiceIdFileimportsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProjectsProjectIdServicesServiceIdFileimportsWithResponse indicates an expected call of GetProjectsProjectIdServicesServiceIdFileimportsWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx, projectId, serviceId, params any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, params}, reqEditors...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdServicesServiceIdFileimportsWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).GetProjectsProjectIdServicesServiceIdFileimportsWithResponse), varargs...) +} + // GetProjectsProjectIdServicesServiceIdReplicaSetsWithResponse mocks base method. func (m *MockClientWithResponsesInterface) GetProjectsProjectIdServicesServiceIdReplicaSetsWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, reqEditors ...api.RequestEditorFn) (*api.GetProjectsProjectIdServicesServiceIdReplicaSetsResponse, error) { m.ctrl.T.Helper() @@ -1266,6 +1466,46 @@ func (mr *MockClientWithResponsesInterfaceMockRecorder) GetProjectsProjectIdVpcs return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsProjectIdVpcsWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).GetProjectsProjectIdVpcsWithResponse), varargs...) } +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse indicates an expected call of PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse(ctx, projectId, serviceId, importId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithBodyWithResponse), varargs...) +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, importId api.FileImportId, body api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, importId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse", varargs...) 
+ ret0, _ := ret[0].(*api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse indicates an expected call of PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx, projectId, serviceId, importId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, importId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse), varargs...) +} + // PostAnalyticsIdentifyWithBodyWithResponse mocks base method. func (m *MockClientWithResponsesInterface) PostAnalyticsIdentifyWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.PostAnalyticsIdentifyResponse, error) { m.ctrl.T.Helper() @@ -1466,6 +1706,86 @@ func (mr *MockClientWithResponsesInterfaceMockRecorder) PostProjectsProjectIdSer return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdEnablePoolerWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PostProjectsProjectIdServicesServiceIdEnablePoolerWithResponse), varargs...) } +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse(ctx, projectId, serviceId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithBodyWithResponse), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse mocks base method. 
+func (m *MockClientWithResponsesInterface) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, body api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse", varargs...) + ret0, _ := ret[0].(*api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse(ctx, projectId, serviceId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse mocks base method. +func (m *MockClientWithResponsesInterface) PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, contentType, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse", varargs...) + ret0, _ := ret[0].(*api.PostProjectsProjectIdServicesServiceIdFileimportsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse(ctx, projectId, serviceId, contentType, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, contentType, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsWithBodyWithResponse), varargs...) +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithResponse mocks base method. 
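// Test sketch (illustrative only): the generated mocks above can back a unit test for
// code that depends on the typed client interface. The gomock import path and the
// NewMockClientWithResponsesInterface constructor are assumed to match the mockgen
// output in this package; the IDs and response values are placeholders.
package example

import (
	"net/http"
	"testing"

	"go.uber.org/mock/gomock"

	"github.com/timescale/tiger-cli/internal/tiger/api"
	"github.com/timescale/tiger-cli/internal/tiger/api/mocks"
)

func TestCreateFileImportUsesTypedClient(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	client := mocks.NewMockClientWithResponsesInterface(ctrl)
	client.EXPECT().
		PostProjectsProjectIdServicesServiceIdFileimportsWithResponse(
			gomock.Any(), api.ProjectId("proj-123"), api.ServiceId("svc-456"), gomock.Any(),
		).
		Return(&api.PostProjectsProjectIdServicesServiceIdFileimportsResponse{
			HTTPResponse: &http.Response{StatusCode: http.StatusCreated},
		}, nil)

	// The code under test would receive `client` and is expected to create exactly
	// one file import for this project/service pair.
	_ = client
}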
+func (m *MockClientWithResponsesInterface) PostProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, body api.PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody, reqEditors ...api.RequestEditorFn) (*api.PostProjectsProjectIdServicesServiceIdFileimportsResponse, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, projectId, serviceId, body} + for _, a := range reqEditors { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "PostProjectsProjectIdServicesServiceIdFileimportsWithResponse", varargs...) + ret0, _ := ret[0].(*api.PostProjectsProjectIdServicesServiceIdFileimportsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PostProjectsProjectIdServicesServiceIdFileimportsWithResponse indicates an expected call of PostProjectsProjectIdServicesServiceIdFileimportsWithResponse. +func (mr *MockClientWithResponsesInterfaceMockRecorder) PostProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx, projectId, serviceId, body any, reqEditors ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, projectId, serviceId, body}, reqEditors...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PostProjectsProjectIdServicesServiceIdFileimportsWithResponse", reflect.TypeOf((*MockClientWithResponsesInterface)(nil).PostProjectsProjectIdServicesServiceIdFileimportsWithResponse), varargs...) +} + // PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse mocks base method. func (m *MockClientWithResponsesInterface) PostProjectsProjectIdServicesServiceIdForkServiceWithBodyWithResponse(ctx context.Context, projectId api.ProjectId, serviceId api.ServiceId, contentType string, body io.Reader, reqEditors ...api.RequestEditorFn) (*api.PostProjectsProjectIdServicesServiceIdForkServiceResponse, error) { m.ctrl.T.Helper() diff --git a/internal/tiger/api/types.go b/internal/tiger/api/types.go index 67b10f1b..58a5afcc 100644 --- a/internal/tiger/api/types.go +++ b/internal/tiger/api/types.go @@ -9,17 +9,17 @@ import ( // Defines values for DeployStatus. const ( - CONFIGURING DeployStatus = "CONFIGURING" - DELETED DeployStatus = "DELETED" - DELETING DeployStatus = "DELETING" - OPTIMIZING DeployStatus = "OPTIMIZING" - PAUSED DeployStatus = "PAUSED" - PAUSING DeployStatus = "PAUSING" - QUEUED DeployStatus = "QUEUED" - READY DeployStatus = "READY" - RESUMING DeployStatus = "RESUMING" - UNSTABLE DeployStatus = "UNSTABLE" - UPGRADING DeployStatus = "UPGRADING" + DeployStatusCONFIGURING DeployStatus = "CONFIGURING" + DeployStatusDELETED DeployStatus = "DELETED" + DeployStatusDELETING DeployStatus = "DELETING" + DeployStatusOPTIMIZING DeployStatus = "OPTIMIZING" + DeployStatusPAUSED DeployStatus = "PAUSED" + DeployStatusPAUSING DeployStatus = "PAUSING" + DeployStatusQUEUED DeployStatus = "QUEUED" + DeployStatusREADY DeployStatus = "READY" + DeployStatusRESUMING DeployStatus = "RESUMING" + DeployStatusUNSTABLE DeployStatus = "UNSTABLE" + DeployStatusUPGRADING DeployStatus = "UPGRADING" ) // Defines values for EnvironmentTag. @@ -28,6 +28,32 @@ const ( EnvironmentTagPROD EnvironmentTag = "PROD" ) +// Defines values for FileImportDefinitionType. +const ( + CSV FileImportDefinitionType = "CSV" + PARQUET FileImportDefinitionType = "PARQUET" +) + +// Defines values for FileImportSourceType. 
+const ( + FileImportSourceTypeHTTP FileImportSourceType = "HTTP" + FileImportSourceTypeINTERNAL FileImportSourceType = "INTERNAL" + FileImportSourceTypeS3 FileImportSourceType = "S3" +) + +// Defines values for FileImportState. +const ( + FileImportStateCANCELLED FileImportState = "CANCELLED" + FileImportStateFAILURE FileImportState = "FAILURE" + FileImportStateINQUEUE FileImportState = "IN_QUEUE" + FileImportStatePAUSED FileImportState = "PAUSED" + FileImportStateRETRYING FileImportState = "RETRYING" + FileImportStateRUNNING FileImportState = "RUNNING" + FileImportStateSKIPPED FileImportState = "SKIPPED" + FileImportStateSUCCESS FileImportState = "SUCCESS" + FileImportStateWAITINGFORUPLOAD FileImportState = "WAITING_FOR_UPLOAD" +) + // Defines values for ForkStrategy. const ( LASTSNAPSHOT ForkStrategy = "LAST_SNAPSHOT" @@ -63,11 +89,51 @@ const ( SetEnvironmentInputEnvironmentPROD SetEnvironmentInputEnvironment = "PROD" ) +// Defines values for UpdateFileImportRequestType. +const ( + Cancel UpdateFileImportRequestType = "cancel" + Labels UpdateFileImportRequestType = "labels" + Retry UpdateFileImportRequestType = "retry" +) + +// Defines values for GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType. +const ( + GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceTypeHTTP GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType = "HTTP" + GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceTypeINTERNAL GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType = "INTERNAL" + GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceTypeS3 GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType = "S3" +) + +// ColumnMapping defines model for ColumnMapping. +type ColumnMapping struct { + // Destination Destination column name in the target table. + Destination string `json:"destination"` + + // Source Source column name in the original file. + Source string `json:"source"` +} + // ConnectionPooler defines model for ConnectionPooler. type ConnectionPooler struct { Endpoint *Endpoint `json:"endpoint,omitempty"` } +// CreateFileImportInput defines model for CreateFileImportInput. +type CreateFileImportInput struct { + // Definition File format definition. Only csv or parquet should be set based on type. + Definition FileImportDefinition `json:"definition"` + + // Id Unique identifier for this file import. + Id string `json:"id"` + + // Labels Optional labels for filtering. + Labels *[]FileImportLabel `json:"labels,omitempty"` + Settings *ImportSettings `json:"settings,omitempty"` + + // Source Source configuration for the file import. Only one of s3, http, or internal should be set based on type. + Source FileImportSource `json:"source"` + TableIdentifier TableIdentifier `json:"table_identifier"` +} + // DeployStatus defines model for DeployStatus. type DeployStatus string @@ -86,6 +152,170 @@ type Error struct { Message *string `json:"message,omitempty"` } +// FileImport defines model for FileImport. +type FileImport struct { + // CreatedAt Creation timestamp. + CreatedAt time.Time `json:"created_at"` + + // Definition File format definition. Only csv or parquet should be set based on type. + Definition FileImportDefinition `json:"definition"` + + // Id Unique file import identifier. + Id string `json:"id"` + + // Labels Labels for filtering and organization. + Labels []FileImportLabel `json:"labels"` + + // ProjectId Project identifier. + ProjectId string `json:"project_id"` + + // ServiceId Service identifier. 
+ ServiceId string `json:"service_id"` + Settings *ImportSettings `json:"settings,omitempty"` + + // Size File size in bytes. + Size int `json:"size"` + + // Source Source configuration for the file import. Only one of s3, http, or internal should be set based on type. + Source FileImportSource `json:"source"` + State FileImportStateInfo `json:"state"` + TableIdentifier TableIdentifier `json:"table_identifier"` + + // UpdatedAt Last update timestamp. + UpdatedAt time.Time `json:"updated_at"` +} + +// FileImportDefinition File format definition. Only csv or parquet should be set based on type. +type FileImportDefinition struct { + Csv *FileImportDefinitionCSV `json:"csv,omitempty"` + Parquet *FileImportDefinitionParquet `json:"parquet,omitempty"` + + // Type The file format type. + Type FileImportDefinitionType `json:"type"` +} + +// FileImportDefinitionCSV defines model for FileImportDefinitionCSV. +type FileImportDefinitionCSV struct { + // AutoColumnMapping Automatically map columns by matching header names. Requires skip_header=true. Mutually exclusive with column_names and column_mappings. + AutoColumnMapping *bool `json:"auto_column_mapping,omitempty"` + + // ColumnMappings Mappings from source to destination columns. Requires skip_header=true. Mutually exclusive with column_names and auto_column_mapping. + ColumnMappings *[]ColumnMapping `json:"column_mappings,omitempty"` + + // ColumnNames Column names matching the destination table. Mutually exclusive with column_mappings and auto_column_mapping. + ColumnNames *[]string `json:"column_names,omitempty"` + + // Delimiter The delimiter character for CSV files. + Delimiter *string `json:"delimiter,omitempty"` + + // SkipHeader Whether to skip the first row as a header. Required for column_mappings and auto_column_mapping. + SkipHeader *bool `json:"skip_header,omitempty"` +} + +// FileImportDefinitionParquet defines model for FileImportDefinitionParquet. +type FileImportDefinitionParquet struct { + // AutoColumnMapping Automatically map columns by matching header names. Mutually exclusive with column_mappings. + AutoColumnMapping *bool `json:"auto_column_mapping,omitempty"` + + // ColumnMappings Mappings from source to destination columns. Mutually exclusive with auto_column_mapping. + ColumnMappings *[]ColumnMapping `json:"column_mappings,omitempty"` +} + +// FileImportDefinitionType The file format type. +type FileImportDefinitionType string + +// FileImportHTTPSource defines model for FileImportHTTPSource. +type FileImportHTTPSource struct { + // Url The HTTP(S) URL to download the file from. + Url string `json:"url"` +} + +// FileImportInternalSource defines model for FileImportInternalSource. +type FileImportInternalSource struct { + // Id Internal identifier for the uploaded file. + Id string `json:"id"` +} + +// FileImportLabel defines model for FileImportLabel. +type FileImportLabel struct { + // Key Label key for filtering. + Key string `json:"key"` + + // Value Label value. + Value string `json:"value"` +} + +// FileImportS3Source defines model for FileImportS3Source. +type FileImportS3Source struct { + // Bucket The S3 bucket name. + Bucket string `json:"bucket"` + + // Id The identifier for this source (deprecated, not actively used). + Id *string `json:"id,omitempty"` + + // Key The S3 object key. + Key string `json:"key"` + + // RoleArn AWS IAM role ARN for accessing the S3 bucket. + RoleArn *string `json:"role_arn,omitempty"` +} + +// FileImportSource Source configuration for the file import. 
Only one of s3, http, or internal should be set based on type. +type FileImportSource struct { + Http *FileImportHTTPSource `json:"http,omitempty"` + Internal *FileImportInternalSource `json:"internal,omitempty"` + S3 *FileImportS3Source `json:"s3,omitempty"` + + // Type The type of source for the file import. + Type FileImportSourceType `json:"type"` +} + +// FileImportSourceType The type of source for the file import. +type FileImportSourceType string + +// FileImportState Current state of the file import operation. +type FileImportState string + +// FileImportStateInfo defines model for FileImportStateInfo. +type FileImportStateInfo struct { + // FailureReason Detailed failure reason if state is FAILURE. + FailureReason *string `json:"failure_reason,omitempty"` + Progress *struct { + // Current Current progress value. + Current *int `json:"current,omitempty"` + + // Message Human-readable progress message. + Message *string `json:"message,omitempty"` + + // Total Total expected value. + Total *int `json:"total,omitempty"` + } `json:"progress,omitempty"` + + // State Current state of the file import operation. + State FileImportState `json:"state"` + + // Timelines History of state transitions. + Timelines *[]FileImportStateTimeline `json:"timelines,omitempty"` +} + +// FileImportStateTimeline defines model for FileImportStateTimeline. +type FileImportStateTimeline struct { + // At Timestamp when this state was entered. + At *time.Time `json:"at,omitempty"` + + // Payload Additional information about the state change. + Payload *struct { + // FailureReason Reason for failure, if applicable. + FailureReason *string `json:"failure_reason,omitempty"` + + // RetryAt Scheduled retry time, if applicable. + RetryAt *time.Time `json:"retry_at,omitempty"` + } `json:"payload,omitempty"` + + // State The state that was entered. + State *string `json:"state,omitempty"` +} + // ForkServiceCreate Create a fork of an existing service. Service type, region code, and storage are always inherited from the parent service. // HA replica count is always set to 0 for forked services. type ForkServiceCreate struct { @@ -124,6 +354,21 @@ type ForkSpec struct { // - PITR: Point-in-time recovery using target_time type ForkStrategy string +// GeneratePresignedURLInput defines model for GeneratePresignedURLInput. +type GeneratePresignedURLInput struct { + // ContentLength Exact size of the file to upload in bytes. Must match the Content-Length header during upload. + ContentLength int `json:"content_length"` + + // Id Unique identifier for the file. This will be used later when creating the file import. + Id string `json:"id"` +} + +// GeneratePresignedURLResponse defines model for GeneratePresignedURLResponse. +type GeneratePresignedURLResponse struct { + // Url Presigned S3 URL for uploading the file. Valid for a limited time. + Url string `json:"url"` +} + // HAReplica defines model for HAReplica. type HAReplica struct { // ReplicaCount Number of high-availability replicas (all replicas are asynchronous by default). @@ -133,6 +378,34 @@ type HAReplica struct { SyncReplicaCount *int `json:"sync_replica_count,omitempty"` } +// ImportSettings defines model for ImportSettings. +type ImportSettings struct { + // OnConflictDoNothing Handle conflicts by ignoring conflicting rows. + OnConflictDoNothing *bool `json:"on_conflict_do_nothing,omitempty"` +} + +// ListFileImportResponse defines model for ListFileImportResponse. +type ListFileImportResponse struct { + // FileImports List of file imports. 
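// Construction sketch (illustrative only): the presigned-upload flow documented by
// GeneratePresignedURLInput/GeneratePresignedURLResponse and CreateFileImportInput
// above fits together roughly as follows. The id, size, and table name are
// placeholders, and the HTTP POST/PUT plumbing is omitted.
package example

import (
	"encoding/json"
	"fmt"

	"github.com/timescale/tiger-cli/internal/tiger/api"
)

func buildCSVImport() {
	id := "import-2024-10-01-001"

	// Step 1: request a presigned URL. content_length must equal the Content-Length
	// of the upload, and the same id is reused when creating the file import.
	presign := api.GeneratePresignedURLInput{Id: id, ContentLength: 1048576}

	// Step 2 (not shown): upload the file bytes to the returned presigned URL.

	// Step 3: create the import, referencing the uploaded file via an INTERNAL source.
	skipHeader := true
	autoMap := true
	input := api.CreateFileImportInput{
		Id: id,
		Definition: api.FileImportDefinition{
			Type: api.CSV,
			Csv: &api.FileImportDefinitionCSV{
				// auto_column_mapping requires skip_header=true and is mutually
				// exclusive with column_names and column_mappings.
				SkipHeader:        &skipHeader,
				AutoColumnMapping: &autoMap,
			},
		},
		Source: api.FileImportSource{
			Type:     api.FileImportSourceTypeINTERNAL,
			Internal: &api.FileImportInternalSource{Id: id},
		},
		TableIdentifier: api.TableIdentifier{TableName: "sales_data"},
	}

	a, _ := json.Marshal(presign)
	b, _ := json.Marshal(input)
	fmt.Println(string(a))
	fmt.Println(string(b))
}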
+ FileImports []FileImport `json:"file_imports"` + PageInfo PageInfo `json:"page_info"` +} + +// PageInfo defines model for PageInfo. +type PageInfo struct { + // EndCursor Cursor for fetching the next page. + EndCursor *string `json:"end_cursor,omitempty"` + + // HasNextPage Whether there is a next page. + HasNextPage bool `json:"has_next_page"` + + // HasPreviousPage Whether there is a previous page. + HasPreviousPage bool `json:"has_previous_page"` + + // StartCursor Cursor for fetching the previous page. + StartCursor *string `json:"start_cursor,omitempty"` +} + // Peering defines model for Peering. type Peering struct { ErrorMessage *string `json:"error_message,omitempty"` @@ -318,6 +591,45 @@ type SetHAReplicaInput struct { SyncReplicaCount *int `json:"sync_replica_count,omitempty"` } +// TableIdentifier defines model for TableIdentifier. +type TableIdentifier struct { + // SchemaName PostgreSQL schema name. Defaults to 'public' if not specified. + SchemaName *string `json:"schema_name,omitempty"` + + // TableName PostgreSQL table name. + TableName string `json:"table_name"` +} + +// UpdateFileImportInput defines model for UpdateFileImportInput. +type UpdateFileImportInput struct { + // Requests List of update operations to perform. + Requests []UpdateFileImportRequest `json:"requests"` +} + +// UpdateFileImportRequest Update request. Only one of cancel or labels should be set based on type. +type UpdateFileImportRequest struct { + Cancel *UpdateFileImportRequestCancel `json:"cancel,omitempty"` + Labels *UpdateFileImportRequestLabels `json:"labels,omitempty"` + + // Type Type of update operation to perform. + Type UpdateFileImportRequestType `json:"type"` +} + +// UpdateFileImportRequestCancel defines model for UpdateFileImportRequestCancel. +type UpdateFileImportRequestCancel struct { + // Reason Reason for cancellation. + Reason string `json:"reason"` +} + +// UpdateFileImportRequestLabels defines model for UpdateFileImportRequestLabels. +type UpdateFileImportRequestLabels struct { + // Labels New labels to set. + Labels []FileImportLabel `json:"labels"` +} + +// UpdateFileImportRequestType Type of update operation to perform. +type UpdateFileImportRequestType string + // UpdatePasswordInput defines model for UpdatePasswordInput. type UpdatePasswordInput struct { // Password The new password. @@ -345,6 +657,9 @@ type VPCRename struct { Name string `json:"name"` } +// FileImportId defines model for FileImportId. +type FileImportId = string + // PeeringId defines model for PeeringId. type PeeringId = string @@ -389,6 +704,44 @@ type PostAnalyticsTrackJSONBody struct { Properties *map[string]interface{} `json:"properties,omitempty"` } +// GetProjectsProjectIdServicesServiceIdFileimportsParams defines parameters for GetProjectsProjectIdServicesServiceIdFileimports. +type GetProjectsProjectIdServicesServiceIdFileimportsParams struct { + // First Number of items to fetch for forward pagination. + First *int `form:"first,omitempty" json:"first,omitempty"` + + // Last Number of items to fetch for backward pagination. + Last *int `form:"last,omitempty" json:"last,omitempty"` + + // After Cursor for fetching the next page. + After *string `form:"after,omitempty" json:"after,omitempty"` + + // Before Cursor for fetching the previous page. + Before *string `form:"before,omitempty" json:"before,omitempty"` + + // LabelSelector Filter by label key and value using k8s label selector syntax (e.g., 'source=s3-live-sync,s3-live-sync-id=123'). 
+ LabelSelector *string `form:"label_selector,omitempty" json:"label_selector,omitempty"` + + // States Filter by import states (comma-separated). Valid states are: + // - IN_QUEUE: Import is queued for processing + // - RUNNING: Import is currently processing + // - RETRYING: Import is being retried after a failure + // - SUCCESS: Import completed successfully + // - FAILURE: Import failed + // - CANCELLED: Import was cancelled + // - PAUSED: Import is paused + // - SKIPPED: Import was skipped + States *string `form:"states,omitempty" json:"states,omitempty"` + + // S3KeyPrefix Filter S3 imports by key prefix (e.g., 'data/2024/'). Only affects S3 source imports. + S3KeyPrefix *string `form:"s3_key_prefix,omitempty" json:"s3_key_prefix,omitempty"` + + // SourceType Filter by source type for optimized ordering. Supported values are S3, HTTP, INTERNAL. + SourceType *GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType `form:"source_type,omitempty" json:"source_type,omitempty"` +} + +// GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType defines parameters for GetProjectsProjectIdServicesServiceIdFileimports. +type GetProjectsProjectIdServicesServiceIdFileimportsParamsSourceType string + // PostAnalyticsIdentifyJSONRequestBody defines body for PostAnalyticsIdentify for application/json ContentType. type PostAnalyticsIdentifyJSONRequestBody PostAnalyticsIdentifyJSONBody @@ -404,6 +757,15 @@ type PostProjectsProjectIdServicesServiceIdAttachToVPCJSONRequestBody = ServiceV // PostProjectsProjectIdServicesServiceIdDetachFromVPCJSONRequestBody defines body for PostProjectsProjectIdServicesServiceIdDetachFromVPC for application/json ContentType. type PostProjectsProjectIdServicesServiceIdDetachFromVPCJSONRequestBody = ServiceVPCInput +// PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody defines body for PostProjectsProjectIdServicesServiceIdFileimports for application/json ContentType. +type PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody = CreateFileImportInput + +// PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody defines body for PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrl for application/json ContentType. +type PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody = GeneratePresignedURLInput + +// PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody defines body for PatchProjectsProjectIdServicesServiceIdFileimportsImportId for application/json ContentType. +type PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody = UpdateFileImportInput + // PostProjectsProjectIdServicesServiceIdForkServiceJSONRequestBody defines body for PostProjectsProjectIdServicesServiceIdForkService for application/json ContentType. 
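Not part of this patch, but as a hedged sketch of how the pagination and filter params above compose in practice (the helper name and the hard-coded filter values are illustrative only), listing failed tiger-cli imports through the generated client could look like:

package example

import (
	"context"
	"fmt"

	"github.com/timescale/tiger-cli/internal/tiger/api"
)

// listFailedCLIImports is an illustrative helper: it fetches file imports created
// by tiger-cli that ended in FAILURE, using the generated params struct above.
func listFailedCLIImports(ctx context.Context, client api.ClientWithResponsesInterface, projectID, serviceID string) error {
	first := 50
	states := "FAILURE"
	selector := "source=tiger-cli"
	params := &api.GetProjectsProjectIdServicesServiceIdFileimportsParams{
		First:         &first,
		States:        &states,
		LabelSelector: &selector,
	}
	resp, err := client.GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx, projectID, serviceID, params)
	if err != nil {
		return err
	}
	if resp.StatusCode() != 200 || resp.JSON200 == nil {
		return fmt.Errorf("list failed with status %d", resp.StatusCode())
	}
	for _, fi := range resp.JSON200.FileImports {
		fmt.Printf("%s\t%s\n", fi.Id, fi.State.State)
	}
	return nil
}

The same params struct carries the cursor fields (After/Before) for paging; only the filters shown here are exercised in this sketch.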
type PostProjectsProjectIdServicesServiceIdForkServiceJSONRequestBody = ForkServiceCreate diff --git a/internal/tiger/cmd/fileimport.go b/internal/tiger/cmd/fileimport.go new file mode 100644 index 00000000..8a6856d4 --- /dev/null +++ b/internal/tiger/cmd/fileimport.go @@ -0,0 +1,676 @@ +package cmd + +import ( + "bufio" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "strings" + "time" + + "github.com/olekukonko/tablewriter" + "github.com/spf13/cobra" + + "github.com/timescale/tiger-cli/internal/tiger/api" + "github.com/timescale/tiger-cli/internal/tiger/config" + "github.com/timescale/tiger-cli/internal/tiger/util" +) + +// buildFileImportCmd creates the main file-import command with all subcommands +func buildFileImportCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "file-import", + Aliases: []string{"fileimport", "import"}, + Short: "Manage file imports", + Long: `Import CSV and Parquet files into database tables.`, + } + + // Add all subcommands + cmd.AddCommand(buildFileImportUploadCmd()) + cmd.AddCommand(buildFileImportListCmd()) + cmd.AddCommand(buildFileImportGetCmd()) + cmd.AddCommand(buildFileImportCancelCmd()) + + return cmd +} + +// buildFileImportUploadCmd creates the upload command +func buildFileImportUploadCmd() *cobra.Command { + var table string + var schema string + var skipHeader bool + var delimiter string + var autoColumnMapping bool + var noWait bool + var waitTimeout time.Duration + + cmd := &cobra.Command{ + Use: "upload ", + Short: "Upload and import a file", + Long: `Upload a CSV or Parquet file and import it into a database table. + +This command performs the complete workflow: +1. Generates a presigned S3 URL +2. Uploads the file to S3 +3. Creates a file import operation +4. Waits for the import to complete (by default) + +Examples: + # Upload CSV file to table 'sales_data' + tiger file-import upload data.csv --table sales_data + + # Upload with custom schema and skip header + tiger file-import upload data.csv --table orders --schema analytics --skip-header + + # Upload without waiting for completion + tiger file-import upload large.csv --table big_table --no-wait`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + filePath := args[0] + + // Validate required flags + if table == "" { + return fmt.Errorf("--table flag is required") + } + + cmd.SilenceUsage = true + + // Get config + cfg, err := config.Load() + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + // Get credentials + apiKey, projectID, err := getCredentialsForService() + if err != nil { + return exitWithCode(ExitAuthenticationError, fmt.Errorf("authentication required: %w. Please run 'tiger auth login'", err)) + } + + serviceID := cfg.ServiceID + if serviceID == "" { + return fmt.Errorf("service ID is required. 
Set it with 'tiger config set service_id '") + } + + // Get file info + fileInfo, err := os.Stat(filePath) + if err != nil { + return fmt.Errorf("failed to read file: %w", err) + } + + contentLength := int(fileInfo.Size()) + if contentLength > 524288000 { + return fmt.Errorf("file size exceeds 500MB limit") + } + + // Create API client + client, err := api.NewTigerClient(cfg, apiKey) + if err != nil { + return fmt.Errorf("failed to create API client: %w", err) + } + + ctx := context.Background() + + // Generate unique ID for this upload + importID := fmt.Sprintf("cli-upload-%d", time.Now().Unix()) + + // Step 1: Generate presigned URL + fmt.Fprintf(cmd.OutOrStdout(), "Generating upload URL...\n") + presignedResp, err := client.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse( + ctx, + projectID, + serviceID, + api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody{ + Id: importID, + ContentLength: contentLength, + }, + ) + if err != nil { + return fmt.Errorf("failed to generate presigned URL: %w", err) + } + + if presignedResp.StatusCode() != 200 || presignedResp.JSON200 == nil { + return exitWithErrorFromStatusCode(presignedResp.StatusCode(), presignedResp.JSON4XX) + } + + presignedURL := presignedResp.JSON200.Url + + // Step 2: Upload file to S3 + fmt.Fprintf(cmd.OutOrStdout(), "Uploading file (%d bytes)...\n", contentLength) + file, err := os.Open(filePath) + if err != nil { + return fmt.Errorf("failed to open file: %w", err) + } + defer file.Close() + + if err := uploadFileToS3(presignedURL, file, contentLength); err != nil { + return fmt.Errorf("failed to upload file: %w", err) + } + + fmt.Fprintf(cmd.OutOrStdout(), "Upload successful\n") + + // Step 3: Create file import + fmt.Fprintf(cmd.OutOrStdout(), "Creating file import...\n") + + // Determine file type from extension + fileType := "CSV" + if strings.HasSuffix(strings.ToLower(filePath), ".parquet") { + fileType = "PARQUET" + } + + // Build definition based on file type + var definition api.FileImportDefinition + definition.Type = api.FileImportDefinitionType(fileType) + + if fileType == "CSV" { + definition.Csv = &api.FileImportDefinitionCSV{ + Delimiter: &delimiter, + SkipHeader: &skipHeader, + AutoColumnMapping: &autoColumnMapping, + } + } else { + definition.Parquet = &api.FileImportDefinitionParquet{ + AutoColumnMapping: &autoColumnMapping, + } + } + + tableIdentifier := api.TableIdentifier{ + TableName: table, + } + if schema != "" { + tableIdentifier.SchemaName = &schema + } + + createResp, err := client.PostProjectsProjectIdServicesServiceIdFileimportsWithResponse( + ctx, + projectID, + serviceID, + api.PostProjectsProjectIdServicesServiceIdFileimportsJSONRequestBody{ + Id: importID, + Source: api.FileImportSource{ + Type: api.FileImportSourceTypeINTERNAL, + Internal: &api.FileImportInternalSource{ + Id: importID, + }, + }, + Definition: definition, + TableIdentifier: tableIdentifier, + Labels: &[]api.FileImportLabel{ + {Key: "source", Value: "tiger-cli"}, + {Key: "file_name", Value: fileInfo.Name()}, + }, + }, + ) + if err != nil { + return fmt.Errorf("failed to create file import: %w", err) + } + + if createResp.StatusCode() != 201 { + return exitWithErrorFromStatusCode(createResp.StatusCode(), createResp.JSON4XX) + } + + fmt.Fprintf(cmd.OutOrStdout(), "File import created: %s\n", importID) + + // Step 4: Wait for completion (unless --no-wait) + if !noWait { + fmt.Fprintf(cmd.OutOrStdout(), "Waiting for import to complete...\n") + if err := waitForImportCompletion(ctx, 
client, projectID, serviceID, importID, waitTimeout, cmd.OutOrStdout()); err != nil { + return err + } + } else { + fmt.Fprintf(cmd.OutOrStdout(), "Import started. Use 'tiger file-import get %s' to check status\n", importID) + } + + return nil + }, + } + + cmd.Flags().StringVar(&table, "table", "", "destination table name (required)") + cmd.Flags().StringVar(&schema, "schema", "", "destination schema name (defaults to 'public')") + cmd.Flags().BoolVar(&skipHeader, "skip-header", true, "skip first row as header") + cmd.Flags().StringVar(&delimiter, "delimiter", ",", "CSV delimiter character") + cmd.Flags().BoolVar(&autoColumnMapping, "auto-column-mapping", true, "automatically map columns by name") + cmd.Flags().BoolVar(&noWait, "no-wait", false, "don't wait for import to complete") + cmd.Flags().DurationVar(&waitTimeout, "wait-timeout", 30*time.Minute, "maximum time to wait for import completion") + + cmd.MarkFlagRequired("table") + + return cmd +} + +// buildFileImportListCmd creates the list command +func buildFileImportListCmd() *cobra.Command { + var output string + var first int + var states []string + var labelSelector string + + cmd := &cobra.Command{ + Use: "list", + Aliases: []string{"ls"}, + Short: "List file imports", + Long: `List file imports for the current service with optional filtering. + +Examples: + # List recent imports + tiger file-import list + + # List first 20 imports + tiger file-import list --first 20 + + # List only successful imports + tiger file-import list --states SUCCESS + + # List failed and cancelled imports + tiger file-import list --states FAILURE,CANCELLED + + # List imports with specific label + tiger file-import list --label-selector source=tiger-cli`, + RunE: func(cmd *cobra.Command, args []string) error { + // Get config + cfg, err := config.Load() + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + if cmd.Flags().Changed("output") { + cfg.Output = output + } + + cmd.SilenceUsage = true + + // Get credentials + apiKey, projectID, err := getCredentialsForService() + if err != nil { + return exitWithCode(ExitAuthenticationError, fmt.Errorf("authentication required: %w. Please run 'tiger auth login'", err)) + } + + serviceID := cfg.ServiceID + if serviceID == "" { + return fmt.Errorf("service ID is required. 
Set it with 'tiger config set service_id '") + } + + // Create API client + client, err := api.NewTigerClient(cfg, apiKey) + if err != nil { + return fmt.Errorf("failed to create API client: %w", err) + } + + ctx, cancel := context.WithTimeout(cmd.Context(), 30*time.Second) + defer cancel() + + // Build query params + params := &api.GetProjectsProjectIdServicesServiceIdFileimportsParams{ + First: &first, + } + + if len(states) > 0 { + statesStr := strings.Join(states, ",") + params.States = &statesStr + } + + if labelSelector != "" { + params.LabelSelector = &labelSelector + } + + // Make API call + resp, err := client.GetProjectsProjectIdServicesServiceIdFileimportsWithResponse(ctx, projectID, serviceID, params) + if err != nil { + return fmt.Errorf("failed to list file imports: %w", err) + } + + if resp.StatusCode() != 200 { + return exitWithErrorFromStatusCode(resp.StatusCode(), resp.JSON4XX) + } + + if resp.JSON200 == nil { + return fmt.Errorf("empty response from API") + } + + // Output file imports + return outputFileImportList(cmd, resp.JSON200.FileImports, cfg.Output) + }, + } + + cmd.Flags().VarP((*outputWithEnvFlag)(&output), "output", "o", "output format (json, yaml, table)") + cmd.Flags().IntVar(&first, "first", 10, "number of imports to fetch") + cmd.Flags().StringSliceVar(&states, "states", nil, "filter by states (e.g., SUCCESS,FAILURE)") + cmd.Flags().StringVar(&labelSelector, "label-selector", "", "filter by labels (e.g., source=tiger-cli)") + + return cmd +} + +// buildFileImportGetCmd creates the get command +func buildFileImportGetCmd() *cobra.Command { + var output string + + cmd := &cobra.Command{ + Use: "get ", + Aliases: []string{"show", "describe"}, + Short: "Get file import details", + Long: `Get detailed information about a specific file import. + +Examples: + # Get import details + tiger file-import get cli-upload-1234567890 + + # Get details in JSON format + tiger file-import get cli-upload-1234567890 --output json`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + importID := args[0] + + // Get config + cfg, err := config.Load() + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + if cmd.Flags().Changed("output") { + cfg.Output = output + } + + cmd.SilenceUsage = true + + // Get credentials + apiKey, projectID, err := getCredentialsForService() + if err != nil { + return exitWithCode(ExitAuthenticationError, fmt.Errorf("authentication required: %w. Please run 'tiger auth login'", err)) + } + + serviceID := cfg.ServiceID + if serviceID == "" { + return fmt.Errorf("service ID is required. 
Set it with 'tiger config set service_id '") + } + + // Create API client + client, err := api.NewTigerClient(cfg, apiKey) + if err != nil { + return fmt.Errorf("failed to create API client: %w", err) + } + + ctx, cancel := context.WithTimeout(cmd.Context(), 30*time.Second) + defer cancel() + + // Make API call + resp, err := client.GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx, projectID, serviceID, importID) + if err != nil { + return fmt.Errorf("failed to get file import: %w", err) + } + + if resp.StatusCode() != 200 { + return exitWithErrorFromStatusCode(resp.StatusCode(), resp.JSON4XX) + } + + if resp.JSON200 == nil { + return fmt.Errorf("empty response from API") + } + + // Output file import details + return outputFileImport(cmd, resp.JSON200.FileImport, cfg.Output) + }, + } + + cmd.Flags().VarP((*outputWithEnvFlag)(&output), "output", "o", "output format (json, yaml, table)") + + return cmd +} + +// buildFileImportCancelCmd creates the cancel command +func buildFileImportCancelCmd() *cobra.Command { + var reason string + var confirm bool + + cmd := &cobra.Command{ + Use: "cancel ", + Short: "Cancel a running file import", + Long: `Cancel a file import that is currently running or queued. + +Note: This is a destructive operation. Use --confirm to skip confirmation prompt. + +Examples: + # Cancel an import (with confirmation) + tiger file-import cancel cli-upload-1234567890 + + # Cancel without confirmation prompt + tiger file-import cancel cli-upload-1234567890 --confirm --reason "user requested"`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + importID := args[0] + + if reason == "" { + reason = "cancelled by user via tiger-cli" + } + + // Interactive confirmation unless --confirm + if !confirm { + fmt.Fprintf(cmd.ErrOrStderr(), "Are you sure you want to cancel import '%s'? (yes/no): ", importID) + reader := bufio.NewReader(cmd.InOrStdin()) + confirmation, _ := reader.ReadString('\n') + confirmation = strings.TrimSpace(strings.ToLower(confirmation)) + if confirmation != "yes" && confirmation != "y" { + return fmt.Errorf("operation cancelled") + } + } + + cmd.SilenceUsage = true + + // Get config + cfg, err := config.Load() + if err != nil { + return fmt.Errorf("failed to load config: %w", err) + } + + // Get credentials + apiKey, projectID, err := getCredentialsForService() + if err != nil { + return exitWithCode(ExitAuthenticationError, fmt.Errorf("authentication required: %w. Please run 'tiger auth login'", err)) + } + + serviceID := cfg.ServiceID + if serviceID == "" { + return fmt.Errorf("service ID is required. 
Set it with 'tiger config set service_id '") + } + + // Create API client + client, err := api.NewTigerClient(cfg, apiKey) + if err != nil { + return fmt.Errorf("failed to create API client: %w", err) + } + + ctx, cancel := context.WithTimeout(cmd.Context(), 30*time.Second) + defer cancel() + + // Make API call to cancel + cancelType := api.Cancel + resp, err := client.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse( + ctx, + projectID, + serviceID, + importID, + api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody{ + Requests: []api.UpdateFileImportRequest{ + { + Type: cancelType, + Cancel: &api.UpdateFileImportRequestCancel{ + Reason: reason, + }, + }, + }, + }, + ) + if err != nil { + return fmt.Errorf("failed to cancel file import: %w", err) + } + + if resp.StatusCode() != 200 { + return exitWithErrorFromStatusCode(resp.StatusCode(), resp.JSON4XX) + } + + fmt.Fprintf(cmd.OutOrStdout(), "File import cancelled successfully\n") + return nil + }, + } + + cmd.Flags().StringVar(&reason, "reason", "", "reason for cancellation") + cmd.Flags().BoolVar(&confirm, "confirm", false, "skip confirmation prompt") + + return cmd +} + +// Helper functions + +func uploadFileToS3(presignedURL string, file io.Reader, contentLength int) error { + req, err := http.NewRequest("PUT", presignedURL, file) + if err != nil { + return err + } + + req.Header.Set("Content-Length", fmt.Sprintf("%d", contentLength)) + req.ContentLength = int64(contentLength) + + client := &http.Client{Timeout: 10 * time.Minute} + resp, err := client.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + body, _ := io.ReadAll(resp.Body) + return fmt.Errorf("S3 upload failed with status %d: %s", resp.StatusCode, string(body)) + } + + return nil +} + +func waitForImportCompletion(ctx context.Context, client api.ClientWithResponsesInterface, projectID, serviceID, importID string, timeout time.Duration, out io.Writer) error { + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + ticker := time.NewTicker(2 * time.Second) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return exitWithCode(ExitTimeout, fmt.Errorf("timeout waiting for import to complete")) + case <-ticker.C: + resp, err := client.GetProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx, projectID, serviceID, importID) + if err != nil { + return fmt.Errorf("failed to check import status: %w", err) + } + + if resp.StatusCode() != 200 || resp.JSON200 == nil { + return fmt.Errorf("failed to get import status") + } + + fileImport := resp.JSON200.FileImport + state := string(fileImport.State.State) + + // Show progress if available + if fileImport.State.Progress != nil { + fmt.Fprintf(out, "Progress: %d/%d - %s\n", + fileImport.State.Progress.Current, + *fileImport.State.Progress.Total, + *fileImport.State.Progress.Message, + ) + } + + // Check terminal states + switch state { + case "SUCCESS": + fmt.Fprintf(out, "Import completed successfully\n") + return nil + case "FAILURE": + failureReason := "unknown error" + if fileImport.State.FailureReason != nil { + failureReason = *fileImport.State.FailureReason + } + return fmt.Errorf("import failed: %s", failureReason) + case "CANCELLED": + return fmt.Errorf("import was cancelled") + } + } + } +} + +func outputFileImportList(cmd *cobra.Command, fileImports []api.FileImport, outputFormat string) error { + switch strings.ToLower(outputFormat) { + case "json": + enc := 
json.NewEncoder(cmd.OutOrStdout()) + enc.SetIndent("", " ") + return enc.Encode(fileImports) + case "yaml": + return util.SerializeToYAML(cmd.OutOrStdout(), fileImports, true) + default: + // Table output + table := tablewriter.NewWriter(cmd.OutOrStdout()) + table.Header("ID", "STATE", "TABLE", "SIZE", "CREATED") + + for _, fi := range fileImports { + state := string(fi.State.State) + tableName := fi.TableIdentifier.TableName + if fi.TableIdentifier.SchemaName != nil { + tableName = *fi.TableIdentifier.SchemaName + "." + tableName + } + + sizeStr := fmt.Sprintf("%d bytes", fi.Size) + if fi.Size > 1024*1024 { + sizeStr = fmt.Sprintf("%.2f MB", float64(fi.Size)/(1024*1024)) + } else if fi.Size > 1024 { + sizeStr = fmt.Sprintf("%.2f KB", float64(fi.Size)/1024) + } + + table.Append( + fi.Id, + state, + tableName, + sizeStr, + fi.CreatedAt.Format(time.RFC3339), + ) + } + + return table.Render() + } +} + +func outputFileImport(cmd *cobra.Command, fileImport api.FileImport, outputFormat string) error { + switch strings.ToLower(outputFormat) { + case "json": + enc := json.NewEncoder(cmd.OutOrStdout()) + enc.SetIndent("", " ") + return enc.Encode(fileImport) + case "yaml": + return util.SerializeToYAML(cmd.OutOrStdout(), fileImport, true) + default: + // Table output + fmt.Fprintf(cmd.OutOrStdout(), "Import ID: %s\n", fileImport.Id) + fmt.Fprintf(cmd.OutOrStdout(), "State: %s\n", fileImport.State.State) + + tableName := fileImport.TableIdentifier.TableName + if fileImport.TableIdentifier.SchemaName != nil { + tableName = *fileImport.TableIdentifier.SchemaName + "." + tableName + } + fmt.Fprintf(cmd.OutOrStdout(), "Table: %s\n", tableName) + fmt.Fprintf(cmd.OutOrStdout(), "Size: %d bytes\n", fileImport.Size) + fmt.Fprintf(cmd.OutOrStdout(), "Source Type: %s\n", fileImport.Source.Type) + fmt.Fprintf(cmd.OutOrStdout(), "Created: %s\n", fileImport.CreatedAt.Format(time.RFC3339)) + + if fileImport.State.FailureReason != nil && *fileImport.State.FailureReason != "" { + fmt.Fprintf(cmd.OutOrStdout(), "Failure: %s\n", *fileImport.State.FailureReason) + } + + if len(fileImport.Labels) > 0 { + fmt.Fprintf(cmd.OutOrStdout(), "Labels:\n") + for _, label := range fileImport.Labels { + fmt.Fprintf(cmd.OutOrStdout(), " %s=%s\n", label.Key, label.Value) + } + } + + return nil + } +} diff --git a/internal/tiger/cmd/root.go b/internal/tiger/cmd/root.go index 52b4f20e..32a83414 100644 --- a/internal/tiger/cmd/root.go +++ b/internal/tiger/cmd/root.go @@ -124,6 +124,7 @@ tiger auth login cmd.AddCommand(buildAuthCmd()) cmd.AddCommand(buildServiceCmd()) cmd.AddCommand(buildDbCmd()) + cmd.AddCommand(buildFileImportCmd()) cmd.AddCommand(buildMCPCmd()) wrapCommandsWithAnalytics(cmd) diff --git a/openapi.yaml b/openapi.yaml index f14a58c7..a522910b 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -23,6 +23,8 @@ tags: description: Manage services, read replicas, and their associated actions. - name: Analytics description: Track analytics events. + - name: File Imports + description: Manage file import operations for CSV and Parquet files. paths: /analytics/identify: @@ -654,6 +656,192 @@ paths: '4XX': $ref: '#/components/responses/ClientError' + /projects/{project_id}/services/{service_id}/fileimports/presigned-url: + post: + tags: + - File Imports + summary: Generate Presigned Upload URL + description: Generates a presigned S3 URL to upload a file up to 500MB. The file can then be imported using the INTERNAL source type. 
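Not part of this patch: a hedged Go sketch of the handshake this endpoint describes, condensed from the CLI flow above. Request a URL for an exact content length, then PUT the bytes with a matching Content-Length; the helper name, import ID, and file path are placeholders.

package example

import (
	"context"
	"fmt"
	"net/http"
	"os"
	"time"

	"github.com/timescale/tiger-cli/internal/tiger/api"
)

// uploadViaPresignedURL is an illustrative sketch: generate a presigned URL for a
// known file size, then upload the file with the same Content-Length.
func uploadViaPresignedURL(ctx context.Context, client api.ClientWithResponsesInterface, projectID, serviceID, importID, path string) error {
	info, err := os.Stat(path)
	if err != nil {
		return err
	}
	resp, err := client.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlWithResponse(ctx, projectID, serviceID,
		api.PostProjectsProjectIdServicesServiceIdFileimportsPresignedUrlJSONRequestBody{
			Id:            importID,
			ContentLength: int(info.Size()),
		})
	if err != nil {
		return err
	}
	if resp.StatusCode() != 200 || resp.JSON200 == nil {
		return fmt.Errorf("presigned URL request failed with status %d", resp.StatusCode())
	}
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()
	req, err := http.NewRequestWithContext(ctx, http.MethodPut, resp.JSON200.Url, f)
	if err != nil {
		return err
	}
	// Content-Length must match the value sent when requesting the URL.
	req.ContentLength = info.Size()
	httpResp, err := (&http.Client{Timeout: 10 * time.Minute}).Do(req)
	if err != nil {
		return err
	}
	defer httpResp.Body.Close()
	if httpResp.StatusCode != http.StatusOK {
		return fmt.Errorf("upload failed with status %d", httpResp.StatusCode)
	}
	return nil
}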
+ parameters: + - $ref: '#/components/parameters/ProjectId' + - $ref: '#/components/parameters/ServiceId' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/GeneratePresignedURLInput' + responses: + '200': + description: Presigned URL generated successfully. + content: + application/json: + schema: + $ref: '#/components/schemas/GeneratePresignedURLResponse' + '4XX': + $ref: '#/components/responses/ClientError' + + /projects/{project_id}/services/{service_id}/fileimports: + get: + tags: + - File Imports + summary: List File Imports + description: Retrieves a paginated list of file imports for a specific service. + parameters: + - $ref: '#/components/parameters/ProjectId' + - $ref: '#/components/parameters/ServiceId' + - name: first + in: query + description: Number of items to fetch for forward pagination. + schema: + type: integer + minimum: 1 + maximum: 100 + example: 20 + - name: last + in: query + description: Number of items to fetch for backward pagination. + schema: + type: integer + minimum: 1 + maximum: 100 + example: 20 + - name: after + in: query + description: Cursor for fetching the next page. + schema: + type: string + example: 'eyJpZCI6IjEwMCJ9' + - name: before + in: query + description: Cursor for fetching the previous page. + schema: + type: string + example: 'eyJpZCI6IjIwMCJ9' + - name: label_selector + in: query + description: Filter by label key and value using k8s label selector syntax (e.g., 'source=s3-live-sync,s3-live-sync-id=123'). + schema: + type: string + example: 'source=s3-live-sync' + - name: states + in: query + description: | + Filter by import states (comma-separated). Valid states are: + - IN_QUEUE: Import is queued for processing + - RUNNING: Import is currently processing + - RETRYING: Import is being retried after a failure + - SUCCESS: Import completed successfully + - FAILURE: Import failed + - CANCELLED: Import was cancelled + - PAUSED: Import is paused + - SKIPPED: Import was skipped + schema: + type: string + example: 'SUCCESS,FAILURE' + - name: s3_key_prefix + in: query + description: Filter S3 imports by key prefix (e.g., 'data/2024/'). Only affects S3 source imports. + schema: + type: string + example: 'data/2024/' + - name: source_type + in: query + description: Filter by source type for optimized ordering. Supported values are S3, HTTP, INTERNAL. + schema: + type: string + enum: [S3, HTTP, INTERNAL] + example: 'S3' + responses: + '200': + description: A paginated list of file imports. + content: + application/json: + schema: + $ref: '#/components/schemas/ListFileImportResponse' + '4XX': + $ref: '#/components/responses/ClientError' + post: + tags: + - File Imports + summary: Create a File Import + description: Creates a new file import operation for importing CSV or Parquet files into a database table. + parameters: + - $ref: '#/components/parameters/ProjectId' + - $ref: '#/components/parameters/ServiceId' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/CreateFileImportInput' + responses: + '201': + description: File import created successfully. + content: + application/json: + schema: + type: object + properties: + error: + type: string + description: Error message if the creation failed. 
+ '4XX': + $ref: '#/components/responses/ClientError' + + /projects/{project_id}/services/{service_id}/fileimports/{import_id}: + get: + tags: + - File Imports + summary: Get a File Import + description: Retrieves the details of a specific file import by its ID. + parameters: + - $ref: '#/components/parameters/ProjectId' + - $ref: '#/components/parameters/ServiceId' + - $ref: '#/components/parameters/FileImportId' + responses: + '200': + description: File import details. + content: + application/json: + schema: + type: object + required: + - file_import + properties: + file_import: + $ref: '#/components/schemas/FileImport' + '4XX': + $ref: '#/components/responses/ClientError' + patch: + tags: + - File Imports + summary: Update a File Import + description: Updates a file import by applying one or more operations such as canceling, retrying, or updating labels. + parameters: + - $ref: '#/components/parameters/ProjectId' + - $ref: '#/components/parameters/ServiceId' + - $ref: '#/components/parameters/FileImportId' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/UpdateFileImportInput' + responses: + '200': + description: File import updated successfully. + content: + application/json: + schema: + type: object + properties: + error: + type: string + description: Error message if the update failed. + '4XX': + $ref: '#/components/responses/ClientError' + components: parameters: ProjectId: @@ -696,6 +884,14 @@ components: schema: type: string example: "1234567890" + FileImportId: + name: import_id + in: path + required: true + description: The unique identifier of the file import. + schema: + type: string + example: 'import-123abc' schemas: VPC: @@ -1121,6 +1317,429 @@ components: type: string description: The ID of the VPC to attach the service to. example: "1234567890" + FileImportSourceType: + type: string + enum: + - S3 + - HTTP + - INTERNAL + description: The type of source for the file import. + FileImportS3Source: + type: object + required: + - key + - bucket + properties: + id: + type: string + description: The identifier for this source (deprecated, not actively used). + example: 'source-123' + key: + type: string + description: The S3 object key. + example: 'data/2024/sales.csv' + bucket: + type: string + description: The S3 bucket name. + example: 'my-data-bucket' + role_arn: + type: string + description: AWS IAM role ARN for accessing the S3 bucket. + example: 'arn:aws:iam::123456789012:role/S3AccessRole' + FileImportHTTPSource: + type: object + required: + - url + properties: + url: + type: string + description: The HTTP(S) URL to download the file from. + example: 'https://example.com/data/sales.csv' + FileImportInternalSource: + type: object + required: + - id + properties: + id: + type: string + description: Internal identifier for the uploaded file. + example: 'internal-upload-123' + FileImportSource: + type: object + required: + - type + properties: + type: + $ref: '#/components/schemas/FileImportSourceType' + s3: + $ref: '#/components/schemas/FileImportS3Source' + http: + $ref: '#/components/schemas/FileImportHTTPSource' + internal: + $ref: '#/components/schemas/FileImportInternalSource' + description: Source configuration for the file import. Only one of s3, http, or internal should be set based on type. + FileImportDefinitionType: + type: string + enum: + - CSV + - PARQUET + description: The file format type. 
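Not part of this patch: the one-of rules above (a single source sub-object matching the source type; column_names, column_mappings, and auto_column_mapping mutually exclusive) are easiest to see as a hedged Go sketch against the generated types. The helper name and literal values are placeholders.

package example

import (
	"github.com/timescale/tiger-cli/internal/tiger/api"
)

// newCSVImportInput is an illustrative sketch: an INTERNAL-source CSV import that
// skips the header row and maps columns automatically. Only the source sub-object
// matching Type is set, and only one column-mapping option is used.
func newCSVImportInput(importID, table string) api.CreateFileImportInput {
	skipHeader := true
	autoMap := true
	delimiter := ","
	return api.CreateFileImportInput{
		Id: importID,
		Source: api.FileImportSource{
			Type:     api.FileImportSourceTypeINTERNAL,
			Internal: &api.FileImportInternalSource{Id: importID},
		},
		Definition: api.FileImportDefinition{
			Type: api.FileImportDefinitionType("CSV"),
			Csv: &api.FileImportDefinitionCSV{
				Delimiter:         &delimiter,
				SkipHeader:        &skipHeader,
				AutoColumnMapping: &autoMap,
			},
		},
		TableIdentifier: api.TableIdentifier{TableName: table},
		Labels: &[]api.FileImportLabel{
			{Key: "source", Value: "docs-example"},
		},
	}
}

The returned value is what the create endpoint's CreateFileImportInput body expects; a PARQUET import would set Definition.Parquet instead of Definition.Csv.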
+ ColumnMapping: + type: object + required: + - source + - destination + properties: + source: + type: string + description: Source column name in the original file. + example: 'customer_name' + destination: + type: string + description: Destination column name in the target table. + example: 'name' + FileImportDefinitionCSV: + type: object + properties: + delimiter: + type: string + description: The delimiter character for CSV files. + default: ',' + example: ',' + skip_header: + type: boolean + description: Whether to skip the first row as a header. Required for column_mappings and auto_column_mapping. + default: false + column_names: + type: array + items: + type: string + description: Column names matching the destination table. Mutually exclusive with column_mappings and auto_column_mapping. + example: ['id', 'name', 'created_at'] + column_mappings: + type: array + items: + $ref: '#/components/schemas/ColumnMapping' + description: Mappings from source to destination columns. Requires skip_header=true. Mutually exclusive with column_names and auto_column_mapping. + auto_column_mapping: + type: boolean + description: Automatically map columns by matching header names. Requires skip_header=true. Mutually exclusive with column_names and column_mappings. + default: false + FileImportDefinitionParquet: + type: object + properties: + column_mappings: + type: array + items: + $ref: '#/components/schemas/ColumnMapping' + description: Mappings from source to destination columns. Mutually exclusive with auto_column_mapping. + auto_column_mapping: + type: boolean + description: Automatically map columns by matching header names. Mutually exclusive with column_mappings. + default: false + FileImportDefinition: + type: object + required: + - type + properties: + type: + $ref: '#/components/schemas/FileImportDefinitionType' + csv: + $ref: '#/components/schemas/FileImportDefinitionCSV' + parquet: + $ref: '#/components/schemas/FileImportDefinitionParquet' + description: File format definition. Only csv or parquet should be set based on type. + TableIdentifier: + type: object + required: + - table_name + properties: + schema_name: + type: string + description: PostgreSQL schema name. Defaults to 'public' if not specified. + example: 'analytics' + table_name: + type: string + description: PostgreSQL table name. + example: 'sales_data' + FileImportState: + type: string + enum: + - WAITING_FOR_UPLOAD + - IN_QUEUE + - RUNNING + - RETRYING + - SUCCESS + - FAILURE + - CANCELLED + - PAUSED + - SKIPPED + description: Current state of the file import operation. + FileImportStateTimeline: + type: object + properties: + at: + type: string + format: date-time + description: Timestamp when this state was entered. + example: '2024-01-15T10:30:00Z' + state: + type: string + description: The state that was entered. + example: 'SUCCESS' + payload: + type: object + properties: + failure_reason: + type: string + description: Reason for failure, if applicable. + retry_at: + type: string + format: date-time + description: Scheduled retry time, if applicable. + description: Additional information about the state change. + FileImportStateInfo: + type: object + required: + - state + properties: + state: + $ref: '#/components/schemas/FileImportState' + progress: + type: object + properties: + current: + type: integer + description: Current progress value. + example: 5000 + total: + type: integer + description: Total expected value. + example: 10000 + message: + type: string + description: Human-readable progress message. 
+ example: 'Processing rows' + failure_reason: + type: string + description: Detailed failure reason if state is FAILURE. + example: 'Invalid data format in row 1234' + timelines: + type: array + items: + $ref: '#/components/schemas/FileImportStateTimeline' + description: History of state transitions. + FileImportLabel: + type: object + required: + - key + - value + properties: + key: + type: string + description: Label key for filtering. + example: 'source' + value: + type: string + description: Label value. + example: 's3-live-sync' + ImportSettings: + type: object + properties: + on_conflict_do_nothing: + type: boolean + description: Handle conflicts by ignoring conflicting rows. + default: false + FileImport: + type: object + required: + - service_id + - project_id + - id + - created_at + - updated_at + - source + - definition + - table_identifier + - state + - labels + - size + properties: + service_id: + type: string + description: Service identifier. + example: 'd1k5vk7hf2' + project_id: + type: string + description: Project identifier. + example: 'rp1pz7uyae' + id: + type: string + description: Unique file import identifier. + example: 'import-123abc' + created_at: + type: string + format: date-time + description: Creation timestamp. + example: '2024-01-15T10:00:00Z' + updated_at: + type: string + format: date-time + description: Last update timestamp. + example: '2024-01-15T10:30:00Z' + source: + $ref: '#/components/schemas/FileImportSource' + definition: + $ref: '#/components/schemas/FileImportDefinition' + table_identifier: + $ref: '#/components/schemas/TableIdentifier' + state: + $ref: '#/components/schemas/FileImportStateInfo' + labels: + type: array + items: + $ref: '#/components/schemas/FileImportLabel' + description: Labels for filtering and organization. + size: + type: integer + description: File size in bytes. + example: 1048576 + settings: + $ref: '#/components/schemas/ImportSettings' + CreateFileImportInput: + type: object + required: + - id + - source + - definition + - table_identifier + properties: + id: + type: string + description: Unique identifier for this file import. + example: 'import-123abc' + source: + $ref: '#/components/schemas/FileImportSource' + definition: + $ref: '#/components/schemas/FileImportDefinition' + table_identifier: + $ref: '#/components/schemas/TableIdentifier' + labels: + type: array + items: + $ref: '#/components/schemas/FileImportLabel' + description: Optional labels for filtering. + settings: + $ref: '#/components/schemas/ImportSettings' + UpdateFileImportRequestType: + type: string + enum: + - cancel + - labels + - retry + description: Type of update operation to perform. + UpdateFileImportRequestCancel: + type: object + required: + - reason + properties: + reason: + type: string + description: Reason for cancellation. + example: 'User requested cancellation' + UpdateFileImportRequestLabels: + type: object + required: + - labels + properties: + labels: + type: array + items: + $ref: '#/components/schemas/FileImportLabel' + description: New labels to set. + UpdateFileImportRequest: + type: object + required: + - type + properties: + type: + $ref: '#/components/schemas/UpdateFileImportRequestType' + cancel: + $ref: '#/components/schemas/UpdateFileImportRequestCancel' + labels: + $ref: '#/components/schemas/UpdateFileImportRequestLabels' + description: Update request. Only one of cancel or labels should be set based on type. 
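Not part of this patch: a hedged Go sketch of sending a single cancel operation through the PATCH endpoint, mirroring the one-of rule above (only the sub-object matching Type is set). The helper name and reason text are placeholders.

package example

import (
	"context"
	"fmt"

	"github.com/timescale/tiger-cli/internal/tiger/api"
)

// cancelImport is an illustrative sketch: one cancel request in the requests list.
func cancelImport(ctx context.Context, client api.ClientWithResponsesInterface, projectID, serviceID, importID, reason string) error {
	resp, err := client.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdWithResponse(ctx, projectID, serviceID, importID,
		api.PatchProjectsProjectIdServicesServiceIdFileimportsImportIdJSONRequestBody{
			Requests: []api.UpdateFileImportRequest{
				{
					Type:   api.Cancel,
					Cancel: &api.UpdateFileImportRequestCancel{Reason: reason},
				},
			},
		})
	if err != nil {
		return err
	}
	if resp.StatusCode() != 200 {
		return fmt.Errorf("cancel failed with status %d", resp.StatusCode())
	}
	return nil
}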
+ UpdateFileImportInput: + type: object + required: + - requests + properties: + requests: + type: array + items: + $ref: '#/components/schemas/UpdateFileImportRequest' + description: List of update operations to perform. + minItems: 1 + PageInfo: + type: object + required: + - has_next_page + - has_previous_page + properties: + end_cursor: + type: string + description: Cursor for fetching the next page. + example: 'eyJpZCI6IjEyMyJ9' + start_cursor: + type: string + description: Cursor for fetching the previous page. + example: 'eyJpZCI6IjEwMCJ9' + has_next_page: + type: boolean + description: Whether there is a next page. + example: true + has_previous_page: + type: boolean + description: Whether there is a previous page. + example: false + ListFileImportResponse: + type: object + required: + - file_imports + - page_info + properties: + file_imports: + type: array + items: + $ref: '#/components/schemas/FileImport' + description: List of file imports. + page_info: + $ref: '#/components/schemas/PageInfo' + GeneratePresignedURLInput: + type: object + required: + - id + - content_length + properties: + id: + type: string + description: Unique identifier for the file. This will be used later when creating the file import. + example: "my-upload-123" + content_length: + type: integer + description: Exact size of the file to upload in bytes. Must match the Content-Length header during upload. + minimum: 1 + maximum: 524288000 + example: 1048576 + GeneratePresignedURLResponse: + type: object + required: + - url + properties: + url: + type: string + description: Presigned S3 URL for uploading the file. Valid for a limited time. + example: "https://s3.amazonaws.com/bucket/key?X-Amz-Signature=..." Error: type: object properties: