diff --git a/tests/integration/godog/features/model/deployment.feature b/tests/integration/godog/features/model/deployment.feature index ff225aa74e..fd43d6baea 100644 --- a/tests/integration/godog/features/model/deployment.feature +++ b/tests/integration/godog/features/model/deployment.feature @@ -4,16 +4,28 @@ Feature: Model deployment As a model user I need to create a Model resource and verify it is deployed - Scenario: Success - Load a model - Given I have an "iris" model + Scenario Outline: Success - Load a model + Given I have an "<model>" model When the model is applied Then the model should eventually become Ready + Examples: + | model | + | iris | + | income-xgb | + | mnist-onnx | + | income-lgb | + | wine | + | mnist-pytorch | + | tfsimple1 | + - Scenario: Success - Load a model again + Scenario: Success - Load a model and expect status model available Given I have an "iris" model When the model is applied - Then the model should eventually become Ready + And the model eventually becomes Ready + Then the model status message should eventually be "ModelAvailable" + Scenario: Load a specific model Given I deploy model spec with timeout "10s": @@ -31,22 +43,4 @@ Feature: Model deployment """ Then the model should eventually become Ready - Scenario: Success - Load a model and expect status model available - Given I have an "iris" model - When the model is applied - And the model eventually becomes Ready - Then the model status message should eventually be "ModelAvailable" - - Scenario: Success - Load a model with min replicas - Given I have an "iris" model - And the model has "1" min replicas - When the model is applied - Then the model should eventually become Ready - -# todo: change model type - Scenario: Success - Load a big model - Given I have an "iris" model - When the model is applied - Then the model should eventually become Ready - diff --git a/tests/integration/godog/features/model/inference.feature 
b/tests/integration/godog/features/model/inference.feature index 06b018cb1f..f87542e192 100644 --- a/tests/integration/godog/features/model/inference.feature +++ b/tests/integration/godog/features/model/inference.feature @@ -1,16 +1,20 @@ -#@ModelInference @Models @Inference -#Feature Basic model inferencing -# -# Background: -# Given a clean test namespace -# -# Scenario: Model can serve prediction -# Given I have an "iris" model -# And the model is applied -# And the model eventually becomes Ready -# When I send a prediction request with payload: -# """ -# { "inputs": [1.0, 2.0, 3.0] } -# """ -# Then the response status should be 200 -# And the response body should contain "predictions" \ No newline at end of file +@ModelInference @Models @Inference @Functional +Feature: Basic model inferencing + + Scenario Outline: Success - Inference for model + Given I have an "<model>" model + When the model is applied + Then the model should eventually become Ready + When I send a valid HTTP inference request with timeout "20s" + Then expect http response status code "200" + When I send a valid gRPC inference request with timeout "20s" + + Examples: + | model | + | iris | +# | income-xgb | having errors with GRPC +# | mnist-onnx | +# | income-lgb | having errors with response + | tfsimple1 | + | wine | +# | mnist-pytorch | having errors with response diff --git a/tests/integration/godog/k8sclient/watcher_store.go b/tests/integration/godog/k8sclient/watcher_store.go index cdd84cb67c..32f12531d8 100644 --- a/tests/integration/godog/k8sclient/watcher_store.go +++ b/tests/integration/godog/k8sclient/watcher_store.go @@ -90,7 +90,7 @@ func (s *WatcherStore) Start() { if err != nil { s.logger.WithError(err).Error("failed to access model watcher") } else { - s.logger.Debugf("new model watch event with name: %s on namespace: %s", accessor.GetName(), accessor.GetNamespace()) + s.logger.WithField("event", event).Tracef("new model watch event with name: %s on namespace: %s", 
accessor.GetName(), accessor.GetNamespace()) } if event.Object == nil { diff --git a/tests/integration/godog/steps/infer.go b/tests/integration/godog/steps/infer.go index ab70a1b188..fab43eab37 100644 --- a/tests/integration/godog/steps/infer.go +++ b/tests/integration/godog/steps/infer.go @@ -25,24 +25,48 @@ import ( "google.golang.org/grpc/metadata" ) -func (i *inference) sendHTTPModelInferenceRequest(ctx context.Context, model string, payload *godog.DocString) error { - req, err := http.NewRequestWithContext(ctx, http.MethodPost, - fmt.Sprintf("%s://%s:%d/v2/models/%s/infer", httpScheme(i.ssl), i.host, i.httpPort, model), strings.NewReader(payload.Content)) +func (i *inference) doHTTPModelInferenceRequest(ctx context.Context, modelName, body string) error { + url := fmt.Sprintf( + "%s://%s:%d/v2/models/%s/infer", + httpScheme(i.ssl), + i.host, + i.httpPort, + modelName, + ) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, strings.NewReader(body)) if err != nil { return fmt.Errorf("could not create http request: %w", err) } + req.Header.Add("Content-Type", "application/json") req.Header.Add("Host", "seldon-mesh.inference.seldon") - req.Header.Add("Seldon-model", model) + req.Header.Add("Seldon-model", modelName) resp, err := i.http.Do(req) if err != nil { return fmt.Errorf("could not send http request: %w", err) } + i.lastHTTPResponse = resp return nil } +// Used from steps that pass an explicit payload (DocString) +func (i *inference) sendHTTPModelInferenceRequest(ctx context.Context, model string, payload *godog.DocString) error { + return i.doHTTPModelInferenceRequest(ctx, model, payload.Content) +} + +// Used from steps that work from a *Model and testModels table +func (i *inference) sendHTTPModelInferenceRequestFromModel(ctx context.Context, m *Model) error { + testModel, ok := testModels[m.modelType] + if !ok { + return fmt.Errorf("could not find test model %s", m.model.Name) + } + + return i.doHTTPModelInferenceRequest(ctx, 
m.modelName, testModel.ValidInferenceRequest) +} + func httpScheme(useSSL bool) string { if useSSL { return "https" @@ -51,20 +75,35 @@ func httpScheme(useSSL bool) string { } func (i *inference) sendGRPCModelInferenceRequest(ctx context.Context, model string, payload *godog.DocString) error { - var msg *v2_dataplane.ModelInferRequest - if err := json.Unmarshal([]byte(payload.Content), &msg); err != nil { + return i.doGRPCModelInferenceRequest(ctx, model, payload.Content) +} + +func (i *inference) sendGRPCModelInferenceRequestFromModel(ctx context.Context, m *Model) error { + testModel, ok := testModels[m.modelType] + if !ok { + return fmt.Errorf("could not find test model %s", m.model.Name) + } + return i.doGRPCModelInferenceRequest(ctx, m.modelName, testModel.ValidInferenceRequest) +} + +func (i *inference) doGRPCModelInferenceRequest( + ctx context.Context, + model string, + payload string, +) error { + var req v2_dataplane.ModelInferRequest + if err := json.Unmarshal([]byte(payload), &req); err != nil { return fmt.Errorf("could not unmarshal gRPC json payload: %w", err) } - msg.ModelName = model + req.ModelName = model md := metadata.Pairs("seldon-model", model) - ctx = metadata.NewOutgoingContext(context.Background(), md) - resp, err := i.grpc.ModelInfer(ctx, msg) - if err != nil { - i.lastGRPCResponse.err = err - } + ctx = metadata.NewOutgoingContext(ctx, md) + + resp, err := i.grpc.ModelInfer(ctx, &req) i.lastGRPCResponse.response = resp + i.lastGRPCResponse.err = err return nil } @@ -196,7 +235,12 @@ func (i *inference) httpRespCheckStatus(status int) error { return errors.New("no http response found") } if status != i.lastHTTPResponse.StatusCode { - return fmt.Errorf("expected http response status code %d, got %d", status, i.lastHTTPResponse.StatusCode) + body, err := io.ReadAll(i.lastHTTPResponse.Body) + if err != nil { + return fmt.Errorf("expected http response status code %d, got %d", status, i.lastHTTPResponse.StatusCode) + } + return 
fmt.Errorf("expected http response status code %d, got %d with body: %s", status, i.lastHTTPResponse.StatusCode, body) + } return nil } diff --git a/tests/integration/godog/steps/model_steps.go b/tests/integration/godog/steps/model_steps.go index 2d6d7fa63e..20821eb298 100644 --- a/tests/integration/godog/steps/model_steps.go +++ b/tests/integration/godog/steps/model_steps.go @@ -29,29 +29,66 @@ type Model struct { label map[string]string namespace string model *mlopsv1alpha1.Model + modelName string + modelType string k8sClient versioned.Interface watcherStorage k8sclient.WatcherStorage log logrus.FieldLogger } type TestModelConfig struct { - Name string - StorageURI string - Requirements []string // requirements might have to be applied on the applied of k8s + Name string + StorageURI string + Requirements []string // requirements might have to be applied on the applied of k8s + ValidInferenceRequest string + ValidJSONResponse string } // map to have all common testing model definitions for testing popular models // todo: this requirements might have to be empty and automatically selected by the applier based on config if they aren't explicitly added by the scenario var testModels = map[string]TestModelConfig{ "iris": { - Name: "iris", - StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/iris-sklearn", - Requirements: []string{"sklearn"}, + Name: "iris", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/iris-sklearn", + Requirements: []string{"sklearn"}, + ValidInferenceRequest: `{"inputs": [{"name": "predict", "shape": [1, 4], "datatype": "FP32", "data": [[1, 2, 3, 4]]}]}`, }, - "fraud-detector": { - Name: "fraud-detector", - StorageURI: "gs://other-bucket/models/fraud/", - Requirements: []string{"sklearn"}, + "income-xgb": { + Name: "income-xgb", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-xgb", + Requirements: []string{"xgboost"}, + ValidInferenceRequest: `{ "parameters": {"content_type": "pd"}, "inputs": [{"name": 
"Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, + }, + "mnist-onnx": { + Name: "mnist-onnx", + StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-onnx", + Requirements: []string{"onnx"}, + ValidInferenceRequest: `{"inputs":[{"name":"Input3","data":[],"datatype":"FP32","shape":[]}]}`, + }, + "income-lgb": { + Name: "income-lgb", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-lgb", + Requirements: []string{"lightgbm"}, + ValidInferenceRequest: `{"inputs": [{"name": "Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", 
"shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, + }, + "wine": { + Name: "wine", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/wine-mlflow", + Requirements: []string{"mlflow"}, + ValidInferenceRequest: `{ "inputs": [ { "name": "fixed acidity", "shape": [1], "datatype": "FP32", "data": [7.4] }, { "name": "volatile acidity", "shape": [1], "datatype": "FP32", "data": [0.7000] }, { "name": "citric acid", "shape": [1], "datatype": "FP32", "data": [0] }, { "name": "residual sugar", "shape": [1], "datatype": "FP32", "data": [1.9] }, { "name": "chlorides", "shape": [1], "datatype": "FP32", "data": [0.076] }, { "name": "free sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [11] }, { "name": "total sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [34] }, { "name": "density", "shape": [1], "datatype": "FP32", "data": [0.9978] }, { "name": "pH", "shape": [1], "datatype": "FP32", "data": [3.51] }, { "name": "sulphates", "shape": [1], "datatype": "FP32", "data": [0.56] }, { "name": "alcohol", "shape": [1], "datatype": "FP32", "data": [9.4] } ] }`, + }, + "mnist-pytorch": { + Name: "mnist-pytorch", + StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-pytorch", + Requirements: []string{"pytorch"}, + ValidInferenceRequest: `{'inputs': [{'name': 'x__0', 'data': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3294117748737335, 0.7254902124404907, 0.6235294342041016, 0.5921568870544434, 0.23529411852359772, 0.1411764770746231, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8705882430076599, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9450980424880981, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.6666666865348816, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26274511218070984, 0.4470588266849518, 0.2823529541492462, 0.4470588266849518, 0.6392157077789307, 0.8901960849761963, 0.9960784316062927, 0.8823529481887817, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9803921580314636, 0.8980392217636108, 0.9960784316062927, 0.9960784316062927, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06666667014360428, 0.25882354378700256, 0.054901961237192154, 0.26274511218070984, 0.26274511218070984, 0.26274511218070984, 0.23137255012989044, 0.08235294371843338, 0.9254902005195618, 0.9960784316062927, 0.4156862795352936, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32549020648002625, 0.9921568632125854, 0.8196078538894653, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08627451211214066, 0.9137254953384399, 1.0, 0.32549020648002625, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5058823823928833, 0.9960784316062927, 0.9333333373069763, 0.1725490242242813, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23137255012989044, 0.9764705896377563, 0.9960784316062927, 0.24313725531101227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.7333333492279053, 0.019607843831181526, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03529411926865578, 0.8039215803146362, 0.9725490212440491, 0.22745098173618317, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4941176474094391, 0.9960784316062927, 0.7137255072593689, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.29411765933036804, 0.9843137264251709, 0.9411764740943909, 0.2235294133424759, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07450980693101883, 0.8666666746139526, 0.9960784316062927, 0.6509804129600525, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0117647061124444, 0.7960784435272217, 0.9960784316062927, 0.8588235378265381, 0.13725490868091583, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14901961386203766, 0.9960784316062927, 0.9960784316062927, 0.3019607961177826, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.12156862765550613, 0.8784313797950745, 0.9960784316062927, 0.45098039507865906, 0.003921568859368563, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.239215686917305, 0.9490196108818054, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.9960784316062927, 0.8588235378265381, 0.1568627506494522, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.8117647171020508, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'datatype': 'FP32', 'shape': [1, 1, 28, 28]}]}`, + }, + "tfsimple1": { + Name: "tfsimple1", + StorageURI: "gs://seldon-models/triton/simple", + Requirements: []string{"tensorflow"}, + ValidInferenceRequest: `{"inputs":[{"name":"INPUT0","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]},{"name":"INPUT1","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]}]}`, + ValidJSONResponse: `[ { "name": "OUTPUT0", "datatype": "INT32", "shape": [ 1, 16 ], "data": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32 ] }, { "name": "OUTPUT1", "datatype": "INT32", "shape": [ 1, 16 ], "data": [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] } ]`, }, } @@ -107,10 +144,22 @@ func LoadInferenceSteps(scenario *godog.ScenarioContext, w *World) { return 
w.infer.sendGRPCModelInferenceRequest(ctx, model, payload) }) }) + scenario.Step(`^(?:I )send a valid gRPC inference request with timeout "([^"]+)"`, func(timeout string) error { + return withTimeoutCtx(timeout, func(ctx context.Context) error { + return w.infer.sendGRPCModelInferenceRequestFromModel(ctx, w.currentModel) + }) + }) + scenario.Step(`^(?:I )send a valid HTTP inference request with timeout "([^"]+)"`, func(timeout string) error { + return withTimeoutCtx(timeout, func(ctx context.Context) error { + return w.infer.sendHTTPModelInferenceRequestFromModel(ctx, w.currentModel) + }) + }) + scenario.Step(`^expect http response status code "([^"]*)"$`, w.infer.httpRespCheckStatus) scenario.Step(`^expect http response body to contain JSON:$`, w.infer.httpRespCheckBodyContainsJSON) scenario.Step(`^expect gRPC response body to contain JSON:$`, w.infer.gRPCRespCheckBodyContainsJSON) scenario.Step(`^expect gRPC response error to contain "([^"]+)"`, w.infer.gRPCRespContainsError) + } func (m *Model) deployModelSpec(ctx context.Context, spec *godog.DocString) error { @@ -147,7 +196,8 @@ func (m *Model) IHaveAModel(model string) error { } modelName := fmt.Sprintf("%s-%s", testModel.Name, randomString(3)) - + m.modelName = modelName + m.modelType = model m.model = &mlopsv1alpha1.Model{ TypeMeta: metav1.TypeMeta{ Kind: "Model", diff --git a/tests/integration/godog/suite/suite.go b/tests/integration/godog/suite/suite.go index e9dc5d8f1e..ded10c893e 100644 --- a/tests/integration/godog/suite/suite.go +++ b/tests/integration/godog/suite/suite.go @@ -154,7 +154,7 @@ func InitializeScenario(scenarioCtx *godog.ScenarioContext) { return ctx, fmt.Errorf("error when deleting models on before steps: %w", err) } - return ctx, err + return ctx, nil }) // Register step definitions with access to world + k8sClient