diff --git a/tests/integration/godog/features/model/inference.feature b/tests/integration/godog/features/model/inference.feature index f87542e192..139be52bbe 100644 --- a/tests/integration/godog/features/model/inference.feature +++ b/tests/integration/godog/features/model/inference.feature @@ -7,14 +7,16 @@ Feature: Basic model inferencing Then the model should eventually become Ready When I send a valid HTTP inference request with timeout "20s" Then expect http response status code "200" + And expect http response body to contain valid JSON When I send a valid gRPC inference request with timeout "20s" + And expect gRPC response to not return an error Examples: - | model | - | iris | -# | income-xgb | having errors with GRPC -# | mnist-onnx | -# | income-lgb | having errors with response - | tfsimple1 | - | wine | -# | mnist-pytorch | having errors with response + | model | + | mnist-pytorch | + | wine | + | tfsimple1 | + | iris | + | income-xgb | + | income-lgb | + | mnist-onnx | diff --git a/tests/integration/godog/features/model/server_setup.feature b/tests/integration/godog/features/model/server_setup.feature index 7895b3c6a3..a88546b8fb 100644 --- a/tests/integration/godog/features/model/server_setup.feature +++ b/tests/integration/godog/features/model/server_setup.feature @@ -1,10 +1,10 @@ -@Server +@ServerSetup Feature: Server setup Deploys an mlserver with one replica. We ensure the pods become ready and remove any other server pods for different servers. - @ServerSetup + @ServerSetup @ServerSetupMLServer Scenario: Deploy mlserver Server and remove other servers Given I deploy server spec with timeout "10s": """ @@ -15,14 +15,29 @@ Feature: Server setup spec: replicas: 1 serverConfig: mlserver - requirements: - - sklearn - - mlserver - storageUri: gs://seldon-models/scv2/samples/mlserver_1.3.5/iris-sklearn """ When the server should eventually become Ready with timeout "30s" Then ensure only "1" pod(s) are deployed for server and they are Ready - And remove any other server deployments + + @ServerSetup @ServerSetupTritonServer + Scenario: Deploy triton Server + Given I deploy server spec with timeout "10s": + """ + apiVersion: mlops.seldon.io/v1alpha1 + kind: Server + metadata: + name: godog-triton + spec: + replicas: 1 + serverConfig: triton + """ + When the server should eventually become Ready with timeout "30s" + Then ensure only "1" pod(s) are deployed for server and they are Ready + + + @ServerSetup @ServerClean + Scenario: Remove any other pre-existing servers + Given I remove any other server deployments which are not "godog-mlserver,godog-triton" # TODO decide if we want to keep this, if we keep testers will need to ensure they don't run this tag when running all # all features in this directory, as tests will fail when server is deleted. 
We can not delete and it's up to the diff --git a/tests/integration/godog/steps/infer_steps.go b/tests/integration/godog/steps/infer_steps.go index 526bdcf8c2..1c84ad465e 100644 --- a/tests/integration/godog/steps/infer_steps.go +++ b/tests/integration/godog/steps/infer_steps.go @@ -22,7 +22,9 @@ import ( "github.com/cucumber/godog" "github.com/seldonio/seldon-core/apis/go/v2/mlops/v2_dataplane" + "github.com/sirupsen/logrus" "google.golang.org/grpc/metadata" + "google.golang.org/protobuf/encoding/protojson" ) type inference struct { @@ -33,6 +35,7 @@ type inference struct { httpPort uint lastHTTPResponse *http.Response lastGRPCResponse lastGRPCResponse + log logrus.FieldLogger } func LoadInferenceSteps(scenario *godog.ScenarioContext, w *World) { @@ -61,6 +64,14 @@ func LoadInferenceSteps(scenario *godog.ScenarioContext, w *World) { scenario.Step(`^expect http response body to contain JSON:$`, w.infer.httpRespCheckBodyContainsJSON) scenario.Step(`^expect gRPC response body to contain JSON:$`, w.infer.gRPCRespCheckBodyContainsJSON) scenario.Step(`^expect gRPC response error to contain "([^"]+)"`, w.infer.gRPCRespContainsError) + scenario.Step(`^expect gRPC response to not return an error$`, w.infer.gRPCRespContainsNoError) + scenario.Step(`^expect http response body to contain valid JSON$`, func() error { + testModel, ok := testModels[w.currentModel.modelType] + if !ok { + return fmt.Errorf("model %s not found", w.currentModel.modelType) + } + return w.infer.doHttpRespCheckBodyContainsJSON(testModel.ValidJSONResponse) + }) } func (i *inference) doHTTPModelInferenceRequest(ctx context.Context, modelName, body string) error { @@ -102,7 +113,7 @@ func (i *inference) sendHTTPModelInferenceRequestFromModel(ctx context.Context, return fmt.Errorf("could not find test model %s", m.model.Name) } - return i.doHTTPModelInferenceRequest(ctx, m.modelName, testModel.ValidInferenceRequest) + return i.doHTTPModelInferenceRequest(ctx, m.modelName, testModel.ValidHTTPInferenceRequest) } func httpScheme(useSSL bool) string { @@ -121,7 +132,7 @@ func (i *inference) sendGRPCModelInferenceRequestFromModel(ctx context.Context, if !ok { return fmt.Errorf("could not find test model %s", m.model.Name) } - return i.doGRPCModelInferenceRequest(ctx, m.modelName, testModel.ValidInferenceRequest) + return i.doGRPCModelInferenceRequest(ctx, m.modelName, testModel.ValidGRPCInferenceRequest) } func (i *inference) doGRPCModelInferenceRequest( @@ -130,7 +141,7 @@ func (i *inference) doGRPCModelInferenceRequest( payload string, ) error { var req v2_dataplane.ModelInferRequest - if err := json.Unmarshal([]byte(payload), &req); err != nil { + if err := protojson.Unmarshal([]byte(payload), &req); err != nil { return fmt.Errorf("could not unmarshal gRPC json payload: %w", err) } req.ModelName = model @@ -138,7 +149,11 @@ func (i *inference) doGRPCModelInferenceRequest( md := metadata.Pairs("seldon-model", model) ctx = metadata.NewOutgoingContext(ctx, md) + i.log.Debugf("sending gRPC model inference %+v", &req) + resp, err := i.grpc.ModelInfer(ctx, &req) + i.log.Debugf("grpc model infer response: %+v", resp) + i.log.Debugf("grpc model infer error: %+v", err) i.lastGRPCResponse.response = resp i.lastGRPCResponse.err = err @@ -212,6 +227,16 @@ func jsonContainsObjectSubset(jsonStr, needleStr string) (bool, error) { return containsSubset(needle, hay), nil } +func (i *inference) gRPCRespContainsNoError() error { + if i.lastGRPCResponse.err != nil { + return fmt.Errorf("grpc response contains error: %w", i.lastGRPCResponse.err) + } + 
if i.lastGRPCResponse.response == nil { + return errors.New("grpc contains no response") + } + return nil +} + func (i *inference) gRPCRespContainsError(err string) error { if i.lastGRPCResponse.err == nil { return errors.New("no gRPC response error found") @@ -226,6 +251,9 @@ func (i *inference) gRPCRespContainsError(err string) error { func (i *inference) gRPCRespCheckBodyContainsJSON(expectJSON *godog.DocString) error { if i.lastGRPCResponse.response == nil { + if i.lastGRPCResponse.err != nil { + return fmt.Errorf("no gRPC response, error found: %s", i.lastGRPCResponse.err.Error()) + } return errors.New("no gRPC response found") } @@ -234,6 +262,7 @@ func (i *inference) gRPCRespCheckBodyContainsJSON(expectJSON *godog.DocString) e return fmt.Errorf("could not marshal gRPC json: %w", err) } + i.log.Debugf("checking gRPC response: %s contains %s", string(gotJson), expectJSON.Content) ok, err := jsonContainsObjectSubset(string(gotJson), expectJSON.Content) if err != nil { return fmt.Errorf("could not check if json contains object: %w", err) @@ -247,6 +276,10 @@ func (i *inference) gRPCRespCheckBodyContainsJSON(expectJSON *godog.DocString) e } func (i *inference) httpRespCheckBodyContainsJSON(expectJSON *godog.DocString) error { + return i.doHttpRespCheckBodyContainsJSON(expectJSON.Content) +} + +func (i *inference) doHttpRespCheckBodyContainsJSON(expectJSON string) error { if i.lastHTTPResponse == nil { return errors.New("no http response found") } @@ -256,7 +289,8 @@ func (i *inference) httpRespCheckBodyContainsJSON(expectJSON *godog.DocString) e return fmt.Errorf("could not read response body: %w", err) } - ok, err := jsonContainsObjectSubset(string(body), expectJSON.Content) + i.log.Debugf("checking HTTP response: %s contains %s", string(body), expectJSON) + ok, err := jsonContainsObjectSubset(string(body), expectJSON) if err != nil { return fmt.Errorf("could not check if json contains object: %w", err) } diff --git a/tests/integration/godog/steps/model_steps.go b/tests/integration/godog/steps/model_steps.go index 90e577dd6f..1790bcc213 100644 --- a/tests/integration/godog/steps/model_steps.go +++ b/tests/integration/godog/steps/model_steps.go @@ -37,58 +37,126 @@ type Model struct { } type TestModelConfig struct { - Name string - StorageURI string - Requirements []string // requirements might have to be applied on the applied of k8s - ValidInferenceRequest string - ValidJSONResponse string + Name string + StorageURI string + Requirements []string // requirements might have to be applied on the applied of k8s + ValidHTTPInferenceRequest string + ValidGRPCInferenceRequest string + ValidJSONResponse string } // map to have all common testing model definitions for testing popular models // todo: this requirements might have to be empty and automatically selected by the applier based on config if they aren't explicitly added by the scenario var testModels = map[string]TestModelConfig{ "iris": { - Name: "iris", - StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/iris-sklearn", - Requirements: []string{"sklearn"}, - ValidInferenceRequest: `{"inputs": [{"name": "predict", "shape": [1, 4], "datatype": "FP32", "data": [[1, 2, 3, 4]]}]}`, + Name: "iris", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/iris-sklearn", + Requirements: []string{"sklearn"}, + ValidHTTPInferenceRequest: `{"inputs": [{"name": "predict", "shape": [1, 4], "datatype": "FP32", "data": [[1, 2, 3, 4]]}]}`, + ValidGRPCInferenceRequest: `{"inputs": [{"name": "predict", "shape": [1, 4], "datatype": 
"FP32", "contents": { "fp32_contents" : [1, 2, 3, 4] } }]}`, + ValidJSONResponse: `{ "outputs":[{"name":"predict","shape":[1,1],"datatype":"INT64","parameters":{"content_type":"np"},"data":[2]}]}`, }, "income-xgb": { - Name: "income-xgb", - StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-xgb", - Requirements: []string{"xgboost"}, - ValidInferenceRequest: `{ "parameters": {"content_type": "pd"}, "inputs": [{"name": "Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, + Name: "income-xgb", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-xgb", + Requirements: []string{"xgboost"}, + ValidJSONResponse: `{"outputs":[{"name":"predict","shape":[1,1],"datatype":"FP32","parameters":{"content_type":"np"},"data":[-1.8380107879638672]}]}`, + ValidGRPCInferenceRequest: `{ + "parameters": { + "content_type": { + "string_param": "pd" + } + }, + "inputs": [ + {"name": "Age", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [47]}}, + {"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [4]}}, + {"name": "Education", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [3]}}, + {"name": "Race", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [4]}}, + {"name": "Sex", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [0]}}, + {"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [0]}}, + {"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [40]}}, + {"name": "Country", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [9]}} + ] +}`, + ValidHTTPInferenceRequest: `{ "parameters": { "content_type" : "pd" }, "inputs": [{"name": "Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], 
"datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, }, "mnist-onnx": { - Name: "mnist-onnx", - StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-onnx", - Requirements: []string{"onnx"}, - ValidInferenceRequest: `{"inputs":[{"name":"Input3","data":[],"datatype":"FP32","shape":[]}]}`, + Name: "mnist-onnx", + StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-onnx", + Requirements: []string{"onnx"}, + ValidJSONResponse: `{"outputs":[{"name":"Plus214_Output_0","datatype":"FP32","shape":[1,10],"data":[-0.04485602676868439,0.007791661191731691,0.06810081750154495,0.02999374084174633,-0.1264096349477768,0.14021874964237213,-0.055284902453422546,-0.04938381537795067,0.08432205021381378,-0.05454041436314583]}]}`, + ValidHTTPInferenceRequest: `{"inputs":[ {"name" : "Input3" , "data": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "datatype":"FP32","shape":[1,1,28,28]}]}`, + ValidGRPCInferenceRequest: `{"inputs":[ {"name" : "Input3", "contents" : { "fp32_contents": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] }, "datatype":"FP32","shape":[1,1,28,28]}]}`, }, "income-lgb": { - Name: "income-lgb", - StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-lgb", - Requirements: []string{"lightgbm"}, - ValidInferenceRequest: `{"inputs": [{"name": "Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, + Name: "income-lgb", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/income-lgb", + Requirements: []string{"lightgbm"}, + ValidJSONResponse: `{ "outputs":[{"name":"predict","shape":[1,1],"datatype":"FP64","parameters":{"content_type":"np"},"data":[0.06279460120044741]}] }`, + ValidHTTPInferenceRequest: `{ "parameters": { "content_type" : "pd" }, "inputs": [{"name": "Age", "shape": [1, 1], "datatype": "INT64", "data": [47]},{"name": 
"Workclass", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Education", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "data": [3]},{"name": "Race", "shape": [1, 1], "datatype": "INT64", "data": [4]},{"name": "Sex", "shape": [1, 1], "datatype": "INT64", "data": [1]},{"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "data": [0]},{"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "data": [40]},{"name": "Country", "shape": [1, 1], "datatype": "INT64", "data": [9]}]}`, + ValidGRPCInferenceRequest: `{ + "parameters": { + "content_type": { + "string_param": "pd" + } + }, + "inputs": [ + {"name": "Age", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [47]}}, + {"name": "Workclass", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [4]}}, + {"name": "Education", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Marital Status", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Occupation", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Relationship", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [3]}}, + {"name": "Race", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [4]}}, + {"name": "Sex", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [1]}}, + {"name": "Capital Gain", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [0]}}, + {"name": "Capital Loss", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [0]}}, + {"name": "Hours per week", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [40]}}, + {"name": "Country", "shape": [1, 1], "datatype": "INT64", "contents": {"int64_contents": [9]}} + ] +}`, }, "wine": { - Name: "wine", - StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/wine-mlflow", - Requirements: []string{"mlflow"}, - ValidInferenceRequest: `{ "inputs": [ { "name": "fixed acidity", "shape": [1], "datatype": "FP32", "data": [7.4] }, { "name": "volatile acidity", "shape": [1], "datatype": "FP32", "data": [0.7000] }, { "name": "citric acid", "shape": [1], "datatype": "FP32", "data": [0] }, { "name": "residual sugar", "shape": [1], "datatype": "FP32", "data": [1.9] }, { "name": "chlorides", "shape": [1], "datatype": "FP32", "data": [0.076] }, { "name": "free sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [11] }, { "name": "total sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [34] }, { "name": "density", "shape": [1], "datatype": "FP32", "data": [0.9978] }, { "name": "pH", "shape": [1], "datatype": "FP32", "data": [3.51] }, { "name": "sulphates", "shape": [1], "datatype": "FP32", "data": [0.56] }, { "name": "alcohol", "shape": [1], "datatype": "FP32", "data": [9.4] } ] }`, + Name: "wine", + StorageURI: "gs://seldon-models/scv2/samples/mlserver_1.3.5/wine-mlflow", + Requirements: []string{"mlflow"}, + ValidJSONResponse: `{ "outputs":[{"name":"output-1","shape":[1,1],"datatype":"FP64","parameters":{"content_type":"np"},"data":[5.576883936610762]}]}`, + ValidHTTPInferenceRequest: `{ "inputs": [ { "name": "fixed acidity", "shape": [1], "datatype": "FP32", 
"data": [7.4] }, { "name": "volatile acidity", "shape": [1], "datatype": "FP32", "data": [0.7000] }, { "name": "citric acid", "shape": [1], "datatype": "FP32", "data": [0] }, { "name": "residual sugar", "shape": [1], "datatype": "FP32", "data": [1.9] }, { "name": "chlorides", "shape": [1], "datatype": "FP32", "data": [0.076] }, { "name": "free sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [11] }, { "name": "total sulfur dioxide", "shape": [1], "datatype": "FP32", "data": [34] }, { "name": "density", "shape": [1], "datatype": "FP32", "data": [0.9978] }, { "name": "pH", "shape": [1], "datatype": "FP32", "data": [3.51] }, { "name": "sulphates", "shape": [1], "datatype": "FP32", "data": [0.56] }, { "name": "alcohol", "shape": [1], "datatype": "FP32", "data": [9.4] } ] }`, + ValidGRPCInferenceRequest: `{ + "inputs": [ + {"name": "fixed acidity", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [7.4]}}, + {"name": "volatile acidity", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [0.7000]}}, + {"name": "citric acid", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [0]}}, + {"name": "residual sugar", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [1.9]}}, + {"name": "chlorides", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [0.076]}}, + {"name": "free sulfur dioxide", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [11]}}, + {"name": "total sulfur dioxide", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [34]}}, + {"name": "density", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [0.9978]}}, + {"name": "pH", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [3.51]}}, + {"name": "sulphates", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [0.56]}}, + {"name": "alcohol", "shape": [1], "datatype": "FP32", "contents": {"fp32_contents": [9.4]}} + ] +}`, }, "mnist-pytorch": { - Name: "mnist-pytorch", - StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-pytorch", - Requirements: []string{"pytorch"}, - ValidInferenceRequest: `{'inputs': [{'name': 'x__0', 'data': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3294117748737335, 0.7254902124404907, 0.6235294342041016, 0.5921568870544434, 0.23529411852359772, 0.1411764770746231, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8705882430076599, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9450980424880981, 0.7764706015586853, 
0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.6666666865348816, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26274511218070984, 0.4470588266849518, 0.2823529541492462, 0.4470588266849518, 0.6392157077789307, 0.8901960849761963, 0.9960784316062927, 0.8823529481887817, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9803921580314636, 0.8980392217636108, 0.9960784316062927, 0.9960784316062927, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06666667014360428, 0.25882354378700256, 0.054901961237192154, 0.26274511218070984, 0.26274511218070984, 0.26274511218070984, 0.23137255012989044, 0.08235294371843338, 0.9254902005195618, 0.9960784316062927, 0.4156862795352936, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32549020648002625, 0.9921568632125854, 0.8196078538894653, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08627451211214066, 0.9137254953384399, 1.0, 0.32549020648002625, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5058823823928833, 0.9960784316062927, 0.9333333373069763, 0.1725490242242813, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23137255012989044, 0.9764705896377563, 0.9960784316062927, 0.24313725531101227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.7333333492279053, 0.019607843831181526, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03529411926865578, 0.8039215803146362, 0.9725490212440491, 0.22745098173618317, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4941176474094391, 0.9960784316062927, 0.7137255072593689, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.29411765933036804, 0.9843137264251709, 0.9411764740943909, 0.2235294133424759, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07450980693101883, 0.8666666746139526, 0.9960784316062927, 0.6509804129600525, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0117647061124444, 0.7960784435272217, 0.9960784316062927, 0.8588235378265381, 0.13725490868091583, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14901961386203766, 0.9960784316062927, 0.9960784316062927, 0.3019607961177826, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12156862765550613, 0.8784313797950745, 0.9960784316062927, 0.45098039507865906, 0.003921568859368563, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.239215686917305, 0.9490196108818054, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.9960784316062927, 0.8588235378265381, 0.1568627506494522, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.8117647171020508, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'datatype': 'FP32', 'shape': [1, 1, 28, 28]}]}`, + Name: "mnist-pytorch", + StorageURI: "gs://seldon-models/scv2/samples/triton_23-03/mnist-pytorch", + Requirements: []string{"pytorch"}, + ValidJSONResponse: `{ "outputs":[{"name":"y__0","datatype":"FP32","shape":[1,10],"data":[-3.897987127304077,-3.953615665435791,-3.0412650108337404,-3.4258205890655519,-4.411256790161133,-4.6279296875,-5.4005937576293949,-0.2461225539445877,-4.16152811050415,-2.8753538131713869]}]}`, + ValidHTTPInferenceRequest: `{"inputs":[{"name":"x__0","data":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3294117748737335,0.7254902124404907,0.6235294342041016,0.5921568870544434,0.23529411852359772,0.1411764770746231,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8705882430076599,0.9960784316062927,0.9960784316062927,0.9960784316062927,0.9960784316062927,0.9450980424880981,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.7764706015586853,0.6666666865348816,0.20392157137393951,0,0,0,0,0,0,0,0,0,0,0,0,0.26274511218070984,0.4470588266849518,0.2823529541492462,0.4470588266849518,0.6392157077789307,0.8901960849761963,0.9960784316062927,0.8823529481887817,0.9960784316062927,0.9960784316062927,0.9960784316062927,0.9803921580314636,0.8980392217636108,0.9960784316062927,0.9960784316062927,0.5490196347236633,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06666667014360428,0.25882354378700256,0.054901961237192154,0.26274511218070984,0.26274511218070984,0.26274511218070984,0.23137255012989044,0.08235294371843338,0.9254902005195618,0.9960784316062927,0.4156862795352936,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32549020648002625,0.9921568632125854,0.8196078538894653,0.07058823853731155,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08627451211214066,0.9137254953384399,1,0.32549020648002625,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5058823823928833,0.9960784316062927,0.9333333373069763,0.1725490242242813,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23137255012989044,0.9764705896377563,0.9960784316062927,0.24313725531101227,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5215686559677124,0.9960784316062927,0.7333333492279053,0.019607843831181526,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03529411926865578,0.8039215803146362,0.9725490212440491,0.22745098173618317,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4941176474094391,0.9960784316062927,0.71372550
72593689,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29411765933036804,0.9843137264251709,0.9411764740943909,0.2235294133424759,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07450980693101883,0.8666666746139526,0.9960784316062927,0.6509804129600525,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0117647061124444,0.7960784435272217,0.9960784316062927,0.8588235378265381,0.13725490868091583,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14901961386203766,0.9960784316062927,0.9960784316062927,0.3019607961177826,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12156862765550613,0.8784313797950745,0.9960784316062927,0.45098039507865906,0.003921568859368563,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5215686559677124,0.9960784316062927,0.9960784316062927,0.20392157137393951,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.239215686917305,0.9490196108818054,0.9960784316062927,0.9960784316062927,0.20392157137393951,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4745098054409027,0.9960784316062927,0.9960784316062927,0.8588235378265381,0.1568627506494522,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4745098054409027,0.9960784316062927,0.8117647171020508,0.07058823853731155,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"datatype":"FP32","shape":[1,1,28,28]}]}`, + ValidGRPCInferenceRequest: `{"inputs": [{"name": "x__0", "contents": { "fp32_contents" : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3294117748737335, 0.7254902124404907, 0.6235294342041016, 0.5921568870544434, 0.23529411852359772, 0.1411764770746231, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8705882430076599, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9450980424880981, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.7764706015586853, 0.6666666865348816, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26274511218070984, 0.4470588266849518, 0.2823529541492462, 0.4470588266849518, 0.6392157077789307, 0.8901960849761963, 0.9960784316062927, 0.8823529481887817, 0.9960784316062927, 0.9960784316062927, 0.9960784316062927, 0.9803921580314636, 0.8980392217636108, 0.9960784316062927, 0.9960784316062927, 0.5490196347236633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06666667014360428, 0.25882354378700256, 0.054901961237192154, 0.26274511218070984, 0.26274511218070984, 
0.26274511218070984, 0.23137255012989044, 0.08235294371843338, 0.9254902005195618, 0.9960784316062927, 0.4156862795352936, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32549020648002625, 0.9921568632125854, 0.8196078538894653, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08627451211214066, 0.9137254953384399, 1.0, 0.32549020648002625, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5058823823928833, 0.9960784316062927, 0.9333333373069763, 0.1725490242242813, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23137255012989044, 0.9764705896377563, 0.9960784316062927, 0.24313725531101227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.7333333492279053, 0.019607843831181526, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03529411926865578, 0.8039215803146362, 0.9725490212440491, 0.22745098173618317, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4941176474094391, 0.9960784316062927, 0.7137255072593689, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.29411765933036804, 0.9843137264251709, 0.9411764740943909, 0.2235294133424759, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07450980693101883, 0.8666666746139526, 0.9960784316062927, 0.6509804129600525, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0117647061124444, 0.7960784435272217, 0.9960784316062927, 0.8588235378265381, 0.13725490868091583, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14901961386203766, 0.9960784316062927, 0.9960784316062927, 0.3019607961177826, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12156862765550613, 0.8784313797950745, 0.9960784316062927, 0.45098039507865906, 0.003921568859368563, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5215686559677124, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.239215686917305, 0.9490196108818054, 0.9960784316062927, 0.9960784316062927, 0.20392157137393951, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.9960784316062927, 0.8588235378265381, 0.1568627506494522, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4745098054409027, 0.9960784316062927, 0.8117647171020508, 0.07058823853731155, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}, 
"datatype": "FP32", "shape": [1, 1, 28, 28]}]}`, }, "tfsimple1": { - Name: "tfsimple1", - StorageURI: "gs://seldon-models/triton/simple", - Requirements: []string{"tensorflow"}, - ValidInferenceRequest: `{"inputs":[{"name":"INPUT0","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]},{"name":"INPUT1","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]}]}`, - ValidJSONResponse: `[ { "name": "OUTPUT0", "datatype": "INT32", "shape": [ 1, 16 ], "data": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32 ] }, { "name": "OUTPUT1", "datatype": "INT32", "shape": [ 1, 16 ], "data": [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] } ]`, + Name: "tfsimple1", + StorageURI: "gs://seldon-models/triton/simple", + Requirements: []string{"tensorflow"}, + ValidHTTPInferenceRequest: `{"inputs":[{"name":"INPUT0","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]},{"name":"INPUT1","data":[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16],"datatype":"INT32","shape":[1,16]}]}`, + ValidGRPCInferenceRequest: `{"inputs":[{"name":"INPUT0","contents": { "int_contents" : [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16] },"datatype":"INT32","shape":[1,16]},{"name":"INPUT1","contents": { "int_contents" : [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16] },"datatype":"INT32","shape":[1,16]}]}`, + ValidJSONResponse: `{ "outputs":[{"name":"OUTPUT0","datatype":"INT32","shape":[1,16],"data":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32]},{"name":"OUTPUT1","datatype":"INT32","shape":[1,16],"data":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0] } ]}`, }, } @@ -216,7 +284,7 @@ func (m *Model) IHaveAModel(model string) error { return nil } -func NewModel(label map[string]string, namespace string, k8sClient versioned.Interface, log logrus.FieldLogger, watcherStorage k8sclient.WatcherStorage) *Model { +func newModel(label map[string]string, namespace string, k8sClient versioned.Interface, log logrus.FieldLogger, watcherStorage k8sclient.WatcherStorage) *Model { return &Model{label: label, model: &mlopsv1alpha1.Model{}, log: log, namespace: namespace, k8sClient: k8sClient, watcherStorage: watcherStorage} } diff --git a/tests/integration/godog/steps/pipeline_steps.go b/tests/integration/godog/steps/pipeline_steps.go index 12e2c051ed..14f20a414c 100644 --- a/tests/integration/godog/steps/pipeline_steps.go +++ b/tests/integration/godog/steps/pipeline_steps.go @@ -34,7 +34,7 @@ type Pipeline struct { log logrus.FieldLogger } -func NewPipeline(label map[string]string, namespace string, k8sClient versioned.Interface, log logrus.FieldLogger, watcherStorage k8sclient.WatcherStorage) *Pipeline { +func newPipeline(label map[string]string, namespace string, k8sClient versioned.Interface, log logrus.FieldLogger, watcherStorage k8sclient.WatcherStorage) *Pipeline { return &Pipeline{label: label, pipeline: &mlopsv1alpha1.Pipeline{}, log: log, namespace: namespace, k8sClient: k8sClient, watcherStorage: watcherStorage} } diff --git a/tests/integration/godog/steps/server_steps.go b/tests/integration/godog/steps/server_steps.go index 23c24ac043..0ef978cd2f 100644 --- a/tests/integration/godog/steps/server_steps.go +++ b/tests/integration/godog/steps/server_steps.go @@ -14,6 +14,7 @@ import ( "errors" "fmt" "maps" + "strings" "github.com/cucumber/godog" mlopsv1alpha1 "github.com/seldonio/seldon-core/operator/v2/apis/mlops/v1alpha1" @@ -75,6 +76,11 @@ func LoadServerSteps(scenario *godog.ScenarioContext, w *World) { }) }) }) + scenario.Step(`^I remove any other server deployments which are 
not "([^"]+)"$`, func(servers string) error { + return withTimeoutCtx("10s", func(ctx context.Context) error { + return w.server.removeServers(ctx, servers) + }) + }) scenario.Step(`^I delete server "([^"]+)" with timeout "([^"]+)"$`, func(server, timeout string) error { return withTimeoutCtx(timeout, func(ctx context.Context) error { return w.server.deleteServer(ctx, server) @@ -133,6 +139,26 @@ func (s *server) applyScenarioLabel() { } } +func (s *server) removeServers(ctx context.Context, keepServers string) error { + gotServers, err := s.seldonK8sClient.MlopsV1alpha1().Servers(s.namespace).List(ctx, metav1.ListOptions{}) + if err != nil { + return fmt.Errorf("failed listing servers: %w", err) + } + + for _, server := range gotServers.Items { + if strings.Contains(keepServers, server.Name) { + continue + } + s.log.Debugf("Removing server %s not in keep list %s", server.Name, keepServers) + if err := s.deleteServer(ctx, server.Name); err != nil { + return fmt.Errorf("failed deleting server: %w", err) + } + s.log.Infof("removed server %q", server) + } + + return nil +} + func (s *server) removeOtherServers(ctx context.Context) error { servers, err := s.seldonK8sClient.MlopsV1alpha1().Servers(s.namespace).List(ctx, metav1.ListOptions{}) if err != nil { diff --git a/tests/integration/godog/steps/world.go b/tests/integration/godog/steps/world.go index 0a9b0ac926..e1ffdef5fb 100644 --- a/tests/integration/godog/steps/world.go +++ b/tests/integration/godog/steps/world.go @@ -58,17 +58,18 @@ func NewWorld(c Config) (*World, error) { } w := &World{ - namespace: c.Namespace, - kubeClient: c.KubeClient, - watcherStorage: c.WatcherStorage, - currentModel: NewModel(label, c.Namespace, c.K8sClient, c.Logger, c.WatcherStorage), - currentPipeline: NewPipeline(label, c.Namespace, c.K8sClient, c.Logger, c.WatcherStorage), - server: newServer(label, c.Namespace, c.K8sClient, c.Logger, c.KubeClient), + namespace: c.Namespace, + kubeClient: c.KubeClient, + watcherStorage: c.WatcherStorage, + currentModel: newModel(label, c.Namespace, c.K8sClient, c.Logger, c.WatcherStorage), + currentPipeline: newPipeline(label, c.Namespace, c.K8sClient, c.Logger, c.WatcherStorage), + server: newServer(label, c.Namespace, c.K8sClient, c.Logger, c.KubeClient), infer: inference{ host: c.IngressHost, http: &http.Client{}, grpc: c.GRPC, httpPort: c.HTTPPort, + log: c.Logger, ssl: c.SSL}, Label: label, }