Skip to content

Commit 088c6b4

Browse files
authored
simpler model serving (#3479)
1 parent 5fe2b94 commit 088c6b4

File tree

1 file changed

+7
-89
lines changed

1 file changed

+7
-89
lines changed

internal/acceptance/model_serving_test.go

Lines changed: 7 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,10 @@
11
package acceptance
22

33
import (
4-
"context"
54
"fmt"
65
"testing"
76

8-
"github.com/databricks/databricks-sdk-go"
9-
"github.com/databricks/databricks-sdk-go/service/compute"
107
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
11-
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
128
)
139

1410
func TestAccModelServing(t *testing.T) {
@@ -17,95 +13,23 @@ func TestAccModelServing(t *testing.T) {
1713
skipf(t)("not available on GCP")
1814
}
1915

20-
clusterID := GetEnvOrSkipTest(t, "TEST_DEFAULT_CLUSTER_ID")
21-
22-
name := fmt.Sprintf("terraform-test-model-serving-%[1]s",
16+
name := fmt.Sprintf("terraform-test-model-serving-%s",
2317
acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum))
2418
workspaceLevel(t, step{
2519
Template: fmt.Sprintf(`
26-
resource "databricks_mlflow_experiment" "exp" {
27-
name = "/Shared/%[1]s-exp"
28-
}
29-
resource "databricks_mlflow_model" "model" {
30-
name = "%[1]s-model"
31-
}
32-
resource "databricks_library" "fbprophet" {
33-
cluster_id = "{env.TEST_DEFAULT_CLUSTER_ID}"
34-
pypi {
35-
package = "mlflow"
36-
}
37-
}
38-
39-
`, name),
40-
Check: func(s *terraform.State) error {
41-
w := databricks.Must(databricks.NewWorkspaceClient())
42-
ctx := context.Background()
43-
executor, err := w.CommandExecution.Start(ctx, clusterID, compute.LanguagePython)
44-
if err != nil {
45-
return err
46-
}
47-
defer executor.Destroy(ctx)
48-
installResults, err := executor.Execute(ctx, `%pip install mlflow`)
49-
if err != nil {
50-
return err
51-
}
52-
if installResults.Err() != nil {
53-
return installResults.Err()
54-
}
55-
results, err := executor.Execute(ctx, fmt.Sprintf(`
56-
import time
57-
import mlflow
58-
import mlflow.pyfunc
59-
from mlflow.tracking.artifact_utils import get_artifact_uri
60-
from mlflow.tracking.client import MlflowClient
61-
62-
mlflow.set_experiment("/Shared/%[1]s-exp")
63-
64-
class SampleModel(mlflow.pyfunc.PythonModel):
65-
def predict(self, ctx, input_df):
66-
return 7
67-
artifact_path = 'sample_model'
68-
69-
with mlflow.start_run() as new_run:
70-
mlflow.pyfunc.log_model(python_model=SampleModel(), artifact_path=artifact_path)
71-
run1_id = new_run.info.run_id
72-
source = get_artifact_uri(run_id=run1_id, artifact_path=artifact_path)
73-
74-
client = MlflowClient()
75-
client.create_model_version(name="%[1]s-model", source=source, run_id=run1_id)
76-
client.create_model_version(name="%[1]s-model", source=source, run_id=run1_id)
77-
while client.get_model_version(name="%[1]s-model", version="1").status != "READY":
78-
time.sleep(10)
79-
while client.get_model_version(name="%[1]s-model", version="2").status != "READY":
80-
time.sleep(10)
81-
`, name))
82-
if err != nil {
83-
return err
84-
}
85-
return results.Err()
86-
},
87-
},
88-
step{
89-
Template: fmt.Sprintf(`
90-
resource "databricks_mlflow_experiment" "exp" {
91-
name = "/Shared/%[1]s-exp"
92-
}
93-
resource "databricks_mlflow_model" "model" {
94-
name = "%[1]s-model"
95-
}
9620
resource "databricks_model_serving" "endpoint" {
97-
name = "%[1]s"
21+
name = "%s"
9822
config {
9923
served_models {
10024
name = "prod_model"
101-
model_name = "%[1]s-model"
25+
model_name = "experiment-fixture-model"
10226
model_version = "1"
10327
workload_size = "Small"
10428
scale_to_zero_enabled = true
10529
}
10630
served_models {
10731
name = "candidate_model"
108-
model_name = "%[1]s-model"
32+
model_name = "experiment-fixture-model"
10933
model_version = "2"
11034
workload_size = "Small"
11135
scale_to_zero_enabled = false
@@ -132,21 +56,15 @@ func TestAccModelServing(t *testing.T) {
13256
}
13357
}
13458
`, name),
135-
},
59+
},
13660
step{
13761
Template: fmt.Sprintf(`
138-
resource "databricks_mlflow_experiment" "exp" {
139-
name = "/Shared/%[1]s-exp"
140-
}
141-
resource "databricks_mlflow_model" "model" {
142-
name = "%[1]s-model"
143-
}
14462
resource "databricks_model_serving" "endpoint" {
145-
name = "%[1]s"
63+
name = "%s"
14664
config {
14765
served_models {
14866
name = "prod_model"
149-
model_name = "%[1]s-model"
67+
model_name = "experiment-fixture-model"
15068
model_version = "1"
15169
workload_size = "Small"
15270
scale_to_zero_enabled = true

0 commit comments

Comments (0)