28 | 28 | PYTORCH_SQUEEZENET_RESOURCE_DIR, |
29 | 29 | PYTORCH_SQUEEZENET_MLFLOW_RESOURCE_DIR, |
30 | 30 | SERVE_SAGEMAKER_ENDPOINT_TIMEOUT, |
31 | | - SERVE_LOCAL_CONTAINER_TIMEOUT, |
| 31 | + # SERVE_LOCAL_CONTAINER_TIMEOUT, |
32 | 32 | PYTHON_VERSION_IS_NOT_310, |
33 | 33 | ) |
34 | 34 | from tests.integ.timeout import timeout |
@@ -128,36 +128,36 @@ def model_builder(request): |
128 | 128 | return request.getfixturevalue(request.param) |
129 | 129 |
130 | 130 |
131 | | -@pytest.mark.skipif( |
132 | | - PYTHON_VERSION_IS_NOT_310, |
133 | | - reason="The goal of these test are to test the serving components of our feature", |
134 | | -) |
135 | | -@pytest.mark.flaky(reruns=3, reruns_delay=2) |
136 | | -@pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True) |
137 | | -def test_happy_mlflow_pytorch_local_container_with_torch_serve( |
138 | | - sagemaker_session, model_builder, test_image |
139 | | -): |
140 | | - logger.info("Running in LOCAL_CONTAINER mode...") |
141 | | - caught_ex = None |
142 | | - |
143 | | - model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session) |
144 | | - |
145 | | - with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT): |
146 | | - try: |
147 | | - logger.info("Deploying and predicting in LOCAL_CONTAINER mode...") |
148 | | - predictor = model.deploy() |
149 | | - logger.info("Local container successfully deployed.") |
150 | | - predictor.predict(test_image) |
151 | | - except Exception as e: |
152 | | - logger.exception("test failed") |
153 | | - caught_ex = e |
154 | | - finally: |
155 | | - if model.modes[str(Mode.LOCAL_CONTAINER)].container: |
156 | | - model.modes[str(Mode.LOCAL_CONTAINER)].container.kill() |
157 | | - if caught_ex: |
158 | | - assert ( |
159 | | - False |
160 | | - ), f"{caught_ex} was thrown when running pytorch squeezenet local container test" |
| 131 | +# @pytest.mark.skipif( |
| 132 | +# PYTHON_VERSION_IS_NOT_310, |
| 133 | +# reason="The goal of these test are to test the serving components of our feature", |
| 134 | +# ) |
| 135 | +# @pytest.mark.flaky(reruns=3, reruns_delay=2) |
| 136 | +# @pytest.mark.parametrize("model_builder", ["model_builder_local_builder"], indirect=True) |
| 137 | +# def test_happy_mlflow_pytorch_local_container_with_torch_serve( |
| 138 | +# sagemaker_session, model_builder, test_image |
| 139 | +# ): |
| 140 | +# logger.info("Running in LOCAL_CONTAINER mode...") |
| 141 | +# caught_ex = None |
| 142 | +# |
| 143 | +# model = model_builder.build(mode=Mode.LOCAL_CONTAINER, sagemaker_session=sagemaker_session) |
| 144 | +# |
| 145 | +# with timeout(minutes=SERVE_LOCAL_CONTAINER_TIMEOUT): |
| 146 | +# try: |
| 147 | +# logger.info("Deploying and predicting in LOCAL_CONTAINER mode...") |
| 148 | +# predictor = model.deploy() |
| 149 | +# logger.info("Local container successfully deployed.") |
| 150 | +# predictor.predict(test_image) |
| 151 | +# except Exception as e: |
| 152 | +# logger.exception("test failed") |
| 153 | +# caught_ex = e |
| 154 | +# finally: |
| 155 | +# if model.modes[str(Mode.LOCAL_CONTAINER)].container: |
| 156 | +# model.modes[str(Mode.LOCAL_CONTAINER)].container.kill() |
| 157 | +# if caught_ex: |
| 158 | +# assert ( |
| 159 | +# False |
| 160 | +# ), f"{caught_ex} was thrown when running pytorch squeezenet local container test" |
161 | 161 |
162 | 162 |
163 | 163 | @pytest.mark.skipif( |