 SPARK_PATH = os.path.join(DATA_DIR, "spark")


-@pytest.fixture(scope="module")
+@pytest.fixture(scope="module", autouse=True)
 def build_jar():
     jar_file_path = os.path.join(SPARK_PATH, "code", "java", "hello-java-spark")
     # compile java file
@@ -207,11 +207,11 @@ def configuration() -> list:


 def test_sagemaker_pyspark_v3(
-    spark_v3_py_processor, spark_v3_jar_processor, sagemaker_session, configuration, build_jar
+    spark_v3_py_processor, spark_v3_jar_processor, sagemaker_session, configuration
 ):
     test_sagemaker_pyspark_multinode(spark_v3_py_processor, sagemaker_session, configuration)
     test_sagemaker_java_jar_multinode(
-        spark_v3_jar_processor, sagemaker_session, configuration, build_jar
+        spark_v3_jar_processor, sagemaker_session, configuration
     )


@@ -281,7 +281,7 @@ def test_sagemaker_pyspark_multinode(spark_py_processor, sagemaker_session, conf


 def test_sagemaker_java_jar_multinode(
-    spark_jar_processor, sagemaker_session, configuration, build_jar
+    spark_jar_processor, sagemaker_session, configuration
 ):
     """Test SparkJarProcessor using Java application jar"""
     bucket = spark_jar_processor.sagemaker_session.default_bucket()
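
The change makes `build_jar` an autouse, module-scoped fixture, so pytest compiles the jar once per test module automatically and the tests no longer have to declare it as a parameter. A minimal standalone sketch of that pattern, with illustrative names not taken from this repository:

```python
import pytest

ARTIFACTS = []  # stands in for the compiled jar


@pytest.fixture(scope="module", autouse=True)
def build_artifact():
    # autouse=True: pytest runs this once per module before the first
    # test, even though no test requests it by name.
    ARTIFACTS.append("hello.jar")
    yield
    ARTIFACTS.clear()  # module-level teardown


def test_artifact_is_built():
    # No `build_artifact` parameter needed; the fixture already ran.
    assert "hello.jar" in ARTIFACTS
```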