@@ -35,7 +35,7 @@
 SPARK_PATH = os.path.join(DATA_DIR, "spark")


-@pytest.fixture(scope="module")
+@pytest.fixture(scope="module", autouse=True)
 def build_jar():
     jar_file_path = os.path.join(SPARK_PATH, "code", "java", "hello-java-spark")
     # compile java file
@@ -207,12 +207,10 @@ def configuration() -> list:



 def test_sagemaker_pyspark_v3(
-    spark_v3_py_processor, spark_v3_jar_processor, sagemaker_session, configuration, build_jar
+    spark_v3_py_processor, spark_v3_jar_processor, sagemaker_session, configuration
 ):
     test_sagemaker_pyspark_multinode(spark_v3_py_processor, sagemaker_session, configuration)
-    test_sagemaker_java_jar_multinode(
-        spark_v3_jar_processor, sagemaker_session, configuration, build_jar
-    )
+    test_sagemaker_java_jar_multinode(spark_v3_jar_processor, sagemaker_session, configuration)


 def test_sagemaker_pyspark_multinode(spark_py_processor, sagemaker_session, configuration):
@@ -280,9 +278,7 @@ def test_sagemaker_pyspark_multinode(spark_py_processor, sagemaker_session, configuration):
     assert len(output_contents) != 0


-def test_sagemaker_java_jar_multinode(
-    spark_jar_processor, sagemaker_session, configuration, build_jar
-):
+def test_sagemaker_java_jar_multinode(spark_jar_processor, sagemaker_session, configuration):
     """Test SparkJarProcessor using Java application jar"""
     bucket = spark_jar_processor.sagemaker_session.default_bucket()
     with open(os.path.join(SPARK_PATH, "files", "data.jsonl")) as data:
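The change hinges on pytest's `autouse` flag: a module-scoped autouse fixture runs once per test module, before any test in it, without being requested through a parameter, which is why `build_jar` can be dropped from the test signatures above. A minimal sketch of that behavior, assuming standard pytest semantics; the fixture body and test name here are illustrative stand-ins, not the real implementation:

import pytest

@pytest.fixture(scope="module", autouse=True)
def build_jar():
    # Hypothetical stand-in for the real fixture, which compiles the
    # hello-java-spark jar. With autouse=True this runs once, before the
    # first test in the module, even though no test lists it as a parameter.
    print("compiling jar ...")
    yield
    # Any teardown after the yield would run once, after the module's last test.

def test_java_jar():
    # build_jar has already executed by the time this test body runs.
    assert True

One trade-off of this design: the jar is now built for every run of this module, even for tests that never touch it, in exchange for not having to thread `build_jar` through call chains like `test_sagemaker_pyspark_v3` -> `test_sagemaker_java_jar_multinode`.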