
Commit e097c60

added clean up logic
1 parent 8fa5f2f commit e097c60

File tree

1 file changed: +17 -1 lines changed


tests/integration/test_session.py

Lines changed: 17 additions & 1 deletion
@@ -131,17 +131,25 @@ def session_template_controller_client(test_client_options):
 
 @pytest.fixture
 def connect_session(test_project, test_region, os_environment):
-    return (
+    session = (
         DataprocSparkSession.builder.projectId(test_project)
         .location(test_region)
         .getOrCreate()
     )
+    yield session
+    # Clean up the session after each test to prevent resource conflicts
+    try:
+        session.stop()
+    except Exception:
+        # Ignore cleanup errors to avoid masking the actual test failure
+        pass
 
 
 @pytest.fixture
 def session_name(test_project, test_region, connect_session):
     return f"projects/{test_project}/locations/{test_region}/sessions/{DataprocSparkSession._active_s8s_session_id}"
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -178,6 +186,7 @@ def test_create_spark_session_with_default_notebook_behavior(
     ]
     assert DataprocSparkSession._active_s8s_session_uuid is None
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -201,6 +210,7 @@ def test_reuse_s8s_spark_session(
 
     connect_session.stop()
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -219,6 +229,7 @@ def test_stop_spark_session_with_deleted_serverless_session(
     assert DataprocSparkSession._active_s8s_session_uuid is None
     assert DataprocSparkSession._active_s8s_session_id is None
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -239,6 +250,7 @@ def test_stop_spark_session_with_terminated_serverless_session(
     assert DataprocSparkSession._active_s8s_session_uuid is None
     assert DataprocSparkSession._active_s8s_session_id is None
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -328,6 +340,7 @@ def session_template_name(
         delete_session_template_request
     )
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -409,6 +422,7 @@ def generate_random2(row) -> int:
     assert isinstance(sum_random, int), "Result is not of type int"
     connect_session.stop()
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -452,6 +466,7 @@ def test_sql_functions(connect_session):
     assert tax_results[1]["tax"] == 20.0
     assert tax_results[2]["tax"] == 15.0
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
@@ -485,6 +500,7 @@ def uppercase_func(text):
     # Clean up
     connect_session.sql("DROP VIEW IF EXISTS test_table")
 
+
 @pytest.mark.skip(
     reason="Skipping PyPI package installation test since it's not supported yet"
 )
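
For reference, a minimal sketch of how the new yield-style fixture behaves at run time: pytest treats everything after the yield as teardown, so session.stop() is attempted once the test finishes, whether it passed or raised. The _FakeSession class and test_uses_session below are hypothetical stand-ins used only to illustrate the lifecycle, and the fixture signature is simplified (the real one takes test_project, test_region, and os_environment); they are not part of the patched file.

import pytest


class _FakeSession:
    """Hypothetical stand-in for the DataprocSparkSession built in the real fixture."""

    def stop(self):
        print("session stopped")


@pytest.fixture
def connect_session():
    session = _FakeSession()
    yield session  # the test body runs while the fixture is suspended here
    # Teardown: everything after yield runs once the test finishes, pass or fail
    try:
        session.stop()
    except Exception:
        # Ignore cleanup errors to avoid masking the actual test failure
        pass


def test_uses_session(connect_session):
    # The test never calls stop(); the fixture's post-yield block cleans up
    assert connect_session is not None

Trade-off worth noting: swallowing exceptions in teardown keeps a failed stop() from adding noise to the test report, at the cost of hiding genuine cleanup problems.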

0 commit comments
