
Commit 0aaa944

partially skipping
1 parent e097c60 · commit 0aaa944

File tree

1 file changed: +1 −25 lines


tests/integration/test_session.py

Lines changed: 1 addition & 25 deletions
@@ -150,9 +150,6 @@ def session_name(test_project, test_region, connect_session):
     return f"projects/{test_project}/locations/{test_region}/sessions/{DataprocSparkSession._active_s8s_session_id}"
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_create_spark_session_with_default_notebook_behavior(
     auth_type, connect_session, session_name, session_controller_client
 ):
@@ -187,9 +184,6 @@ def test_create_spark_session_with_default_notebook_behavior(
     assert DataprocSparkSession._active_s8s_session_uuid is None
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_reuse_s8s_spark_session(
     connect_session, session_name, session_controller_client
 ):
@@ -211,9 +205,6 @@ def test_reuse_s8s_spark_session(
     connect_session.stop()
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_stop_spark_session_with_deleted_serverless_session(
     connect_session, session_name, session_controller_client
 ):
@@ -230,9 +221,6 @@ def test_stop_spark_session_with_deleted_serverless_session(
     assert DataprocSparkSession._active_s8s_session_id is None
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_stop_spark_session_with_terminated_serverless_session(
     connect_session, session_name, session_controller_client
 ):
@@ -251,9 +239,6 @@ def test_stop_spark_session_with_terminated_serverless_session(
     assert DataprocSparkSession._active_s8s_session_id is None
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_get_or_create_spark_session_with_terminated_serverless_session(
     test_project,
     test_region,
@@ -341,9 +326,6 @@ def session_template_name(
     )
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_create_spark_session_with_session_template_and_user_provided_dataproc_config(
     image_version,
     test_project,
@@ -423,9 +405,6 @@ def generate_random2(row) -> int:
     connect_session.stop()
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_sql_functions(connect_session):
     """Test basic SQL functions like col(), sum(), count(), etc."""
     # Import SparkConnect-compatible functions
@@ -467,9 +446,6 @@ def test_sql_functions(connect_session):
     assert tax_results[2]["tax"] == 15.0
 
 
-@pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
-)
 def test_sql_udf(connect_session):
     """Test SQL UDF registration and usage."""
     # Import SparkConnect-compatible functions
@@ -502,7 +478,7 @@ def uppercase_func(text):
 
 
 @pytest.mark.skip(
-    reason="Skipping PyPI package installation test since it's not supported yet"
+    reason="pending for verfication or fixing"
 )
 def test_session_reuse_with_custom_id(
     auth_type,
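
For reference, a minimal sketch of how pytest's @pytest.mark.skip marker behaves, since removing that marker is exactly what this commit does for most of the tests above. The module and test names below are hypothetical and not part of this repository.

import pytest


@pytest.mark.skip(reason="pending verification or fixing")
def test_still_skipped():
    # pytest collects this test but never runs the body while the marker
    # is present; the reason string appears in the skip report instead.
    assert False


def test_now_runs():
    # With the decorator removed (as done above for most of these tests),
    # pytest executes the test normally.
    assert 1 + 1 == 2

Running pytest with the -rs flag prints a summary of skipped tests together with their reasons, which is a quick way to confirm which tests remain disabled after a change like this.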
