diff --git a/google/cloud/dataproc_spark_connect/session.py b/google/cloud/dataproc_spark_connect/session.py
index 8d8d9f2..5a89831 100644
--- a/google/cloud/dataproc_spark_connect/session.py
+++ b/google/cloud/dataproc_spark_connect/session.py
@@ -721,8 +721,6 @@ def _get_dataproc_config(self):
         # Merge default configs with existing properties,
         # user configs take precedence
         for k, v in {
-            "spark.datasource.bigquery.viewsEnabled": "true",
-            "spark.datasource.bigquery.writeMethod": "direct",
             "spark.sql.catalog.spark_catalog": "com.google.cloud.spark.bigquery.BigQuerySparkSessionCatalog",
             "spark.sql.sources.default": "bigquery",
         }.items():
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 8b6be94..d75c8ce 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -1011,7 +1011,7 @@ def test_create_session_with_default_datasource_env_var(
             0
         ]
         self.assertNotIn(
-            "spark.datasource.bigquery.writeMethod",
+            "spark.sql.sources.default",
             create_session_request.session.runtime_config.properties,
         )
         mock_logger.warning.assert_not_called()
@@ -1034,18 +1034,6 @@ def test_create_session_with_default_datasource_env_var(
             0
         ]
         # BigQuery properties should be set
-        self.assertEqual(
-            create_session_request.session.runtime_config.properties.get(
-                "spark.datasource.bigquery.writeMethod"
-            ),
-            "direct",
-        )
-        self.assertEqual(
-            create_session_request.session.runtime_config.properties.get(
-                "spark.datasource.bigquery.viewsEnabled"
-            ),
-            "true",
-        )
         self.assertEqual(
             create_session_request.session.runtime_config.properties.get(
                 "spark.sql.sources.default"
             )
@@ -1078,7 +1066,7 @@ def test_create_session_with_default_datasource_env_var(
             0
         ]
         self.assertNotIn(
-            "spark.datasource.bigquery.writeMethod",
+            "spark.sql.sources.default",
             create_session_request.session.runtime_config.properties,
         )
         mock_logger.warning.assert_called_once_with(
@@ -1099,7 +1087,7 @@ def test_create_session_with_default_datasource_env_var(
         dataproc_config = Session()
         dataproc_config.runtime_config.version = "3.0"
         dataproc_config.runtime_config.properties = {
-            "spark.datasource.bigquery.writeMethod": "override_method",
+            "spark.sql.sources.default": "override_source",
             "spark.some.other.property": "some_value",
         }
         session = DataprocSparkSession.builder.dataprocSessionConfig(
@@ -1112,24 +1100,12 @@ def test_create_session_with_default_datasource_env_var(
         ]
         # The BigQuery default properties should be set,
         # but pre-existing properties should override defaults.
-        self.assertEqual(
-            create_session_request.session.runtime_config.properties.get(
-                "spark.datasource.bigquery.writeMethod"
-            ),
-            "override_method",
-        )  # Pre-existing property remains
-        self.assertEqual(
-            create_session_request.session.runtime_config.properties.get(
-                "spark.datasource.bigquery.viewsEnabled"
-            ),
-            "true",
-        )  # Default should still be set
         self.assertEqual(
             create_session_request.session.runtime_config.properties.get(
                 "spark.sql.sources.default"
             ),
-            "bigquery",
-        )  # Default should still be set
+            "override_source",
+        )  # Pre-existing property remains
         self.assertEqual(
             create_session_request.session.runtime_config.properties.get(
                 "spark.sql.catalog.spark_catalog"