
Commit 386f35d

refactor: move DDL gen to bigframes.session._io, add missing test (#131)
Towards internal issue 280662868 🦕
1 parent f9ba28c commit 386f35d

5 files changed (+26 -12 lines changed)


bigframes/dataframe.py

Lines changed: 7 additions & 7 deletions
@@ -46,7 +46,6 @@
 import bigframes.core.guid
 import bigframes.core.indexers as indexers
 import bigframes.core.indexes as indexes
-import bigframes.core.io
 import bigframes.core.ordering as order
 import bigframes.core.utils as utils
 import bigframes.core.window
@@ -56,6 +55,7 @@
 import bigframes.operations.aggregations as agg_ops
 import bigframes.series
 import bigframes.series as bf_series
+import bigframes.session._io.bigquery
 import third_party.bigframes_vendored.pandas.core.frame as vendored_pandas_frame
 import third_party.bigframes_vendored.pandas.pandas._typing as vendored_pandas_typing
 
@@ -2201,9 +2201,9 @@ def to_csv(
             raise NotImplementedError(ERROR_IO_REQUIRES_WILDCARD)
 
         result_table = self._run_io_query(
-            index=index, ordering_id=bigframes.core.io.IO_ORDERING_ID
+            index=index, ordering_id=bigframes.session._io.bigquery.IO_ORDERING_ID
         )
-        export_data_statement = bigframes.core.io.create_export_csv_statement(
+        export_data_statement = bigframes.session._io.bigquery.create_export_csv_statement(
             f"{result_table.project}.{result_table.dataset_id}.{result_table.table_id}",
             uri=path_or_buf,
             field_delimiter=sep,
@@ -2243,9 +2243,9 @@ def to_json(
             )
 
         result_table = self._run_io_query(
-            index=index, ordering_id=bigframes.core.io.IO_ORDERING_ID
+            index=index, ordering_id=bigframes.session._io.bigquery.IO_ORDERING_ID
         )
-        export_data_statement = bigframes.core.io.create_export_data_statement(
+        export_data_statement = bigframes.session._io.bigquery.create_export_data_statement(
             f"{result_table.project}.{result_table.dataset_id}.{result_table.table_id}",
             uri=path_or_buf,
             format="JSON",
@@ -2319,9 +2319,9 @@ def to_parquet(
             export_options["compression"] = compression.upper()
 
         result_table = self._run_io_query(
-            index=index, ordering_id=bigframes.core.io.IO_ORDERING_ID
+            index=index, ordering_id=bigframes.session._io.bigquery.IO_ORDERING_ID
         )
-        export_data_statement = bigframes.core.io.create_export_data_statement(
+        export_data_statement = bigframes.session._io.bigquery.create_export_data_statement(
             f"{result_table.project}.{result_table.dataset_id}.{result_table.table_id}",
             uri=path,
             format="PARQUET",
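The export paths above only change where the helpers are imported from; the function and constant names stay the same. A minimal sketch of the rename, using only names visible in this diff (the value of IO_ORDERING_ID is whatever the library defines):

```python
# Old location (removed in this commit):
#   import bigframes.core.io
#   bigframes.core.io.IO_ORDERING_ID

# New location, as used by to_csv / to_json / to_parquet above:
import bigframes.session._io.bigquery

# The ordering-column constant and the EXPORT DATA statement builders
# (create_export_csv_statement, create_export_data_statement) now live here.
print(bigframes.session._io.bigquery.IO_ORDERING_ID)
```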

bigframes/session/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -67,13 +67,13 @@
 import bigframes.core as core
 import bigframes.core.blocks as blocks
 import bigframes.core.guid as guid
-import bigframes.core.io as bigframes_io
 from bigframes.core.ordering import IntegerEncoding, OrderingColumnReference
 import bigframes.core.utils as utils
 import bigframes.dataframe as dataframe
 import bigframes.formatting_helpers as formatting_helpers
 from bigframes.remote_function import read_gbq_function as bigframes_rgf
 from bigframes.remote_function import remote_function as bigframes_rf
+import bigframes.session._io.bigquery as bigframes_io
 import bigframes.session.clients
 import bigframes.version

bigframes/session/_io/__init__.py

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
1+
# Copyright 2023 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
File renamed without changes.

tests/unit/core/test_io.py renamed to tests/unit/session/test_io_bigquery.py

Lines changed: 5 additions & 4 deletions
@@ -18,15 +18,15 @@
 import google.cloud.bigquery as bigquery
 import pytest
 
-import bigframes.core.io
+import bigframes.session._io.bigquery
 
 
 def test_create_snapshot_sql_doesnt_timetravel_anonymous_datasets():
     table_ref = bigquery.TableReference.from_string(
         "my-test-project._e8166e0cdb.anonbb92cd"
     )
 
-    sql = bigframes.core.io.create_snapshot_sql(
+    sql = bigframes.session._io.bigquery.create_snapshot_sql(
         table_ref, datetime.datetime.now(datetime.timezone.utc)
     )
 
@@ -40,7 +40,7 @@ def test_create_snapshot_sql_doesnt_timetravel_anonymous_datasets():
 def test_create_snapshot_sql_doesnt_timetravel_session_datasets():
     table_ref = bigquery.TableReference.from_string("my-test-project._session.abcdefg")
 
-    sql = bigframes.core.io.create_snapshot_sql(
+    sql = bigframes.session._io.bigquery.create_snapshot_sql(
         table_ref, datetime.datetime.now(datetime.timezone.utc)
     )
 
@@ -101,4 +101,5 @@ def test_create_snapshot_sql_doesnt_timetravel_session_datasets():
     ),
 )
 def test_bq_schema_to_sql(schema: Iterable[bigquery.SchemaField], expected: str):
-    pass
+    sql = bigframes.session._io.bigquery.bq_schema_to_sql(schema)
+    assert sql == expected
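The renamed test module now exercises the relocated helpers directly. A standalone sketch of the same calls, assuming only the signatures visible in this diff (the schema fields below are illustrative, and the generated SQL is printed rather than asserted, since the expected strings live in the parametrize block elided from the hunk):

```python
import datetime

import google.cloud.bigquery as bigquery

import bigframes.session._io.bigquery

# Same anonymous-dataset table the test above uses; per the test name,
# snapshots of anonymous datasets should not time-travel.
table_ref = bigquery.TableReference.from_string(
    "my-test-project._e8166e0cdb.anonbb92cd"
)
sql = bigframes.session._io.bigquery.create_snapshot_sql(
    table_ref, datetime.datetime.now(datetime.timezone.utc)
)
print(sql)

# bq_schema_to_sql renders a BigQuery schema as SQL text; the field names
# here are made up for illustration, and the exact output format is defined
# by the parametrized cases not shown in this diff.
schema = [
    bigquery.SchemaField("idx", "INT64"),
    bigquery.SchemaField("val", "FLOAT64"),
]
print(bigframes.session._io.bigquery.bq_schema_to_sql(schema))
```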
