Skip to content

Commit 99493bf

Browse files
authored
test: remove pragma (#2212)
* test: remove pragma * test: remove comment about pragma * updates to conditionals related to pandas 2.0+ tests
1 parent bd5aba8 commit 99493bf

File tree

3 files changed

+13
-22
lines changed

3 files changed

+13
-22
lines changed

google/cloud/bigquery/_pandas_helpers.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -337,13 +337,8 @@ def types_mapper(arrow_data_type):
337337
):
338338
return range_date_dtype
339339

340-
# TODO: this section does not have a test yet OR at least not one that is
341-
# recognized by coverage, hence the pragma. See Issue: #2132
342-
elif (
343-
range_timestamp_dtype is not None
344-
and arrow_data_type.equals( # pragma: NO COVER
345-
range_timestamp_dtype.pyarrow_dtype
346-
)
340+
elif range_timestamp_dtype is not None and arrow_data_type.equals(
341+
range_timestamp_dtype.pyarrow_dtype
347342
):
348343
return range_timestamp_dtype
349344

tests/unit/job/test_query_pandas.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -647,12 +647,6 @@ def test_to_dataframe_bqstorage_no_pyarrow_compression():
647647
)
648648

649649

650-
# TODO: The test needs work to account for pandas 2.0+. See Issue: #2132
651-
# pragma added due to issues with coverage.
652-
@pytest.mark.skipif(
653-
pandas.__version__.startswith("2."),
654-
reason="pandas 2.0 changes some default dtypes and we haven't updated the test to account for those",
655-
)
656650
@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`")
657651
def test_to_dataframe_column_dtypes():
658652
from google.cloud.bigquery.job import QueryJob as target_class
@@ -704,14 +698,18 @@ def test_to_dataframe_column_dtypes():
704698
exp_columns = [field["name"] for field in query_resource["schema"]["fields"]]
705699
assert list(df) == exp_columns # verify the column names
706700

707-
assert df.start_timestamp.dtype.name == "datetime64[ns, UTC]"
708701
assert df.seconds.dtype.name == "Int64"
709702
assert df.miles.dtype.name == "float64"
710703
assert df.km.dtype.name == "float16"
711704
assert df.payment_type.dtype.name == "object"
712705
assert df.complete.dtype.name == "boolean"
713706
assert df.date.dtype.name == "dbdate"
714707

708+
if pandas.__version__.startswith("2."):
709+
assert df.start_timestamp.dtype.name == "datetime64[us, UTC]"
710+
else:
711+
assert df.start_timestamp.dtype.name == "datetime64[ns, UTC]"
712+
715713

716714
def test_to_dataframe_column_date_dtypes():
717715
from google.cloud.bigquery.job import QueryJob as target_class

tests/unit/test_table_pandas.py

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,6 @@ def class_under_test():
3434
return RowIterator
3535

3636

37-
# TODO: The test needs work to account for pandas 2.0+. See Issue: #2132
38-
# pragma added due to issues with coverage.
39-
@pytest.mark.skipif(
40-
pandas.__version__.startswith("2."),
41-
reason="pandas 2.0 changes some default dtypes and we haven't updated the test to account for those",
42-
)
4337
def test_to_dataframe_nullable_scalars(
4438
monkeypatch, class_under_test
4539
): # pragma: NO COVER
@@ -113,14 +107,18 @@ def test_to_dataframe_nullable_scalars(
113107
assert df.dtypes["bool_col"].name == "boolean"
114108
assert df.dtypes["bytes_col"].name == "object"
115109
assert df.dtypes["date_col"].name == "dbdate"
116-
assert df.dtypes["datetime_col"].name == "datetime64[ns]"
117110
assert df.dtypes["float64_col"].name == "float64"
118111
assert df.dtypes["int64_col"].name == "Int64"
119112
assert df.dtypes["numeric_col"].name == "object"
120113
assert df.dtypes["string_col"].name == "object"
121114
assert df.dtypes["time_col"].name == "dbtime"
122-
assert df.dtypes["timestamp_col"].name == "datetime64[ns, UTC]"
123115
assert df.dtypes["json_col"].name == "object"
116+
if pandas.__version__.startswith("2."):
117+
assert df.dtypes["datetime_col"].name == "datetime64[us]"
118+
assert df.dtypes["timestamp_col"].name == "datetime64[us, UTC]"
119+
else:
120+
assert df.dtypes["datetime_col"].name == "datetime64[ns]"
121+
assert df.dtypes["timestamp_col"].name == "datetime64[ns, UTC]"
124122

125123
# Check for expected values.
126124
assert df["bignumeric_col"][0] == decimal.Decimal("123.456789101112131415")

0 commit comments

Comments (0)