Merged · Changes from 4 commits
3 changes: 1 addition & 2 deletions noxfile.py
@@ -163,8 +163,7 @@ def unit_noextras(session):
# so that it continues to be an optional dependency.
# https://github.com/googleapis/python-bigquery/issues/1877
if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
session.install("pyarrow==4.0.0")

session.install("pyarrow==4.0.0", "numpy==1.20.2")
default(session, install_extras=False)


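This pins numpy==1.20.2 next to the existing pyarrow==4.0.0 pin on the oldest unit-test Python, so the no-extras session keeps exercising the minimum supported versions while both libraries remain optional dependencies. A minimal sketch of the pattern, assuming an illustrative UNIT_TEST_PYTHON_VERSIONS list and a plain pytest invocation in place of the noxfile's default() helper:

import nox

# Illustrative list; the real noxfile defines UNIT_TEST_PYTHON_VERSIONS itself.
UNIT_TEST_PYTHON_VERSIONS = ["3.9", "3.10", "3.11", "3.12"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit_noextras(session):
    """Run unit tests without installing the optional extras."""
    # Install pyarrow and numpy explicitly, and only at their oldest supported
    # versions on the oldest Python, so that they stay optional dependencies.
    # https://github.com/googleapis/python-bigquery/issues/1877
    if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
        session.install("pyarrow==4.0.0", "numpy==1.20.2")
    session.install("-e", ".", "pytest")
    session.run("pytest", "tests/unit")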
1 change: 1 addition & 0 deletions testing/constraints-3.9.txt
@@ -20,6 +20,7 @@ ipykernel==6.2.0
opentelemetry-api==1.1.0
opentelemetry-instrumentation==0.20b0
opentelemetry-sdk==1.1.0
numpy==1.20.2
packaging==24.2.0
pandas==1.3.0
pandas-gbq==0.26.1
5 changes: 4 additions & 1 deletion tests/unit/test__pandas_helpers.py
@@ -16,7 +16,8 @@
import datetime
import decimal
import functools
import gc

# import gc
import operator
import queue
from typing import Union
@@ -1887,6 +1888,7 @@ def fake_download_stream(
assert queue_used.maxsize == expected_maxsize


'''
@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
def test__download_table_bqstorage_shuts_down_workers(
monkeypatch,
@@ -2013,6 +2015,7 @@ def test_download_arrow_row_iterator_unknown_field_type(module_under_test):
col = result.columns[1]
assert type(col) is pyarrow.lib.DoubleArray
assert col.to_pylist() == [2.2, 22.22, 222.222]
'''


@pytest.mark.skipif(isinstance(pyarrow, mock.Mock), reason="Requires `pyarrow`")
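The hunks above disable test__download_table_bqstorage_shuts_down_workers and test_download_arrow_row_iterator_unknown_field_type by enclosing them in a module-level string literal (the ''' pair), so they are never defined or collected, and comment out the now-unused import gc. The same effect can also be written with pytest's skip marker; a minimal sketch, with an illustrative reason string and the original test body elided:

import pytest


@pytest.mark.skip(reason="temporarily disabled")  # illustrative reason text
def test__download_table_bqstorage_shuts_down_workers():
    ...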
2 changes: 1 addition & 1 deletion tests/unit/test__pyarrow_helpers.py
@@ -14,7 +14,7 @@

import pytest


numpy = pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")


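numpy now gets the same module-level guard as pyarrow: pytest.importorskip either returns the imported module or marks every test collected from the file as skipped when the dependency (or minimum version) is missing. A small self-contained sketch of the pattern; the test body is illustrative:

import pytest

numpy = pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")


def test_roundtrip_through_pyarrow():
    # Runs only when both optional dependencies are importable.
    arr = pyarrow.array(numpy.arange(3))
    assert arr.to_pylist() == [0, 1, 2]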
1 change: 1 addition & 0 deletions tests/unit/test_dbapi__helpers.py
@@ -210,6 +210,7 @@ def test_empty_iterable(self):
self.assertEqual(list(result), [])

def test_non_empty_iterable(self):
pytest.importorskip("numpy")
pytest.importorskip("pyarrow")
from tests.unit.helpers import _to_pyarrow

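Here the guard is added inside a single test rather than at module level, so only test_non_empty_iterable is skipped when numpy or pyarrow is missing and the rest of the class still runs. A minimal sketch of that per-test pattern in a unittest-style class (the class name and assertion are illustrative), assuming the suite is run under pytest as these tests are:

import unittest

import pytest


class TestIterableConversion(unittest.TestCase):  # hypothetical class name
    def test_empty_iterable(self):
        # Needs no optional dependencies.
        self.assertEqual(list(iter([])), [])

    def test_non_empty_iterable(self):
        pytest.importorskip("numpy")
        pyarrow = pytest.importorskip("pyarrow")
        # Reached only when both optional dependencies are installed.
        self.assertEqual(pyarrow.array([1, 2]).to_pylist(), [1, 2])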
28 changes: 26 additions & 2 deletions tests/unit/test_table.py
@@ -2416,13 +2416,15 @@ def test_to_arrow_error_if_pyarrow_is_none(self):
row_iterator.to_arrow()

def test_to_arrow(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow")
row_iterator = self._make_one()
tbl = row_iterator.to_arrow()
self.assertIsInstance(tbl, pyarrow.Table)
self.assertEqual(tbl.num_rows, 0)

def test_to_arrow_iterable(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3096,6 +3098,7 @@ def test_to_arrow_iterable_w_bqstorage(self):
bqstorage_client._transport.grpc_channel.close.assert_not_called()

def test_to_arrow(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3180,6 +3183,7 @@ def test_to_arrow(self):
)

def test_to_arrow_w_nulls(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3216,6 +3220,7 @@ def test_to_arrow_w_nulls(self):
self.assertEqual(ages, [32, 29, None, 111])

def test_to_arrow_w_unknown_type(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3261,6 +3266,7 @@ def test_to_arrow_w_unknown_type(self):
self.assertTrue(all("sport" in str(warning) for warning in warned))

def test_to_arrow_w_empty_table(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3302,6 +3308,7 @@ def test_to_arrow_w_empty_table(self):
self.assertEqual(child_field.type.value_type[1].name, "age")

def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self):
pytest.importorskip("numpy")
pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery.schema import SchemaField
@@ -3344,6 +3351,7 @@ def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self):
mock_client._ensure_bqstorage_client.assert_not_called()

def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self):
pytest.importorskip("numpy")
pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery.schema import SchemaField
@@ -3382,6 +3390,7 @@ def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self):
mock_client._ensure_bqstorage_client.assert_not_called()

def test_to_arrow_w_bqstorage(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -3465,6 +3474,7 @@ def test_to_arrow_w_bqstorage(self):
bqstorage_client._transport.grpc_channel.close.assert_not_called()

def test_to_arrow_w_bqstorage_creates_client(self):
pytest.importorskip("numpy")
pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -3498,6 +3508,7 @@ def test_to_arrow_w_bqstorage_creates_client(self):
bqstorage_client._transport.grpc_channel.close.assert_called_once()

def test_to_arrow_ensure_bqstorage_client_wo_bqstorage(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip(
"pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
)
@@ -3531,6 +3542,7 @@ def mock_verify_version(raise_if_error: bool = False):
self.assertEqual(tbl.num_rows, 2)

def test_to_arrow_w_bqstorage_no_streams(self):
pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -3570,6 +3582,7 @@ def test_to_arrow_w_bqstorage_no_streams(self):
self.assertEqual(actual_table.schema[2].name, "colB")

def test_to_arrow_progress_bar(self):
pytest.importorskip("numpy")
pytest.importorskip("pyarrow")
pytest.importorskip("tqdm")
pytest.importorskip("tqdm.notebook")
@@ -3703,6 +3716,7 @@ def test_to_dataframe_iterable_with_dtypes(self):
self.assertEqual(df_2["age"][0], 33)

def test_to_dataframe_iterable_w_bqstorage(self):
pytest.importorskip("numpy")
pandas = pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
pytest.importorskip("google.cloud.bigquery_storage")
@@ -3777,6 +3791,7 @@ def test_to_dataframe_iterable_w_bqstorage(self):
bqstorage_client._transport.grpc_channel.close.assert_not_called()

def test_to_dataframe_iterable_w_bqstorage_max_results_warning(self):
pytest.importorskip("numpy")
pandas = pytest.importorskip("pandas")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -4520,7 +4535,7 @@ def test_to_dataframe_w_none_dtypes_mapper(self):

def test_to_dataframe_w_unsupported_dtypes_mapper(self):
pytest.importorskip("pandas")
import numpy
numpy = pytest.importorskip("numpy")
from google.cloud.bigquery.schema import SchemaField

schema = [
@@ -4804,6 +4819,7 @@ def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self):
mock_client._ensure_bqstorage_client.assert_not_called()

def test_to_dataframe_w_bqstorage_creates_client(self):
pytest.importorskip("numpy")
pytest.importorskip("pandas")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -4837,6 +4853,7 @@ def test_to_dataframe_w_bqstorage_creates_client(self):
bqstorage_client._transport.grpc_channel.close.assert_called_once()

def test_to_dataframe_w_bqstorage_no_streams(self):
pytest.importorskip("numpy")
pytest.importorskip("pandas")
pytest.importorskip("google.cloud.bigquery_storage")
from google.cloud.bigquery import schema
@@ -4865,6 +4882,7 @@ def test_to_dataframe_w_bqstorage_no_streams(self):
self.assertTrue(got.empty)

def test_to_dataframe_w_bqstorage_logs_session(self):
pytest.importorskip("numpy")
pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pytest.importorskip("pyarrow")
@@ -4889,6 +4907,7 @@ def test_to_dataframe_w_bqstorage_logs_session(self):
)

def test_to_dataframe_w_bqstorage_empty_streams(self):
pytest.importorskip("numpy")
pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -4943,6 +4962,7 @@ def test_to_dataframe_w_bqstorage_empty_streams(self):
self.assertTrue(got.empty)

def test_to_dataframe_w_bqstorage_nonempty(self):
pytest.importorskip("numpy")
pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -5025,6 +5045,7 @@ def test_to_dataframe_w_bqstorage_nonempty(self):
bqstorage_client._transport.grpc_channel.close.assert_not_called()

def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
pytest.importorskip("numpy")
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -5077,6 +5098,7 @@ def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
self.assertTrue(got.index.is_unique)

def test_to_dataframe_w_bqstorage_updates_progress_bar(self):
pytest.importorskip("numpy")
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -5154,6 +5176,7 @@ def blocking_to_arrow(*args, **kwargs):
tqdm_mock().close.assert_called_once()

def test_to_dataframe_w_bqstorage_exits_on_keyboardinterrupt(self):
pytest.importorskip("numpy")
bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -5329,6 +5352,7 @@ def test_to_dataframe_w_bqstorage_snapshot(self):
row_iterator.to_dataframe(bqstorage_client)

def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self):
pytest.importorskip("numpy")
pytest.importorskip("google.cloud.bigquery_storage")
pandas = pytest.importorskip("pandas")
pyarrow = pytest.importorskip("pyarrow")
@@ -5611,7 +5635,7 @@ def test_rowiterator_to_geodataframe_delegation(self, to_dataframe):
"""
pandas = pytest.importorskip("pandas")
geopandas = pytest.importorskip("geopandas")
import numpy
numpy = pytest.importorskip("numpy")
from shapely import wkt

row_iterator = self._make_one_from_data(
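Throughout test_table.py the change is uniform: each pyarrow/pandas-dependent test gains a pytest.importorskip("numpy") guard, and the two bare import numpy statements become numpy = pytest.importorskip("numpy"), so a missing numpy is reported as a skip rather than an ImportError. A minimal before/after sketch (the assertion is illustrative):

import pytest


def test_uses_numpy():
    # Before: `import numpy` here raised ImportError when the optional
    # dependency was absent. After: the test is reported as skipped instead.
    numpy = pytest.importorskip("numpy")
    assert numpy.asarray([1, 2, 3]).sum() == 6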
1 change: 1 addition & 0 deletions tests/unit/test_table_arrow.py
@@ -18,6 +18,7 @@
import google.cloud.bigquery.table


numpy = pytest.importorskip("numpy")
pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")

