3 changes: 1 addition & 2 deletions noxfile.py
@@ -163,8 +163,7 @@ def unit_noextras(session):
     # so that it continues to be an optional dependency.
     # https://github.com/googleapis/python-bigquery/issues/1877
     if session.python == UNIT_TEST_PYTHON_VERSIONS[0]:
-        session.install("pyarrow==4.0.0")
-
+        session.install("pyarrow==4.0.0", "numpy==1.20.2")
     default(session, install_extras=False)


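Background for the test-side changes below: `pytest.importorskip` imports a module if it is installed (optionally enforcing a minimum version) and otherwise skips the calling test, or the whole file when used at module level. A minimal sketch of the two patterns this PR relies on; the test body itself is hypothetical and not taken from the suite:

import pytest

# Module-level guard: if pyarrow is missing or older than 3.0.0, every test
# in this file is skipped (the pattern in test__pyarrow_helpers.py and
# test_table_arrow.py).
pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")


def test_int64_roundtrip():
    # Per-test guard: only this test is skipped when numpy is absent
    # (the pattern added throughout test_table.py).
    numpy = pytest.importorskip("numpy")
    arr = pyarrow.array(numpy.array([1, 2, 3], dtype="int64"))
    assert arr.to_pylist() == [1, 2, 3]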
1 change: 1 addition & 0 deletions testing/constraints-3.9.txt
@@ -20,6 +20,7 @@ ipykernel==6.2.0
 opentelemetry-api==1.1.0
 opentelemetry-instrumentation==0.20b0
 opentelemetry-sdk==1.1.0
+numpy==1.20.2
 packaging==24.2.0
 pandas==1.3.0
 pandas-gbq==0.26.1
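The constraints file pins the floor versions installed by the lowest-supported-Python unit session, so numpy==1.20.2 is the version those tests actually exercise. A rough sketch of how a constraints file feeds into a nox session; the session name and install arguments here are assumptions, not the project's actual noxfile:

import nox


@nox.session(python="3.9")
def unit_lowest_deps(session):  # hypothetical session name
    # The -c flag holds every dependency at the floor version listed in
    # the constraints file while the package itself is installed editable.
    session.install("-e", ".", "-c", "testing/constraints-3.9.txt")
    session.run("pytest", "tests/unit")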
1 change: 1 addition & 0 deletions tests/unit/test__pandas_helpers.py
@@ -1856,6 +1856,7 @@ def test__download_table_bqstorage_shuts_down_workers(
     Make sure that when the top-level iterator goes out of scope (is deleted),
     the child threads are also stopped.
     """
+    pytest.importorskip("google.cloud.bigquery_storage_v1")
    from google.cloud.bigquery import dataset
    from google.cloud.bigquery import table
    import google.cloud.bigquery_storage_v1.reader
2 changes: 1 addition & 1 deletion tests/unit/test__pyarrow_helpers.py
@@ -14,7 +14,7 @@

 import pytest

-
+numpy = pytest.importorskip("numpy")
 pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")


1 change: 1 addition & 0 deletions tests/unit/test_dbapi__helpers.py
@@ -210,6 +210,7 @@ def test_empty_iterable(self):
         self.assertEqual(list(result), [])

     def test_non_empty_iterable(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pyarrow")
         from tests.unit.helpers import _to_pyarrow

5 changes: 5 additions & 0 deletions tests/unit/test_magics.py
@@ -1276,6 +1276,11 @@ def test_bigquery_magic_with_no_query_cache(monkeypatch):
     bigquery.load_ipython_extension(ip)
     conn = make_connection()
     monkeypatch.setattr(magics.context, "_connection", conn)
+    monkeypatch.setattr(
+        magics.context,
+        "credentials",
+        mock.create_autospec(google.auth.credentials.Credentials, instance=True),
+    )
     monkeypatch.setattr(magics.context, "project", "project-from-context")

     # --no_query_cache option should override context.
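The autospec credentials patch in test_magics.py works because a mock created from a spec class passes isinstance checks against that class while rejecting attributes the real class does not define. A small standalone sketch of that behavior; the assertions are illustrative and not part of the PR:

from unittest import mock

import google.auth.credentials

# __class__ of the autospec mock is set to the spec class, so isinstance()
# checks in the magics code accept it without real auth being configured.
creds = mock.create_autospec(google.auth.credentials.Credentials, instance=True)
assert isinstance(creds, google.auth.credentials.Credentials)

# Attribute access is constrained to the spec's API surface.
creds.before_request  # exists on the real Credentials class
# creds.not_a_real_attr  # would raise AttributeError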
28 changes: 26 additions & 2 deletions tests/unit/test_table.py
@@ -2416,13 +2416,15 @@ def test_to_arrow_error_if_pyarrow_is_none(self):
             row_iterator.to_arrow()

     def test_to_arrow(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip("pyarrow")
         row_iterator = self._make_one()
         tbl = row_iterator.to_arrow()
         self.assertIsInstance(tbl, pyarrow.Table)
         self.assertEqual(tbl.num_rows, 0)

     def test_to_arrow_iterable(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3089,6 +3091,7 @@ def test_to_arrow_iterable_w_bqstorage(self):
         bqstorage_client._transport.grpc_channel.close.assert_not_called()

     def test_to_arrow(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3173,6 +3176,7 @@ def test_to_arrow(self):
         )

     def test_to_arrow_w_nulls(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3209,6 +3213,7 @@ def test_to_arrow_w_nulls(self):
         self.assertEqual(ages, [32, 29, None, 111])

     def test_to_arrow_w_unknown_type(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3254,6 +3259,7 @@ def test_to_arrow_w_unknown_type(self):
         self.assertTrue(all("sport" in str(warning) for warning in warned))

     def test_to_arrow_w_empty_table(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3295,6 +3301,7 @@ def test_to_arrow_w_empty_table(self):
         self.assertEqual(child_field.type.value_type[1].name, "age")

     def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery.schema import SchemaField
@@ -3337,6 +3344,7 @@ def test_to_arrow_max_results_w_explicit_bqstorage_client_warning(self):
         mock_client._ensure_bqstorage_client.assert_not_called()

     def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery.schema import SchemaField
@@ -3375,6 +3383,7 @@ def test_to_arrow_max_results_w_create_bqstorage_client_no_warning(self):
         mock_client._ensure_bqstorage_client.assert_not_called()

     def test_to_arrow_w_bqstorage(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -3458,6 +3467,7 @@ def test_to_arrow_w_bqstorage(self):
         bqstorage_client._transport.grpc_channel.close.assert_not_called()

     def test_to_arrow_w_bqstorage_creates_client(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -3491,6 +3501,7 @@ def test_to_arrow_w_bqstorage_creates_client(self):
         bqstorage_client._transport.grpc_channel.close.assert_called_once()

     def test_to_arrow_ensure_bqstorage_client_wo_bqstorage(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip(
             "pyarrow", minversion=self.PYARROW_MINIMUM_VERSION
         )
@@ -3524,6 +3535,7 @@ def mock_verify_version(raise_if_error: bool = False):
         self.assertEqual(tbl.num_rows, 2)

     def test_to_arrow_w_bqstorage_no_streams(self):
+        pytest.importorskip("numpy")
         pyarrow = pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -3563,6 +3575,7 @@ def test_to_arrow_w_bqstorage_no_streams(self):
         self.assertEqual(actual_table.schema[2].name, "colB")

     def test_to_arrow_progress_bar(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pyarrow")
         pytest.importorskip("tqdm")
         pytest.importorskip("tqdm.notebook")
@@ -3696,6 +3709,7 @@ def test_to_dataframe_iterable_with_dtypes(self):
         self.assertEqual(df_2["age"][0], 33)

     def test_to_dataframe_iterable_w_bqstorage(self):
+        pytest.importorskip("numpy")
         pandas = pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
         pytest.importorskip("google.cloud.bigquery_storage")
@@ -3770,6 +3784,7 @@ def test_to_dataframe_iterable_w_bqstorage(self):
         bqstorage_client._transport.grpc_channel.close.assert_not_called()

     def test_to_dataframe_iterable_w_bqstorage_max_results_warning(self):
+        pytest.importorskip("numpy")
         pandas = pytest.importorskip("pandas")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -4513,7 +4528,7 @@ def test_to_dataframe_w_none_dtypes_mapper(self):

     def test_to_dataframe_w_unsupported_dtypes_mapper(self):
         pytest.importorskip("pandas")
-        import numpy
+        numpy = pytest.importorskip("numpy")
         from google.cloud.bigquery.schema import SchemaField

         schema = [
@@ -4797,6 +4812,7 @@ def test_to_dataframe_max_results_w_create_bqstorage_client_no_warning(self):
         mock_client._ensure_bqstorage_client.assert_not_called()

     def test_to_dataframe_w_bqstorage_creates_client(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pandas")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -4830,6 +4846,7 @@ def test_to_dataframe_w_bqstorage_creates_client(self):
         bqstorage_client._transport.grpc_channel.close.assert_called_once()

     def test_to_dataframe_w_bqstorage_no_streams(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("pandas")
         pytest.importorskip("google.cloud.bigquery_storage")
         from google.cloud.bigquery import schema
@@ -4858,6 +4875,7 @@ def test_to_dataframe_w_bqstorage_no_streams(self):
         self.assertTrue(got.empty)

     def test_to_dataframe_w_bqstorage_logs_session(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pytest.importorskip("pyarrow")
@@ -4882,6 +4900,7 @@ def test_to_dataframe_w_bqstorage_logs_session(self):
         )

     def test_to_dataframe_w_bqstorage_empty_streams(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -4936,6 +4955,7 @@ def test_to_dataframe_w_bqstorage_empty_streams(self):
         self.assertTrue(got.empty)

     def test_to_dataframe_w_bqstorage_nonempty(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -5018,6 +5038,7 @@ def test_to_dataframe_w_bqstorage_nonempty(self):
         bqstorage_client._transport.grpc_channel.close.assert_not_called()

     def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
+        pytest.importorskip("numpy")
         bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -5070,6 +5091,7 @@ def test_to_dataframe_w_bqstorage_multiple_streams_return_unique_index(self):
         self.assertTrue(got.index.is_unique)

     def test_to_dataframe_w_bqstorage_updates_progress_bar(self):
+        pytest.importorskip("numpy")
         bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -5147,6 +5169,7 @@ def blocking_to_arrow(*args, **kwargs):
         tqdm_mock().close.assert_called_once()

     def test_to_dataframe_w_bqstorage_exits_on_keyboardinterrupt(self):
+        pytest.importorskip("numpy")
         bigquery_storage = pytest.importorskip("google.cloud.bigquery_storage")
         pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -5322,6 +5345,7 @@ def test_to_dataframe_w_bqstorage_snapshot(self):
             row_iterator.to_dataframe(bqstorage_client)

     def test_to_dataframe_concat_categorical_dtype_w_pyarrow(self):
+        pytest.importorskip("numpy")
         pytest.importorskip("google.cloud.bigquery_storage")
         pandas = pytest.importorskip("pandas")
         pyarrow = pytest.importorskip("pyarrow")
@@ -5604,7 +5628,7 @@ def test_rowiterator_to_geodataframe_delegation(self, to_dataframe):
         """
         pandas = pytest.importorskip("pandas")
         geopandas = pytest.importorskip("geopandas")
-        import numpy
+        numpy = pytest.importorskip("numpy")
         from shapely import wkt

         row_iterator = self._make_one_from_data(
3 changes: 2 additions & 1 deletion tests/unit/test_table_arrow.py
@@ -18,7 +18,8 @@
 import google.cloud.bigquery.table


-pyarrow = pytest.importorskip("pyarrow", minversion="3.0.0")
+pytest.importorskip("numpy")
+pytest.importorskip("pyarrow", minversion="3.0.0")


 def test_to_arrow_with_jobs_query_response():