-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
diff --git a/docs/reference/bigframes.pandas/frame.rst b/docs/reference/bigframes.pandas/frame.rst index ea4c6dec1c..ebc56e1e18 100644 --- a/docs/reference/bigframes.pandas/frame.rst +++ b/docs/reference/bigframes.pandas/frame.rst @@ -41,4 +41,4 @@ AI operators .. autoclass:: bigframes.operations.ai.AIAccessor :members: :inherited-members: - :undoc-members: \ No newline at end of file + :undoc-members: diff --git a/docs/reference/bigframes.pandas/indexing.rst b/docs/reference/bigframes.pandas/indexing.rst index e25e8652ec..86ce31ba13 100644 --- a/docs/reference/bigframes.pandas/indexing.rst +++ b/docs/reference/bigframes.pandas/indexing.rst @@ -18,4 +18,4 @@ Index objects .. autoclass:: bigframes.core.indexes.datetimes.DatetimeIndex :members: :inherited-members: - :undoc-members: \ No newline at end of file + :undoc-members: diff --git a/noxfile.py b/noxfile.py index 2d0edfc1b0..e83ed8a9aa 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,7 +28,7 @@ import nox import nox.sessions -BLACK_VERSION = "black==22.3.0" +BLACK_VERSION = "black==25.1.0" FLAKE8_VERSION = "flake8==7.1.2" ISORT_VERSION = "isort==5.12.0" MYPY_VERSION = "mypy==1.15.0" diff --git a/owlbot.py b/owlbot.py index b9145d4367..48b5fe978b 100644 --- a/owlbot.py +++ b/owlbot.py @@ -59,6 +59,7 @@ ".kokoro/presubmit/common.cfg", # Temporary workaround to update docs job to use python 3.10 ".github/workflows/docs.yml", + ".flake8", ], ) diff --git a/samples/dbt/README.md b/samples/dbt/README.md index c52b633116..4e97faf35b 100644 --- a/samples/dbt/README.md +++ b/samples/dbt/README.md @@ -59,4 +59,4 @@ Follow these steps to run the Python models using dbt Core. ```bash dbt run --select your_model_name - ``` \ No newline at end of file + ``` diff --git a/samples/snippets/classification_boosted_tree_model_test.py b/samples/snippets/classification_boosted_tree_model_test.py index c79706a020..7dc7318b02 100644 --- a/samples/snippets/classification_boosted_tree_model_test.py +++ b/samples/snippets/classification_boosted_tree_model_test.py @@ -31,7 +31,10 @@ def test_boosted_tree_model(random_model_id: str) -> None: "functional_weight", ), ) - input_data["dataframe"] = bpd.Series("training", index=input_data.index,).case_when( + input_data["dataframe"] = bpd.Series( + "training", + index=input_data.index, + ).case_when( [ (((input_data["functional_weight"] % 10) == 8), "evaluation"), (((input_data["functional_weight"] % 10) == 9), "prediction"), diff --git a/samples/snippets/logistic_regression_prediction_test.py b/samples/snippets/logistic_regression_prediction_test.py index dd92f8f3e3..6bf1671070 100644 --- a/samples/snippets/logistic_regression_prediction_test.py +++ b/samples/snippets/logistic_regression_prediction_test.py @@ -63,7 +63,10 @@ def test_logistic_regression_prediction(random_model_id: str) -> None: "functional_weight", ), ) - input_data["dataframe"] = bpd.Series("training", index=input_data.index,).case_when( + input_data["dataframe"] = bpd.Series( + "training", + index=input_data.index, + ).case_when( [ (((input_data["functional_weight"] % 10) == 8), "evaluation"), (((input_data["functional_weight"] % 10) == 9), "prediction"), diff --git a/scripts/create_read_gbq_colab_benchmark_tables.py b/scripts/create_read_gbq_colab_benchmark_tables.py index 63419bc660..b367ad695c 100644 --- a/scripts/create_read_gbq_colab_benchmark_tables.py +++ b/scripts/create_read_gbq_colab_benchmark_tables.py @@ -463,7 +463,7 @@ def worker_initializer(project_id: str | None): def worker_process_item( - work_item: tuple[str, Sequence[tuple[str, 
str, int | None]], int] + work_item: tuple[str, Sequence[tuple[str, str, int | None]], int], ): global worker_client, worker_rng diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst index 4fd239765b..30ad03d050 100644 --- a/scripts/readme-gen/templates/README.tmpl.rst +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -84,4 +84,4 @@ to `browse the source`_ and `report issues`_. {% endif %} -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file +.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/testing/.gitignore b/testing/.gitignore index b05fbd6308..40f47fa771 100644 --- a/testing/.gitignore +++ b/testing/.gitignore @@ -1,3 +1,3 @@ test-env.sh service-account.json -client-secrets.json \ No newline at end of file +client-secrets.json diff --git a/tests/data/nested_structs.jsonl b/tests/data/nested_structs.jsonl index f57214b0b3..6fa160f8bc 100644 --- a/tests/data/nested_structs.jsonl +++ b/tests/data/nested_structs.jsonl @@ -1,2 +1,2 @@ {"id": 1, "person": {"name": "Alice", "age":30, "address": {"city": "New York", "country": "USA"}}} -{"id": 2, "person": {"name": "Bob", "age":25, "address": {"city": "London", "country": "UK"}}} \ No newline at end of file +{"id": 2, "person": {"name": "Bob", "age":25, "address": {"city": "London", "country": "UK"}}} diff --git a/tests/system/small/ml/test_model_selection.py b/tests/system/small/ml/test_model_selection.py index c1a1e073b9..560bcf51b4 100644 --- a/tests/system/small/ml/test_model_selection.py +++ b/tests/system/small/ml/test_model_selection.py @@ -249,7 +249,13 @@ def test_train_test_split_value_error(penguins_df_default_index, train_size, tes ) def test_train_test_split_stratify(df_fixture, request): df = request.getfixturevalue(df_fixture) - X = df[["species", "island", "culmen_length_mm",]].rename( + X = df[ + [ + "species", + "island", + "culmen_length_mm", + ] + ].rename( columns={"species": "x_species"} ) # Keep "species" col just for easy checking. Rename to avoid conflicts. 
y = df[["species"]] diff --git a/tests/system/small/operations/test_strings.py b/tests/system/small/operations/test_strings.py index a720614892..e3811aa514 100644 --- a/tests/system/small/operations/test_strings.py +++ b/tests/system/small/operations/test_strings.py @@ -306,7 +306,7 @@ def test_isnumeric(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -318,7 +318,7 @@ def test_isalpha(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -332,7 +332,7 @@ def test_isdigit(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -344,7 +344,7 @@ def test_isdecimal(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -356,7 +356,7 @@ def test_isalnum(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -368,7 +368,7 @@ def test_isspace(weird_strings, weird_strings_pd): pd.testing.assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -380,7 +380,7 @@ def test_islower(weird_strings, weird_strings_pd): assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) @@ -392,7 +392,7 @@ def test_isupper(weird_strings, weird_strings_pd): assert_series_equal( bf_result, - pd_result.astype(pd.BooleanDtype()) + pd_result.astype(pd.BooleanDtype()), # the dtype here is a case of intentional diversion from pandas # see go/bigframes-dtypes ) diff --git a/tests/system/small/test_pandas.py b/tests/system/small/test_pandas.py index 550a75e1bb..cf67213b3f 100644 --- a/tests/system/small/test_pandas.py +++ b/tests/system/small/test_pandas.py @@ -102,7 +102,7 @@ def test_get_dummies_dataframe(scalars_dfs, kwargs): # dtype argument above is needed for pandas v1 only # adjust for expected dtype differences - for (column_name, type_name) in zip(pd_result.columns, pd_result.dtypes): + for column_name, type_name in zip(pd_result.columns, pd_result.dtypes): if type_name == "bool": pd_result[column_name] = pd_result[column_name].astype("boolean") @@ -131,7 +131,7 @@ def test_get_dummies_dataframe_duplicate_labels(scalars_dfs): # dtype argument above is needed for pandas v1 only # adjust for expected dtype differences - for (column_name, type_name) in zip(pd_result.columns, pd_result.dtypes): + for column_name, type_name in zip(pd_result.columns, pd_result.dtypes): if type_name == "bool": pd_result[column_name] = pd_result[column_name].astype("boolean") @@ -148,7 +148,7 @@ def 
test_get_dummies_series(scalars_dfs): # dtype argument above is needed for pandas v1 only # adjust for expected dtype differences - for (column_name, type_name) in zip(pd_result.columns, pd_result.dtypes): + for column_name, type_name in zip(pd_result.columns, pd_result.dtypes): if type_name == "bool": # pragma: NO COVER pd_result[column_name] = pd_result[column_name].astype("boolean") pd_result.columns = pd_result.columns.astype(object) @@ -169,7 +169,7 @@ def test_get_dummies_series_nameless(scalars_dfs): # dtype argument above is needed for pandas v1 only # adjust for expected dtype differences - for (column_name, type_name) in zip(pd_result.columns, pd_result.dtypes): + for column_name, type_name in zip(pd_result.columns, pd_result.dtypes): if type_name == "bool": # pragma: NO COVER pd_result[column_name] = pd_result[column_name].astype("boolean") pd_result.columns = pd_result.columns.astype(object) @@ -506,9 +506,11 @@ def _convert_pandas_category(pd_s: pd.Series): data = [pd.NA] * len(pd_s) else: data = [ - {left_key: interval.left, right_key: interval.right} # type: ignore - if pd.notna(val) - else pd.NA + ( + {left_key: interval.left, right_key: interval.right} # type: ignore + if pd.notna(val) + else pd.NA + ) for val, interval in zip(pd_s, pd_s.cat.categories[pd_s.cat.codes]) # type: ignore ] diff --git a/tests/system/small/test_series.py b/tests/system/small/test_series.py index 3f64234293..c794ed7f46 100644 --- a/tests/system/small/test_series.py +++ b/tests/system/small/test_series.py @@ -1471,7 +1471,11 @@ def test_isin_bigframes_index(scalars_dfs, session): scalars_df, scalars_pandas_df = scalars_dfs bf_result = ( scalars_df["string_col"] - .isin(bigframes.pandas.Index(["Hello, World!", "Hi", "こんにちは"], session=session)) + .isin( + bigframes.pandas.Index( + ["Hello, World!", "Hi", "こんにちは"], session=session + ) + ) .to_pandas() ) pd_result = ( @@ -4515,9 +4519,7 @@ def foo(x: int, y: int, df): ) pd_result = ( - scalars_pandas_df_index[column] - .pipe((foo, "df"), x=7, y=9) - .pipe(lambda x: x**2) + scalars_pandas_df_index[column].pipe((foo, "df"), x=7, y=9).pipe(lambda x: x**2) ) assert_series_equal(bf_result, pd_result) diff --git a/tests/unit/core/compile/googlesql/test_query.py b/tests/unit/core/compile/googlesql/test_query.py index b8d1d024e2..e6263671c1 100644 --- a/tests/unit/core/compile/googlesql/test_query.py +++ b/tests/unit/core/compile/googlesql/test_query.py @@ -46,9 +46,9 @@ def test_table_expression(table_id, dataset_id, project_id, expected): def test_from_item_w_table_name(table_name, alias, expected): expr = sql.FromItem( sql.TableExpression(table_id=table_name), - as_alias=None - if alias is None - else sql.AsAlias(sql.AliasExpression(alias=alias)), + as_alias=( + None if alias is None else sql.AsAlias(sql.AliasExpression(alias=alias)) + ), ) assert expr.sql() == expected diff --git a/tests/unit/core/test_expression.py b/tests/unit/core/test_expression.py index 4c3d233879..09c7d033b0 100644 --- a/tests/unit/core/test_expression.py +++ b/tests/unit/core/test_expression.py @@ -105,7 +105,7 @@ def test_nested_expression_dtypes_are_cached(): def _create_field_bindings( - col_dtypes: typing.Dict[str, dtypes.Dtype] + col_dtypes: typing.Dict[str, dtypes.Dtype], ) -> typing.Dict[ids.ColumnId, field.Field]: return { ids.ColumnId(col): field.Field(ids.ColumnId(col), dtype) diff --git a/third_party/bigframes_vendored/ibis/backends/bigquery/__init__.py b/third_party/bigframes_vendored/ibis/backends/bigquery/__init__.py index a87cb081cb..5510919dca 100644 --- 
a/third_party/bigframes_vendored/ibis/backends/bigquery/__init__.py +++ b/third_party/bigframes_vendored/ibis/backends/bigquery/__init__.py @@ -957,12 +957,9 @@ def version(self): def create_table( self, name: str, - obj: ir.Table - | pd.DataFrame - | pa.Table - | pl.DataFrame - | pl.LazyFrame - | None = None, + obj: ( + ir.Table | pd.DataFrame | pa.Table | pl.DataFrame | pl.LazyFrame | None + ) = None, *, schema: bigframes_vendored.ibis.Schema | None = None, database: str | None = None, diff --git a/third_party/bigframes_vendored/ibis/backends/bigquery/backend.py b/third_party/bigframes_vendored/ibis/backends/bigquery/backend.py index 3d214766dc..a1e8873b84 100644 --- a/third_party/bigframes_vendored/ibis/backends/bigquery/backend.py +++ b/third_party/bigframes_vendored/ibis/backends/bigquery/backend.py @@ -844,12 +844,9 @@ def version(self): def create_table( self, name: str, - obj: ir.Table - | pd.DataFrame - | pa.Table - | pl.DataFrame - | pl.LazyFrame - | None = None, + obj: ( + ir.Table | pd.DataFrame | pa.Table | pl.DataFrame | pl.LazyFrame | None + ) = None, *, schema: sch.SchemaLike | None = None, database: str | None = None, diff --git a/third_party/bigframes_vendored/ibis/backends/bigquery/converter.py b/third_party/bigframes_vendored/ibis/backends/bigquery/converter.py index c2db774b2b..2afccd454a 100644 --- a/third_party/bigframes_vendored/ibis/backends/bigquery/converter.py +++ b/third_party/bigframes_vendored/ibis/backends/bigquery/converter.py @@ -13,10 +13,6 @@ def convert_GeoSpatial(cls, s, dtype, pandas_type): return gpd.GeoSeries(shp.from_wkt(s)) - convert_Point = ( - convert_LineString - ) = ( - convert_Polygon - ) = ( - convert_MultiLineString - ) = convert_MultiPoint = convert_MultiPolygon = convert_GeoSpatial + convert_Point = convert_LineString = convert_Polygon = convert_MultiLineString = ( + convert_MultiPoint + ) = convert_MultiPolygon = convert_GeoSpatial diff --git a/third_party/bigframes_vendored/ibis/backends/sql/compilers/base.py b/third_party/bigframes_vendored/ibis/backends/sql/compilers/base.py index acccd7ea6c..906461e3cd 100644 --- a/third_party/bigframes_vendored/ibis/backends/sql/compilers/base.py +++ b/third_party/bigframes_vendored/ibis/backends/sql/compilers/base.py @@ -1084,9 +1084,9 @@ def visit_VarianceStandardDevCovariance(self, op, *, how, where, **kw): funcname = f"{funcs[type(op)]}_{hows[how]}" return self.agg[funcname](*args, where=where) - visit_Variance = ( - visit_StandardDev - ) = visit_Covariance = visit_VarianceStandardDevCovariance + visit_Variance = visit_StandardDev = visit_Covariance = ( + visit_VarianceStandardDevCovariance + ) def visit_SimpleCase(self, op, *, base=None, cases, results, default): return sge.Case( @@ -1537,11 +1537,9 @@ def visit_Add(self, op, *, left, right): def visit_Subtract(self, op, *, left, right): return sge.Sub(this=left, expression=right) - visit_DateSub = ( - visit_DateDiff - ) = ( - visit_TimestampSub - ) = visit_TimestampDiff = visit_IntervalSubtract = visit_Subtract + visit_DateSub = visit_DateDiff = visit_TimestampSub = visit_TimestampDiff = ( + visit_IntervalSubtract + ) = visit_Subtract @parenthesize_inputs def visit_Multiply(self, op, *, left, right): diff --git a/third_party/bigframes_vendored/ibis/backends/sql/datatypes.py b/third_party/bigframes_vendored/ibis/backends/sql/datatypes.py index fce0643783..d2211e2efe 100644 --- a/third_party/bigframes_vendored/ibis/backends/sql/datatypes.py +++ b/third_party/bigframes_vendored/ibis/backends/sql/datatypes.py @@ -414,11 +414,7 @@ def 
_from_ibis_SpecificGeometry(cls, dtype: dt.GeoSpatial): this = getattr(typecode, dtype.geotype.upper()) return sge.DataType(this=this, expressions=expressions) - _from_ibis_Point = ( - _from_ibis_LineString - ) = ( - _from_ibis_Polygon - ) = ( + _from_ibis_Point = _from_ibis_LineString = _from_ibis_Polygon = ( _from_ibis_MultiLineString ) = _from_ibis_MultiPoint = _from_ibis_MultiPolygon = _from_ibis_SpecificGeometry @@ -465,11 +461,9 @@ def _from_sqlglot_GEOGRAPHY( def _from_sqlglot_TINYINT(cls) -> dt.Int64: return dt.Int64(nullable=cls.default_nullable) - _from_sqlglot_UINT = ( - _from_sqlglot_USMALLINT - ) = ( - _from_sqlglot_UTINYINT - ) = _from_sqlglot_INT = _from_sqlglot_SMALLINT = _from_sqlglot_TINYINT + _from_sqlglot_UINT = _from_sqlglot_USMALLINT = _from_sqlglot_UTINYINT = ( + _from_sqlglot_INT + ) = _from_sqlglot_SMALLINT = _from_sqlglot_TINYINT @classmethod def _from_sqlglot_UBIGINT(cls) -> NoReturn: diff --git a/third_party/bigframes_vendored/ibis/common/bases.py b/third_party/bigframes_vendored/ibis/common/bases.py index c9389decec..cfb5b41be9 100644 --- a/third_party/bigframes_vendored/ibis/common/bases.py +++ b/third_party/bigframes_vendored/ibis/common/bases.py @@ -128,8 +128,7 @@ def __prohibit_inheritance__(cls, **kwargs): @collections.abc.Hashable.register class Hashable(Abstract): @abstractmethod - def __hash__(self) -> int: - ... + def __hash__(self) -> int: ... class Comparable(Abstract): @@ -147,8 +146,7 @@ class Comparable(Abstract): __cache__ = {} @abstractmethod - def __equals__(self, other) -> bool: - ... + def __equals__(self, other) -> bool: ... def __eq__(self, other) -> bool: if self is other: diff --git a/third_party/bigframes_vendored/ibis/common/collections.py b/third_party/bigframes_vendored/ibis/common/collections.py index 363e51c534..afc844581c 100644 --- a/third_party/bigframes_vendored/ibis/common/collections.py +++ b/third_party/bigframes_vendored/ibis/common/collections.py @@ -29,8 +29,7 @@ class Iterable(Abstract, Generic[V]): """Iterable abstract base class for quicker isinstance checks.""" @abstractmethod - def __iter__(self): - ... + def __iter__(self): ... @collections.abc.Reversible.register @@ -38,8 +37,7 @@ class Reversible(Iterable[V]): """Reverse iterable abstract base class for quicker isinstance checks.""" @abstractmethod - def __reversed__(self): - ... + def __reversed__(self): ... @collections.abc.Iterator.register @@ -47,8 +45,7 @@ class Iterator(Iterable[V]): """Iterator abstract base class for quicker isinstance checks.""" @abstractmethod - def __next__(self): - ... + def __next__(self): ... def __iter__(self): return self @@ -59,8 +56,7 @@ class Sized(Abstract): """Sized abstract base class for quicker isinstance checks.""" @abstractmethod - def __len__(self): - ... + def __len__(self): ... @collections.abc.Container.register @@ -68,8 +64,7 @@ class Container(Abstract, Generic[V]): """Container abstract base class for quicker isinstance checks.""" @abstractmethod - def __contains__(self, x): - ... + def __contains__(self, x): ... @collections.abc.Collection.register @@ -82,8 +77,7 @@ class Sequence(Reversible[V], Collection[V]): """Sequence abstract base class for quicker isinstance checks.""" @abstractmethod - def __getitem__(self, index): - ... + def __getitem__(self, index): ... def __iter__(self): i = 0 @@ -127,8 +121,7 @@ class Mapping(Collection[K], Generic[K, V]): """Mapping abstract base class for quicker isinstance checks.""" @abstractmethod - def __getitem__(self, key): - ... + def __getitem__(self, key): ... 
def get(self, key, default=None): try: diff --git a/third_party/bigframes_vendored/ibis/common/deferred.py b/third_party/bigframes_vendored/ibis/common/deferred.py index 1c6f06e223..d1291fb488 100644 --- a/third_party/bigframes_vendored/ibis/common/deferred.py +++ b/third_party/bigframes_vendored/ibis/common/deferred.py @@ -51,8 +51,7 @@ def resolve(self, context: dict): """ @abstractmethod - def __eq__(self, other: Resolver) -> bool: - ... + def __eq__(self, other: Resolver) -> bool: ... @classmethod def __coerce__(cls, value): @@ -579,13 +578,11 @@ def _contains_deferred(obj: Any) -> bool: @overload -def deferrable(*, repr: str | None = None) -> Callable[[F], F]: - ... +def deferrable(*, repr: str | None = None) -> Callable[[F], F]: ... @overload -def deferrable(func: F) -> F: - ... +def deferrable(func: F) -> F: ... def deferrable(func=None, *, repr=None): diff --git a/third_party/bigframes_vendored/ibis/common/patterns.py b/third_party/bigframes_vendored/ibis/common/patterns.py index 5d4b5ba35d..3a62c94eb1 100644 --- a/third_party/bigframes_vendored/ibis/common/patterns.py +++ b/third_party/bigframes_vendored/ibis/common/patterns.py @@ -225,8 +225,7 @@ def describe(self, plural=False): return f"matching {self!r}" @abstractmethod - def __eq__(self, other: Pattern) -> bool: - ... + def __eq__(self, other: Pattern) -> bool: ... def __invert__(self) -> Not: """Syntax sugar for matching the inverse of the pattern.""" diff --git a/third_party/bigframes_vendored/ibis/common/typing.py b/third_party/bigframes_vendored/ibis/common/typing.py index a464054fdb..bffab4baed 100644 --- a/third_party/bigframes_vendored/ibis/common/typing.py +++ b/third_party/bigframes_vendored/ibis/common/typing.py @@ -249,7 +249,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: class CoercionError(Exception): - ... + """Couldn't coerce types.""" class Coercible(Abstract): @@ -262,8 +262,7 @@ class Coercible(Abstract): @classmethod @abstractmethod - def __coerce__(cls, value: Any, **kwargs: Any) -> Self: - ... + def __coerce__(cls, value: Any, **kwargs: Any) -> Self: ... def get_defining_frame(obj): diff --git a/third_party/bigframes_vendored/ibis/expr/api.py b/third_party/bigframes_vendored/ibis/expr/api.py index 4ef10e449b..f6a9e4a614 100644 --- a/third_party/bigframes_vendored/ibis/expr/api.py +++ b/third_party/bigframes_vendored/ibis/expr/api.py @@ -777,13 +777,11 @@ def timestamp( second: int | ir.IntegerValue | Deferred, /, timezone: str | None = None, -) -> TimestampValue: - ... +) -> TimestampValue: ... @overload -def timestamp(value_or_year: Any, /, timezone: str | None = None) -> TimestampValue: - ... +def timestamp(value_or_year: Any, /, timezone: str | None = None) -> TimestampValue: ... @deferrable @@ -881,13 +879,11 @@ def date( month: int | ir.IntegerValue | Deferred, day: int | ir.IntegerValue | Deferred, /, -) -> DateValue: - ... +) -> DateValue: ... @overload -def date(value_or_year: Any, /) -> DateValue: - ... +def date(value_or_year: Any, /) -> DateValue: ... @deferrable @@ -956,13 +952,11 @@ def time( minute: int | ir.IntegerValue | Deferred, second: int | ir.IntegerValue | Deferred, /, -) -> TimeValue: - ... +) -> TimeValue: ... @overload -def time(value_or_hour: Any, /) -> TimeValue: - ... +def time(value_or_hour: Any, /) -> TimeValue: ... 
@deferrable diff --git a/third_party/bigframes_vendored/ibis/expr/datatypes/core.py b/third_party/bigframes_vendored/ibis/expr/datatypes/core.py index eb597cfc6a..32c54473cd 100644 --- a/third_party/bigframes_vendored/ibis/expr/datatypes/core.py +++ b/third_party/bigframes_vendored/ibis/expr/datatypes/core.py @@ -113,13 +113,11 @@ class DataType(Concrete, Coercible): @property @abstractmethod - def scalar(self): - ... + def scalar(self): ... @property @abstractmethod - def column(self): - ... + def column(self): ... # TODO(kszucs): remove it, prefer to use Annotable.__repr__ instead @property diff --git a/third_party/bigframes_vendored/ibis/expr/operations/reductions.py b/third_party/bigframes_vendored/ibis/expr/operations/reductions.py index 34f6406e0c..650f524287 100644 --- a/third_party/bigframes_vendored/ibis/expr/operations/reductions.py +++ b/third_party/bigframes_vendored/ibis/expr/operations/reductions.py @@ -167,9 +167,9 @@ def dtype(self): return dt.float64 elif dtype.is_decimal(): return dt.Decimal( - precision=max(dtype.precision, 38) - if dtype.precision is not None - else None, + precision=( + max(dtype.precision, 38) if dtype.precision is not None else None + ), scale=max(dtype.scale, 2) if dtype.scale is not None else None, ) else: diff --git a/third_party/bigframes_vendored/ibis/expr/operations/udf.py b/third_party/bigframes_vendored/ibis/expr/operations/udf.py index 91366cace8..4ce45b41fa 100644 --- a/third_party/bigframes_vendored/ibis/expr/operations/udf.py +++ b/third_party/bigframes_vendored/ibis/expr/operations/udf.py @@ -35,9 +35,9 @@ EMPTY = inspect.Parameter.empty -_udf_name_cache: MutableMapping[ - type[core.Node], Iterable[int] -] = collections.defaultdict(itertools.count) +_udf_name_cache: MutableMapping[type[core.Node], Iterable[int]] = ( + collections.defaultdict(itertools.count) +) def _make_udf_name(name: str) -> str: @@ -186,8 +186,7 @@ class scalar(_UDF): @overload @classmethod - def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: - ... + def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: ... @overload @classmethod @@ -199,8 +198,7 @@ def builtin( catalog: str | None = None, signature: tuple[tuple[Any, ...], Any] | None = None, **kwargs: Any, - ) -> Callable[[Callable], Callable[..., ir.Value]]: - ... + ) -> Callable[[Callable], Callable[..., ir.Value]]: ... @util.experimental @classmethod @@ -263,8 +261,7 @@ def builtin( @overload @classmethod - def python(cls, fn: Callable) -> Callable[..., ir.Value]: - ... + def python(cls, fn: Callable) -> Callable[..., ir.Value]: ... @overload @classmethod @@ -276,8 +273,7 @@ def python( catalog: str | None = None, signature: tuple[tuple[Any, ...], Any] | None = None, **kwargs: Any, - ) -> Callable[[Callable], Callable[..., ir.Value]]: - ... + ) -> Callable[[Callable], Callable[..., ir.Value]]: ... @util.experimental @classmethod @@ -385,8 +381,7 @@ def python( @overload @classmethod - def pandas(cls, fn: Callable) -> Callable[..., ir.Value]: - ... + def pandas(cls, fn: Callable) -> Callable[..., ir.Value]: ... @overload @classmethod @@ -398,8 +393,7 @@ def pandas( catalog: str | None = None, signature: tuple[tuple[Any, ...], Any] | None = None, **kwargs: Any, - ) -> Callable[[Callable], Callable[..., ir.Value]]: - ... + ) -> Callable[[Callable], Callable[..., ir.Value]]: ... @util.experimental @classmethod @@ -484,8 +478,7 @@ def pandas( @overload @classmethod - def pyarrow(cls, fn: Callable) -> Callable[..., ir.Value]: - ... + def pyarrow(cls, fn: Callable) -> Callable[..., ir.Value]: ... 
@overload @classmethod @@ -497,8 +490,7 @@ def pyarrow( catalog: str | None = None, signature: tuple[tuple[Any, ...], Any] | None = None, **kwargs: Any, - ) -> Callable[[Callable], Callable[..., ir.Value]]: - ... + ) -> Callable[[Callable], Callable[..., ir.Value]]: ... @util.experimental @classmethod @@ -586,8 +578,7 @@ class agg(_UDF): @overload @classmethod - def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: - ... + def builtin(cls, fn: Callable) -> Callable[..., ir.Value]: ... @overload @classmethod @@ -599,8 +590,7 @@ def builtin( catalog: str | None = None, signature: tuple[tuple[Any, ...], Any] | None = None, **kwargs: Any, - ) -> Callable[[Callable], Callable[..., ir.Value]]: - ... + ) -> Callable[[Callable], Callable[..., ir.Value]]: ... @util.experimental @classmethod diff --git a/third_party/bigframes_vendored/ibis/expr/types/temporal.py b/third_party/bigframes_vendored/ibis/expr/types/temporal.py index 72e41dd942..9639b602aa 100644 --- a/third_party/bigframes_vendored/ibis/expr/types/temporal.py +++ b/third_party/bigframes_vendored/ibis/expr/types/temporal.py @@ -245,10 +245,12 @@ def __rsub__(self, other: ops.Value[dt.Interval | dt.Time, ds.Any]): def delta( self, other: datetime.time | Value[dt.Time], - part: Literal[ - "hour", "minute", "second", "millisecond", "microsecond", "nanosecond" - ] - | Value[dt.String], + part: ( + Literal[ + "hour", "minute", "second", "millisecond", "microsecond", "nanosecond" + ] + | Value[dt.String] + ), ) -> ir.IntegerValue: """Compute the number of `part`s between two times. @@ -736,20 +738,22 @@ def __rsub__(self, other: ops.Value[dt.Timestamp | dt.Interval, ds.Any]): def delta( self, other: datetime.datetime | Value[dt.Timestamp], - part: Literal[ - "year", - "quarter", - "month", - "week", - "day", - "hour", - "minute", - "second", - "millisecond", - "microsecond", - "nanosecond", - ] - | Value[dt.String], + part: ( + Literal[ + "year", + "quarter", + "month", + "week", + "day", + "hour", + "minute", + "second", + "millisecond", + "microsecond", + "nanosecond", + ] + | Value[dt.String] + ), ) -> ir.IntegerValue: """Compute the number of `part`s between two timestamps. 
diff --git a/third_party/bigframes_vendored/ibis/formats/pandas.py b/third_party/bigframes_vendored/ibis/formats/pandas.py index f47f94853c..a19772a9db 100644 --- a/third_party/bigframes_vendored/ibis/formats/pandas.py +++ b/third_party/bigframes_vendored/ibis/formats/pandas.py @@ -179,13 +179,9 @@ def convert_GeoSpatial(cls, s, dtype, pandas_type): return gpd.GeoSeries(s) return gpd.GeoSeries.from_wkb(s) - convert_Point = ( - convert_LineString - ) = ( - convert_Polygon - ) = ( - convert_MultiLineString - ) = convert_MultiPoint = convert_MultiPolygon = convert_GeoSpatial + convert_Point = convert_LineString = convert_Polygon = convert_MultiLineString = ( + convert_MultiPoint + ) = convert_MultiPolygon = convert_GeoSpatial @classmethod def convert_default(cls, s, dtype, pandas_type): diff --git a/third_party/bigframes_vendored/ibis/selectors.py b/third_party/bigframes_vendored/ibis/selectors.py index 401aba253f..5802189589 100644 --- a/third_party/bigframes_vendored/ibis/selectors.py +++ b/third_party/bigframes_vendored/ibis/selectors.py @@ -408,9 +408,11 @@ def expand(self, table: ir.Table) -> Sequence[ir.Value]: @public def across( selector: Selector | Iterable[str] | str, - func: Deferred - | Callable[[ir.Value], ir.Value] - | Mapping[str | None, Deferred | Callable[[ir.Value], ir.Value]], + func: ( + Deferred + | Callable[[ir.Value], ir.Value] + | Mapping[str | None, Deferred | Callable[[ir.Value], ir.Value]] + ), names: str | Callable[[str, str | None], str] | None = None, ) -> Across: """Apply data transformations across multiple columns. diff --git a/third_party/bigframes_vendored/pandas/core/computation/eval.py b/third_party/bigframes_vendored/pandas/core/computation/eval.py index d3d11a9c2a..0d80a336a3 100644 --- a/third_party/bigframes_vendored/pandas/core/computation/eval.py +++ b/third_party/bigframes_vendored/pandas/core/computation/eval.py @@ -343,9 +343,9 @@ def eval( if inplace and isinstance(target, NDFrame): target.loc[:, assigner] = ret else: - target[ # pyright: ignore[reportGeneralTypeIssues] - assigner - ] = ret + target[assigner] = ( # pyright: ignore[reportGeneralTypeIssues] + ret + ) except (TypeError, IndexError) as err: raise ValueError("Cannot assign expression output to target") from err diff --git a/third_party/bigframes_vendored/pandas/core/dtypes/inference.py b/third_party/bigframes_vendored/pandas/core/dtypes/inference.py index fcbb4c242f..7875c297bc 100644 --- a/third_party/bigframes_vendored/pandas/core/dtypes/inference.py +++ b/third_party/bigframes_vendored/pandas/core/dtypes/inference.py @@ -1,5 +1,5 @@ # Contains code from https://github.com/pandas-dev/pandas/blob/main/pandas/core/dtypes/inference.py -""" basic inference routines """ +"""basic inference routines""" from __future__ import annotations diff --git a/third_party/bigframes_vendored/pandas/core/series.py b/third_party/bigframes_vendored/pandas/core/series.py index 0160a7eb50..7491a8a6b0 100644 --- a/third_party/bigframes_vendored/pandas/core/series.py +++ b/third_party/bigframes_vendored/pandas/core/series.py @@ -1,6 +1,7 @@ """ Data structure for 1-dimensional cross-sectional and time series data """ + from __future__ import annotations from typing import ( diff --git a/third_party/bigframes_vendored/pandas/io/gbq.py b/third_party/bigframes_vendored/pandas/io/gbq.py index 3dae2b6bbe..c8e4ddbb06 100644 --- a/third_party/bigframes_vendored/pandas/io/gbq.py +++ b/third_party/bigframes_vendored/pandas/io/gbq.py @@ -1,5 +1,5 @@ # Contains code from 
https://github.com/pandas-dev/pandas/blob/main/pandas/io/gbq.py -""" Google BigQuery support """ +"""Google BigQuery support""" from __future__ import annotations diff --git a/third_party/bigframes_vendored/pandas/io/parquet.py b/third_party/bigframes_vendored/pandas/io/parquet.py index aec911d2fe..a7d6d82142 100644 --- a/third_party/bigframes_vendored/pandas/io/parquet.py +++ b/third_party/bigframes_vendored/pandas/io/parquet.py @@ -1,5 +1,5 @@ # Contains code from https://github.com/pandas-dev/pandas/blob/main/pandas/io/parquet.py -""" parquet compat """ +"""parquet compat""" from __future__ import annotations from bigframes import constants diff --git a/third_party/bigframes_vendored/pandas/io/pickle.py b/third_party/bigframes_vendored/pandas/io/pickle.py index 33088dc019..5b5376ebcc 100644 --- a/third_party/bigframes_vendored/pandas/io/pickle.py +++ b/third_party/bigframes_vendored/pandas/io/pickle.py @@ -1,5 +1,5 @@ # Contains code from https://github.com/pandas-dev/pandas/blob/main/pandas/io/pickle.py -""" pickle compat """ +"""pickle compat""" from __future__ import annotations from pandas._typing import ( diff --git a/third_party/bigframes_vendored/pandas/pandas/_typing.py b/third_party/bigframes_vendored/pandas/pandas/_typing.py index e665339fc8..114054f0ab 100644 --- a/third_party/bigframes_vendored/pandas/pandas/_typing.py +++ b/third_party/bigframes_vendored/pandas/pandas/_typing.py @@ -234,13 +234,11 @@ def flush(self) -> Any: class ReadPickleBuffer(ReadBuffer[bytes], Protocol): - def readline(self) -> bytes: - ... + def readline(self) -> bytes: ... class WriteExcelBuffer(WriteBuffer[bytes], Protocol): - def truncate(self, size: int | None = ...) -> int: - ... + def truncate(self, size: int | None = ...) -> int: ... class ReadCsvBuffer(ReadBuffer[AnyStr_co], Protocol): diff --git a/third_party/bigframes_vendored/sklearn/compose/_column_transformer.py b/third_party/bigframes_vendored/sklearn/compose/_column_transformer.py index e4e71c1ff9..d7043e4e86 100644 --- a/third_party/bigframes_vendored/sklearn/compose/_column_transformer.py +++ b/third_party/bigframes_vendored/sklearn/compose/_column_transformer.py @@ -1,4 +1,5 @@ """Utilities for meta-estimators""" + # Author: Joel Nothman # Andreas Mueller # License: BSD diff --git a/third_party/bigframes_vendored/sklearn/decomposition/_mf.py b/third_party/bigframes_vendored/sklearn/decomposition/_mf.py index c3c3a77b71..0c785ed73e 100644 --- a/third_party/bigframes_vendored/sklearn/decomposition/_mf.py +++ b/third_party/bigframes_vendored/sklearn/decomposition/_mf.py @@ -1,5 +1,4 @@ -""" Matrix Factorization. -""" +"""Matrix Factorization.""" # Author: Alexandre Gramfort # Olivier Grisel diff --git a/third_party/bigframes_vendored/sklearn/decomposition/_pca.py b/third_party/bigframes_vendored/sklearn/decomposition/_pca.py index f13c52bfb6..fd7391362f 100644 --- a/third_party/bigframes_vendored/sklearn/decomposition/_pca.py +++ b/third_party/bigframes_vendored/sklearn/decomposition/_pca.py @@ -1,5 +1,4 @@ -""" Principal Component Analysis. -""" +"""Principal Component Analysis.""" # Author: Alexandre Gramfort # Olivier Grisel diff --git a/third_party/bigframes_vendored/sklearn/pipeline.py b/third_party/bigframes_vendored/sklearn/pipeline.py index 96eaa903be..1709d6b911 100644 --- a/third_party/bigframes_vendored/sklearn/pipeline.py +++ b/third_party/bigframes_vendored/sklearn/pipeline.py @@ -2,6 +2,7 @@ The :mod:`sklearn.pipeline` module implements utilities to build a composite estimator, as a chain of transforms and estimators. 
""" + # Author: Edouard Duchesnay # Gael Varoquaux # Virgile Fritsch diff --git a/third_party/bigframes_vendored/sklearn/preprocessing/_encoder.py b/third_party/bigframes_vendored/sklearn/preprocessing/_encoder.py index 5476a9fb3c..b47db06b37 100644 --- a/third_party/bigframes_vendored/sklearn/preprocessing/_encoder.py +++ b/third_party/bigframes_vendored/sklearn/preprocessing/_encoder.py @@ -85,5 +85,6 @@ def transform(self, X): Returns: bigframes.dataframe.DataFrame: The result is categorized as index: number, value: number, - where index is the position of the dict seeing the category, and value is 0 or 1.""" + where index is the position of the dict seeing the category, and value is 0 or 1. + """ raise NotImplementedError(constants.ABSTRACT_METHOD_ERROR_MESSAGE)