Skip to content

Commit 16a541a

Browse files
committed
Ruff FBT001: fixed most boolean params
1 parent 9cdbf2b commit 16a541a

16 files changed

+148
-148
lines changed

duckdb/__init__.pyi

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -354,7 +354,7 @@ class DuckDBPyConnection:
354354
field_appearance_threshold: Optional[float] = None,
355355
map_inference_threshold: Optional[int] = None,
356356
maximum_sample_files: Optional[int] = None,
357-
filename: Optional[bool | str] = None,
357+
filename: Optional[Union[bool, str]] = None,
358358
hive_partitioning: Optional[bool] = None,
359359
union_by_name: Optional[bool] = None,
360360
hive_types: Optional[Dict[str, str]] = None,
@@ -368,12 +368,12 @@ class DuckDBPyConnection:
368368
self,
369369
path_or_buffer: Union[str, StringIO, TextIOBase],
370370
*,
371-
header: Optional[bool | int] = None,
371+
header: Optional[Union[bool, int]] = None,
372372
compression: Optional[str] = None,
373373
sep: Optional[str] = None,
374374
delimiter: Optional[str] = None,
375-
dtype: Optional[Dict[str, str] | List[str]] = None,
376-
na_values: Optional[str | List[str]] = None,
375+
dtype: Optional[Union[Dict[str, str], List[str]]] = None,
376+
na_values: Optional[Union[str, List[str]]] = None,
377377
skiprows: Optional[int] = None,
378378
quotechar: Optional[str] = None,
379379
escapechar: Optional[str] = None,
@@ -399,7 +399,7 @@ class DuckDBPyConnection:
399399
buffer_size: Optional[int] = None,
400400
decimal: Optional[str] = None,
401401
allow_quoted_nulls: Optional[bool] = None,
402-
filename: Optional[bool | str] = None,
402+
filename: Optional[Union[bool, str]] = None,
403403
hive_partitioning: Optional[bool] = None,
404404
union_by_name: Optional[bool] = None,
405405
hive_types: Optional[Dict[str, str]] = None,
@@ -409,12 +409,12 @@ class DuckDBPyConnection:
409409
self,
410410
path_or_buffer: Union[str, StringIO, TextIOBase],
411411
*,
412-
header: Optional[bool | int] = None,
412+
header: Optional[Union[bool, int]] = None,
413413
compression: Optional[str] = None,
414414
sep: Optional[str] = None,
415415
delimiter: Optional[str] = None,
416-
dtype: Optional[Dict[str, str] | List[str]] = None,
417-
na_values: Optional[str | List[str]] = None,
416+
dtype: Optional[Union[Dict[str, str], List[str]]] = None,
417+
na_values: Optional[Union[str, List[str]]] = None,
418418
skiprows: Optional[int] = None,
419419
quotechar: Optional[str] = None,
420420
escapechar: Optional[str] = None,
@@ -440,7 +440,7 @@ class DuckDBPyConnection:
440440
buffer_size: Optional[int] = None,
441441
decimal: Optional[str] = None,
442442
allow_quoted_nulls: Optional[bool] = None,
443-
filename: Optional[bool | str] = None,
443+
filename: Optional[Union[bool, str]] = None,
444444
hive_partitioning: Optional[bool] = None,
445445
union_by_name: Optional[bool] = None,
446446
hive_types: Optional[Dict[str, str]] = None,
@@ -655,7 +655,7 @@ class DuckDBPyRelation:
655655
def distinct(self) -> DuckDBPyRelation: ...
656656
def except_(self, other_rel: DuckDBPyRelation) -> DuckDBPyRelation: ...
657657
def execute(self, *args, **kwargs) -> DuckDBPyRelation: ...
658-
def explain(self, type: Optional[Literal["standard", "analyze"] | int] = "standard") -> str: ...
658+
def explain(self, type: Optional[Union[Literal["standard", "analyze"], int]] = "standard") -> str: ...
659659
def fetchall(self) -> List[Any]: ...
660660
def fetchmany(self, size: int = ...) -> List[Any]: ...
661661
def fetchnumpy(self) -> dict: ...
@@ -707,7 +707,7 @@ class DuckDBPyRelation:
707707
escapechar: Optional[str] = None,
708708
date_format: Optional[str] = None,
709709
timestamp_format: Optional[str] = None,
710-
quoting: Optional[str | int] = None,
710+
quoting: Optional[Union[str, int]] = None,
711711
encoding: Optional[str] = None,
712712
compression: Optional[str] = None,
713713
write_partition_columns: Optional[bool] = None,
@@ -721,8 +721,8 @@ class DuckDBPyRelation:
721721
self,
722722
file_name: str,
723723
compression: Optional[str] = None,
724-
field_ids: Optional[dict | str] = None,
725-
row_group_size_bytes: Optional[int | str] = None,
724+
field_ids: Optional[Union[dict, str]] = None,
725+
row_group_size_bytes: Optional[Union[int, str]] = None,
726726
row_group_size: Optional[int] = None,
727727
partition_by: Optional[List[str]] = None,
728728
write_partition_columns: Optional[bool] = None,
@@ -748,7 +748,7 @@ class DuckDBPyRelation:
748748
escapechar: Optional[str] = None,
749749
date_format: Optional[str] = None,
750750
timestamp_format: Optional[str] = None,
751-
quoting: Optional[str | int] = None,
751+
quoting: Optional[Union[str, int]] = None,
752752
encoding: Optional[str] = None,
753753
compression: Optional[str] = None,
754754
write_partition_columns: Optional[bool] = None,
@@ -761,8 +761,8 @@ class DuckDBPyRelation:
761761
self,
762762
file_name: str,
763763
compression: Optional[str] = None,
764-
field_ids: Optional[dict | str] = None,
765-
row_group_size_bytes: Optional[int | str] = None,
764+
field_ids: Optional[Union[dict, str]] = None,
765+
row_group_size_bytes: Optional[Union[int, str]] = None,
766766
row_group_size: Optional[int] = None,
767767
partition_by: Optional[List[str]] = None,
768768
write_partition_columns: Optional[bool] = None,
@@ -956,7 +956,7 @@ def read_json(
956956
field_appearance_threshold: Optional[float] = None,
957957
map_inference_threshold: Optional[int] = None,
958958
maximum_sample_files: Optional[int] = None,
959-
filename: Optional[bool | str] = None,
959+
filename: Optional[Union[bool, str]] = None,
960960
hive_partitioning: Optional[bool] = None,
961961
union_by_name: Optional[bool] = None,
962962
hive_types: Optional[Dict[str, str]] = None,
@@ -976,12 +976,12 @@ def from_query(
976976
def read_csv(
977977
path_or_buffer: Union[str, StringIO, TextIOBase],
978978
*,
979-
header: Optional[bool | int] = None,
979+
header: Optional[Union[bool, int]] = None,
980980
compression: Optional[str] = None,
981981
sep: Optional[str] = None,
982982
delimiter: Optional[str] = None,
983-
dtype: Optional[Dict[str, str] | List[str]] = None,
984-
na_values: Optional[str | List[str]] = None,
983+
dtype: Optional[Union[Dict[str, str], List[str]]] = None,
984+
na_values: Optional[Union[str, List[str]]] = None,
985985
skiprows: Optional[int] = None,
986986
quotechar: Optional[str] = None,
987987
escapechar: Optional[str] = None,
@@ -1007,7 +1007,7 @@ def read_csv(
10071007
buffer_size: Optional[int] = None,
10081008
decimal: Optional[str] = None,
10091009
allow_quoted_nulls: Optional[bool] = None,
1010-
filename: Optional[bool | str] = None,
1010+
filename: Optional[Union[bool, str]] = None,
10111011
hive_partitioning: Optional[bool] = None,
10121012
union_by_name: Optional[bool] = None,
10131013
hive_types: Optional[Dict[str, str]] = None,
@@ -1017,12 +1017,12 @@ def read_csv(
10171017
def from_csv_auto(
10181018
path_or_buffer: Union[str, StringIO, TextIOBase],
10191019
*,
1020-
header: Optional[bool | int] = None,
1020+
header: Optional[Union[bool, int]] = None,
10211021
compression: Optional[str] = None,
10221022
sep: Optional[str] = None,
10231023
delimiter: Optional[str] = None,
1024-
dtype: Optional[Dict[str, str] | List[str]] = None,
1025-
na_values: Optional[str | List[str]] = None,
1024+
dtype: Optional[Union[Dict[str, str], List[str]]] = None,
1025+
na_values: Optional[Union[str, List[str]]] = None,
10261026
skiprows: Optional[int] = None,
10271027
quotechar: Optional[str] = None,
10281028
escapechar: Optional[str] = None,
@@ -1048,7 +1048,7 @@ def from_csv_auto(
10481048
buffer_size: Optional[int] = None,
10491049
decimal: Optional[str] = None,
10501050
allow_quoted_nulls: Optional[bool] = None,
1051-
filename: Optional[bool | str] = None,
1051+
filename: Optional[Union[bool, str]] = None,
10521052
hive_partitioning: Optional[bool] = None,
10531053
union_by_name: Optional[bool] = None,
10541054
hive_types: Optional[Dict[str, str]] = None,
@@ -1105,7 +1105,7 @@ def write_csv(
11051105
escapechar: Optional[str] = None,
11061106
date_format: Optional[str] = None,
11071107
timestamp_format: Optional[str] = None,
1108-
quoting: Optional[str | int] = None,
1108+
quoting: Optional[Union[str, int]] = None,
11091109
encoding: Optional[str] = None,
11101110
compression: Optional[str] = None,
11111111
overwrite: Optional[bool] = None,
@@ -1117,7 +1117,7 @@ def write_csv(
11171117
) -> None: ...
11181118
def aggregate(
11191119
df: pandas.DataFrame,
1120-
aggr_expr: str | List[Expression],
1120+
aggr_expr: Union[str, List[Expression]],
11211121
group_expr: str = "",
11221122
*,
11231123
connection: DuckDBPyConnection = ...,

duckdb/experimental/spark/sql/catalog.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import NamedTuple, Optional # noqa: D100
1+
from typing import NamedTuple, Optional, Union # noqa: D100
22

33
from .session import SparkSession
44

@@ -63,7 +63,7 @@ def listColumns(self, tableName: str, dbName: Optional[str] = None) -> list[Colu
6363
query += f" and database_name = '{dbName}'"
6464
res = self._session.conn.sql(query).fetchall()
6565

66-
def transform_to_column(x: list[str | bool]) -> Column:
66+
def transform_to_column(x: list[Union[str, bool]]) -> Column:
6767
return Column(name=x[0], description=None, dataType=x[1], nullable=x[2], isPartition=False, isBucket=False)
6868

6969
columns = [transform_to_column(x) for x in res]

duckdb/experimental/spark/sql/functions.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def ucase(str: "ColumnOrName") -> Column:
9090
return upper(str)
9191

9292

93-
def when(condition: "Column", value: Column | str) -> Column: # noqa: D103
93+
def when(condition: "Column", value: Union[Column, str]) -> Column: # noqa: D103
9494
if not isinstance(condition, Column):
9595
msg = "condition should be a Column"
9696
raise TypeError(msg)
@@ -99,7 +99,7 @@ def when(condition: "Column", value: Column | str) -> Column: # noqa: D103
9999
return Column(expr)
100100

101101

102-
def _inner_expr_or_val(val: Column | str) -> Column | str:
102+
def _inner_expr_or_val(val: Union[Column, str]) -> Union[Column, str]:
103103
return val.expr if isinstance(val, Column) else val
104104

105105

@@ -842,7 +842,7 @@ def collect_list(col: "ColumnOrName") -> Column:
842842
return array_agg(col)
843843

844844

845-
def array_append(col: "ColumnOrName", value: Column | str) -> Column:
845+
def array_append(col: "ColumnOrName", value: Union[Column, str]) -> Column:
846846
"""Collection function: returns an array of the elements in col1 along
847847
with the added element in col2 at the last of the array.
848848
@@ -876,7 +876,7 @@ def array_append(col: "ColumnOrName", value: Column | str) -> Column:
876876
return _invoke_function("list_append", _to_column_expr(col), _get_expr(value))
877877

878878

879-
def array_insert(arr: "ColumnOrName", pos: Union["ColumnOrName", int], value: Column | str) -> Column:
879+
def array_insert(arr: "ColumnOrName", pos: Union["ColumnOrName", int], value: Union[Column, str]) -> Column:
880880
"""Collection function: adds an item into a given array at a specified array index.
881881
Array indices start at 1, or start from the end if index is negative.
882882
Index above array size appends the array, or prepends the array if index is negative,
@@ -969,7 +969,7 @@ def array_insert(arr: "ColumnOrName", pos: Union["ColumnOrName", int], value: Co
969969
)
970970

971971

972-
def array_contains(col: "ColumnOrName", value: Column | str) -> Column:
972+
def array_contains(col: "ColumnOrName", value: Union[Column, str]) -> Column:
973973
"""Collection function: returns null if the array is null, true if the array contains the
974974
given value, and false otherwise.
975975

duckdb/experimental/spark/sql/session.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def construct_parameters(tuples: Iterable) -> list[list]:
111111
return DataFrame(rel, self)
112112

113113
def _createDataFrameFromPandas(
114-
self, data: "PandasDataFrame", types: list[str] | None, names: list[str] | None
114+
self, data: "PandasDataFrame", types: Union[list[str], None], names: Union[list[str], None]
115115
) -> DataFrame:
116116
df = self._create_dataframe(data)
117117

duckdb/experimental/spark/sql/types.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1181,7 +1181,7 @@ def asDict(self, recursive: bool = False) -> dict[str, Any]:
11811181

11821182
if recursive:
11831183

1184-
def conv(obj: Row | list | dict | object) -> list | dict | object:
1184+
def conv(obj: Union[Row, list, dict, object]) -> Union[list, dict, object]:
11851185
if isinstance(obj, Row):
11861186
return obj.asDict(True)
11871187
elif isinstance(obj, list):

duckdb/query_graph/__main__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -268,7 +268,7 @@ def generate_ipython(json_input: str) -> str: # noqa: D103
268268
)
269269

270270

271-
def generate_style_html(graph_json: str, include_meta_info: bool) -> None: # noqa: D103
271+
def generate_style_html(graph_json: str, include_meta_info: bool) -> None: # noqa: D103, FBT001
272272
treeflex_css = '<link rel="stylesheet" href="https://unpkg.com/treeflex/dist/css/treeflex.css">\n'
273273
css = "<style>\n"
274274
css += qgraph_css + "\n"

duckdb_packaging/_versioning.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def strip_post_from_version(version: str) -> str:
148148

149149

150150
def get_git_describe(
151-
repo_path: Optional[pathlib.Path] = None, since_major: bool = False, since_minor: bool = False
151+
repo_path: Optional[pathlib.Path] = None, since_major: bool = False, since_minor: bool = False # noqa: FBT001
152152
) -> Optional[str]:
153153
"""Get git describe output for version determination.
154154

duckdb_packaging/build_backend.py

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -44,14 +44,13 @@
4444
_FORCED_PEP440_VERSION = forced_version_from_env()
4545

4646

47-
def _log(msg: str, is_error: bool = False) -> None:
47+
def _log(msg: str) -> None:
4848
"""Log a message with build backend prefix.
4949
5050
Args:
5151
msg: The message to log.
52-
is_error: If True, log to stderr; otherwise log to stdout.
5352
"""
54-
print(_LOGGING_FORMAT.format(msg), flush=True, file=sys.stderr if is_error else sys.stdout)
53+
print(_LOGGING_FORMAT.format(msg), flush=True, file=sys.stderr)
5554

5655

5756
def _in_git_repository() -> bool:
@@ -135,7 +134,7 @@ def _read_duckdb_long_version() -> str:
135134

136135

137136
def _skbuild_config_add(
138-
key: str, value: Union[list, str], config_settings: dict[str, Union[list[str], str]], fail_if_exists: bool = False
137+
key: str, value: Union[list, str], config_settings: dict[str, Union[list[str], str]]
139138
) -> None:
140139
"""Add or modify a configuration setting for scikit-build-core.
141140
@@ -146,10 +145,9 @@ def _skbuild_config_add(
146145
key: The configuration key to set (will be prefixed with 'skbuild.' if needed).
147146
value: The value to add (string or list).
148147
config_settings: The configuration dictionary to modify.
149-
fail_if_exists: If True, raise an error if the key already exists.
150148
151149
Raises:
152-
RuntimeError: If fail_if_exists is True and key exists, or on type mismatches.
150+
RuntimeError: If this would overwrite an existing value, or on type mismatches.
153151
AssertionError: If config_settings is None.
154152
155153
Behavior Rules:
@@ -172,16 +170,13 @@ def _skbuild_config_add(
172170
val_is_list = isinstance(value, list)
173171
if not key_exists:
174172
config_settings[store_key] = value
175-
elif fail_if_exists:
176-
msg = f"{key} already present in config and may not be overridden"
177-
raise RuntimeError(msg)
178173
elif key_exists_as_list and val_is_list:
179174
config_settings[store_key].extend(value)
180175
elif key_exists_as_list and val_is_str:
181176
config_settings[store_key].append(value)
182177
elif key_exists_as_str and val_is_str:
183-
_log(f"WARNING: overriding existing value in {store_key}")
184-
config_settings[store_key] = value
178+
msg = f"{key} already present in config and may not be overridden"
179+
raise RuntimeError(msg)
185180
else:
186181
msg = f"Type mismatch: cannot set {store_key} ({type(config_settings[store_key])}) to `{value}` ({type(value)})"
187182
raise RuntimeError(msg)

0 commit comments

Comments (0)