
Commit e6f2ae7

Ruff ANN204: return type annotations
1 parent fa0b363 commit e6f2ae7

35 files changed: +143 −139
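For context, Ruff's ANN204 rule (missing-return-type-special-method) flags special methods such as `__init__`, `__new__`, and `__repr__` that lack an explicit return annotation. A minimal before/after sketch of the fix this commit applies across the codebase (the `Wrapper` class is hypothetical):

    # Before: ANN204 fires on both special methods
    class Wrapper:
        def __init__(self, value):
            self.value = value

        def __repr__(self):
            return "Wrapper(%r)" % self.value

    # After: explicit return types; __init__ is always annotated -> None
    class Wrapper:
        def __init__(self, value) -> None:
            self.value = value

        def __repr__(self) -> str:
            return "Wrapper(%r)" % self.value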

duckdb/bytes_io_wrapper.py

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 from io import StringIO, TextIOBase
-from typing import Union
+from typing import Any, Union

 """
 BSD 3-Clause License
@@ -48,7 +48,7 @@ def __init__(self, buffer: Union[StringIO, TextIOBase], encoding: str = "utf-8")
         # overflow to the front of the bytestring the next time reading is performed
         self.overflow = b""

-    def __getattr__(self, attr: str):
+    def __getattr__(self, attr: str) -> Any:
         return getattr(self.buffer, attr)

     def read(self, n: Union[int, None] = -1) -> bytes:
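Annotating `__getattr__` as `-> Any` is the conventional choice for delegation wrappers: Python only calls `__getattr__` when normal attribute lookup fails, and since the wrapper forwards arbitrary names to the wrapped buffer, no narrower type can be stated. A minimal sketch of the same pattern (the `Delegate` class here is illustrative, not the actual wrapper):

    from typing import Any

    class Delegate:
        def __init__(self, inner: object) -> None:
            self.inner = inner

        def __getattr__(self, attr: str) -> Any:
            # Only called when normal lookup fails;
            # forward everything else to the wrapped object.
            return getattr(self.inner, attr)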

duckdb/experimental/spark/_globals.py

Lines changed: 6 additions & 4 deletions
@@ -32,6 +32,8 @@ def foo(arg=pyducdkb.spark._NoValue):
 Note that this approach is taken after from NumPy.
 """

+from typing import Type
+
 __ALL__ = ["_NoValue"]


@@ -54,23 +56,23 @@ class _NoValueType:

     __instance = None

-    def __new__(cls):
+    def __new__(cls) -> '_NoValueType':
         # ensure that only one instance exists
         if not cls.__instance:
             cls.__instance = super(_NoValueType, cls).__new__(cls)
         return cls.__instance

     # Make the _NoValue instance falsey
-    def __nonzero__(self):
+    def __nonzero__(self) -> bool:
         return False

     __bool__ = __nonzero__

     # needed for python 2 to preserve identity through a pickle
-    def __reduce__(self):
+    def __reduce__(self) -> tuple[Type, tuple]:
         return (self.__class__, ())

-    def __repr__(self):
+    def __repr__(self) -> str:
         return "<no value>"

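Two details here go beyond the annotations themselves. `__new__` implements a singleton, and `__reduce__` returning `(self.__class__, ())` means unpickling calls the class again, re-entering `__new__` and handing back the same instance, so identity checks like `arg is _NoValue` survive a pickle round trip. A minimal sketch with a hypothetical `_Sentinel` class:

    import pickle

    class _Sentinel:
        __instance = None

        def __new__(cls) -> '_Sentinel':
            # ensure only one instance ever exists
            if not cls.__instance:
                cls.__instance = super().__new__(cls)
            return cls.__instance

        def __reduce__(self) -> tuple:
            # unpickling re-invokes _Sentinel(), which hits __new__ again
            return (self.__class__, ())

    s = _Sentinel()
    assert pickle.loads(pickle.dumps(s)) is s  # identity preserved

One caveat: the new `tuple[Type, tuple]` annotation uses builtin-generic subscription, which is evaluated at definition time and requires Python 3.9+ unless the module adds `from __future__ import annotations`.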

duckdb/experimental/spark/conf.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@


 class SparkConf:
-    def __init__(self):
+    def __init__(self) -> None:
         raise NotImplementedError

     def contains(self, key: str) -> bool:

duckdb/experimental/spark/context.py

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@


 class SparkContext:
-    def __init__(self, master: str):
+    def __init__(self, master: str) -> None:
         self._connection = duckdb.connect(':memory:')
         # This aligns the null ordering with Spark.
         self._connection.execute("set default_null_order='nulls_first_on_asc_last_on_desc'")

duckdb/experimental/spark/errors/exceptions/base.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ def __init__(
         error_class: Optional[str] = None,
         # The dictionary listing the arguments specified in the message (or the error_class)
         message_parameters: Optional[dict[str, str]] = None,
-    ):
+    ) -> None:
         # `message` vs `error_class` & `message_parameters` are mutually exclusive.
         assert (message is not None and (error_class is None and message_parameters is None)) or (
             message is None and (error_class is not None and message_parameters is not None)

duckdb/experimental/spark/exception.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ class ContributionsAcceptedError(NotImplementedError):
     feel free to open up a PR or a Discussion over on https://github.com/duckdb/duckdb
     """

-    def __init__(self, message=None):
+    def __init__(self, message=None) -> None:
         doc = self.__class__.__doc__
         if message:
             doc = message + '\n' + doc

duckdb/experimental/spark/sql/catalog.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ class Function(NamedTuple):


 class Catalog:
-    def __init__(self, session: SparkSession):
+    def __init__(self, session: SparkSession) -> None:
         self._session = session

     def listDatabases(self) -> list[Database]:

duckdb/experimental/spark/sql/column.py

Lines changed: 2 additions & 2 deletions
@@ -95,11 +95,11 @@ class Column:
     .. versionadded:: 1.3.0
     """

-    def __init__(self, expr: Expression):
+    def __init__(self, expr: Expression) -> None:
         self.expr = expr

     # arithmetic operators
-    def __neg__(self):
+    def __neg__(self) -> 'Column':
         return Column(-self.expr)

     # `and`, `or`, `not` cannot be overloaded in Python,
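The `'Column'` annotation on `__neg__` is a string forward reference: the `Column` class object does not exist yet while its own body is being defined, so the name is quoted to defer evaluation. A sketch of the unquoted alternative, assuming the module opts into lazy annotations:

    from __future__ import annotations  # all annotations become lazy

    class Column:
        def __neg__(self) -> Column:  # no quotes needed under the future import
            ...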

duckdb/experimental/spark/sql/conf.py

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@


 class RuntimeConfig:
-    def __init__(self, connection: DuckDBPyConnection):
+    def __init__(self, connection: DuckDBPyConnection) -> None:
         self._connection = connection

     def set(self, key: str, value: str) -> None:

duckdb/experimental/spark/sql/dataframe.py

Lines changed: 2 additions & 2 deletions
@@ -37,7 +37,7 @@


 class DataFrame:
-    def __init__(self, relation: duckdb.DuckDBPyRelation, session: "SparkSession"):
+    def __init__(self, relation: duckdb.DuckDBPyRelation, session: "SparkSession") -> None:
         self.relation = relation
         self.session = session
         self._schema = None
@@ -870,7 +870,7 @@ def limit(self, num: int) -> "DataFrame":
         rel = self.relation.limit(num)
         return DataFrame(rel, self.session)

-    def __contains__(self, item: str):
+    def __contains__(self, item: str) -> bool:
         """
         Check if the :class:`DataFrame` contains a column by the name of `item`
         """
