
Commit 9a692a8

Ruff format fixes
1 parent a062e37 commit 9a692a8


50 files changed: +40, -132 lines
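Nearly everything below is one mechanical change applied across the tree: a docstring whose closing quotes sat on their own line is collapsed onto a single line, which is how Ruff's formatter normalizes docstrings that fit in one line. A minimal before/after sketch (an inference from the diff, not stated in the commit message):

    # Before: the closing quotes are on their own line
    def answer() -> int:
        """Return the answer.
        """
        return 42

    # After `ruff format`: a single-line docstring
    def answer() -> int:
        """Return the answer."""
        return 42

The few remaining hunks remove redundant blank lines, which the formatter also normalizes.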

adbc_driver_duckdb/dbapi.py

Lines changed: 1 addition & 2 deletions
@@ -15,8 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.

-"""DBAPI 2.0-compatible facade for the ADBC DuckDB driver.
-"""
+"""DBAPI 2.0-compatible facade for the ADBC DuckDB driver."""

 import typing

duckdb/experimental/spark/errors/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -15,8 +15,7 @@
 # limitations under the License.
 #

-"""PySpark exceptions.
-"""
+"""PySpark exceptions."""

 from .exceptions.base import (
     AnalysisException,

duckdb/experimental/spark/errors/exceptions/base.py

Lines changed: 24 additions & 48 deletions
@@ -4,8 +4,7 @@


 class PySparkException(Exception):
-    """Base Exception for handling errors generated from PySpark.
-    """
+    """Base Exception for handling errors generated from PySpark."""

     def __init__(
         self,
@@ -78,115 +77,92 @@ def __str__(self) -> str:


 class AnalysisException(PySparkException):
-    """Failed to analyze a SQL query plan.
-    """
+    """Failed to analyze a SQL query plan."""


 class SessionNotSameException(PySparkException):
-    """Performed the same operation on different SparkSession.
-    """
+    """Performed the same operation on different SparkSession."""


 class TempTableAlreadyExistsException(AnalysisException):
-    """Failed to create temp view since it is already exists.
-    """
+    """Failed to create temp view since it is already exists."""


 class ParseException(AnalysisException):
-    """Failed to parse a SQL command.
-    """
+    """Failed to parse a SQL command."""


 class IllegalArgumentException(PySparkException):
-    """Passed an illegal or inappropriate argument.
-    """
+    """Passed an illegal or inappropriate argument."""


 class ArithmeticException(PySparkException):
-    """Arithmetic exception thrown from Spark with an error class.
-    """
+    """Arithmetic exception thrown from Spark with an error class."""


 class UnsupportedOperationException(PySparkException):
-    """Unsupported operation exception thrown from Spark with an error class.
-    """
+    """Unsupported operation exception thrown from Spark with an error class."""


 class ArrayIndexOutOfBoundsException(PySparkException):
-    """Array index out of bounds exception thrown from Spark with an error class.
-    """
+    """Array index out of bounds exception thrown from Spark with an error class."""


 class DateTimeException(PySparkException):
-    """Datetime exception thrown from Spark with an error class.
-    """
+    """Datetime exception thrown from Spark with an error class."""


 class NumberFormatException(IllegalArgumentException):
-    """Number format exception thrown from Spark with an error class.
-    """
+    """Number format exception thrown from Spark with an error class."""


 class StreamingQueryException(PySparkException):
-    """Exception that stopped a :class:`StreamingQuery`.
-    """
+    """Exception that stopped a :class:`StreamingQuery`."""


 class QueryExecutionException(PySparkException):
-    """Failed to execute a query.
-    """
+    """Failed to execute a query."""


 class PythonException(PySparkException):
-    """Exceptions thrown from Python workers.
-    """
+    """Exceptions thrown from Python workers."""


 class SparkRuntimeException(PySparkException):
-    """Runtime exception thrown from Spark with an error class.
-    """
+    """Runtime exception thrown from Spark with an error class."""


 class SparkUpgradeException(PySparkException):
-    """Exception thrown because of Spark upgrade.
-    """
+    """Exception thrown because of Spark upgrade."""


 class UnknownException(PySparkException):
-    """None of the above exceptions.
-    """
+    """None of the above exceptions."""


 class PySparkValueError(PySparkException, ValueError):
-    """Wrapper class for ValueError to support error classes.
-    """
+    """Wrapper class for ValueError to support error classes."""


 class PySparkIndexError(PySparkException, IndexError):
-    """Wrapper class for IndexError to support error classes.
-    """
+    """Wrapper class for IndexError to support error classes."""


 class PySparkTypeError(PySparkException, TypeError):
-    """Wrapper class for TypeError to support error classes.
-    """
+    """Wrapper class for TypeError to support error classes."""


 class PySparkAttributeError(PySparkException, AttributeError):
-    """Wrapper class for AttributeError to support error classes.
-    """
+    """Wrapper class for AttributeError to support error classes."""


 class PySparkRuntimeError(PySparkException, RuntimeError):
-    """Wrapper class for RuntimeError to support error classes.
-    """
+    """Wrapper class for RuntimeError to support error classes."""


 class PySparkAssertionError(PySparkException, AssertionError):
-    """Wrapper class for AssertionError to support error classes.
-    """
+    """Wrapper class for AssertionError to support error classes."""


 class PySparkNotImplementedError(PySparkException, NotImplementedError):
-    """Wrapper class for NotImplementedError to support error classes.
-    """
+    """Wrapper class for NotImplementedError to support error classes."""

duckdb/experimental/spark/errors/utils.py

Lines changed: 2 additions & 4 deletions
@@ -21,15 +21,13 @@


 class ErrorClassesReader:
-    """A reader to load error information from error_classes.py.
-    """
+    """A reader to load error information from error_classes.py."""

     def __init__(self) -> None:
         self.error_info_map = ERROR_CLASSES_MAP

     def get_error_message(self, error_class: str, message_parameters: dict[str, str]) -> str:
-        """Returns the completed error message by applying message parameters to the message template.
-        """
+        """Returns the completed error message by applying message parameters to the message template."""
         message_template = self.get_message_template(error_class)
         # Verify message parameters.
         message_parameters_from_template = re.findall("<([a-zA-Z0-9_-]+)>", message_template)
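For context (an illustration, not from the commit): the message templates carry <angle-bracket> placeholders, and the re.findall above extracts their names so the reader can validate the supplied parameters before substituting. A minimal sketch with a hypothetical template:

    import re

    message_template = "Argument <arg_name> must be positive, got <value>."
    message_parameters = {"arg_name": "num_rows", "value": "-1"}

    # Extract placeholder names, mirroring the regex in get_error_message
    expected = set(re.findall("<([a-zA-Z0-9_-]+)>", message_template))
    assert expected == set(message_parameters)

    # Fill each <name> with its value
    message = message_template
    for name, value in message_parameters.items():
        message = message.replace(f"<{name}>", value)
    print(message)  # Argument num_rows must be positive, got -1.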

duckdb/experimental/spark/sql/dataframe.py

Lines changed: 1 addition & 2 deletions
@@ -843,8 +843,7 @@ def limit(self, num: int) -> "DataFrame":
         return DataFrame(rel, self.session)

     def __contains__(self, item: str) -> bool:
-        """Check if the :class:`DataFrame` contains a column by the name of `item`
-        """
+        """Check if the :class:`DataFrame` contains a column by the name of `item`"""
         return item in self.relation

     @property
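As an aside (not part of the diff), __contains__ backs Python's `in` operator, so column membership reads naturally; the column name here is hypothetical:

    if "price" in df:  # dispatches to DataFrame.__contains__("price")
        df = df.select("price")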

duckdb/experimental/spark/sql/streaming.py

Lines changed: 0 additions & 1 deletion
@@ -30,7 +30,6 @@ def load(
         schema: Union[StructType, str, None] = None,
         **options: OptionalPrimitiveType,
     ) -> "DataFrame":
-
         raise NotImplementedError


duckdb/experimental/spark/sql/types.py

Lines changed: 7 additions & 14 deletions
@@ -102,13 +102,11 @@ def needConversion(self) -> bool:
         return False

     def toInternal(self, obj: Any) -> Any:
-        """Converts a Python object into an internal SQL object.
-        """
+        """Converts a Python object into an internal SQL object."""
         return obj

     def fromInternal(self, obj: Any) -> Any:
-        """Converts an internal SQL object into a native Python object.
-        """
+        """Converts an internal SQL object into a native Python object."""
         return obj

@@ -979,14 +977,12 @@ def typeName(cls) -> str:

     @classmethod
     def sqlType(cls) -> DataType:
-        """Underlying SQL storage type for this UDT.
-        """
+        """Underlying SQL storage type for this UDT."""
         raise NotImplementedError("UDT must implement sqlType().")

     @classmethod
     def module(cls) -> str:
-        """The Python module of the UDT.
-        """
+        """The Python module of the UDT."""
         raise NotImplementedError("UDT must implement module().")

     @classmethod
@@ -1001,8 +997,7 @@ def needConversion(self) -> bool:

     @classmethod
     def _cachedSqlType(cls) -> DataType:
-        """Cache the sqlType() into class, because it's heavily used in `toInternal`.
-        """
+        """Cache the sqlType() into class, because it's heavily used in `toInternal`."""
         if not hasattr(cls, "_cached_sql_type"):
             cls._cached_sql_type = cls.sqlType()  # type: ignore[attr-defined]
         return cls._cached_sql_type  # type: ignore[attr-defined]
@@ -1017,13 +1012,11 @@ def fromInternal(self, obj: Any) -> Any:
         return self.deserialize(v)

     def serialize(self, obj: Any) -> Any:
-        """Converts a user-type object into a SQL datum.
-        """
+        """Converts a user-type object into a SQL datum."""
         raise NotImplementedError("UDT must implement toInternal().")

     def deserialize(self, datum: Any) -> Any:
-        """Converts a SQL datum into a user-type object.
-        """
+        """Converts a SQL datum into a user-type object."""
         raise NotImplementedError("UDT must implement fromInternal().")

     def simpleString(self) -> str:
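For orientation (an illustration, not part of the commit), a concrete UDT fills in the methods shown above; the Point class and struct fields are hypothetical:

    class Point:
        def __init__(self, x: float, y: float):
            self.x, self.y = x, y

    class PointUDT(UserDefinedType):
        @classmethod
        def sqlType(cls) -> DataType:
            # Underlying storage: a struct of two doubles
            return StructType([
                StructField("x", DoubleType()),
                StructField("y", DoubleType()),
            ])

        @classmethod
        def module(cls) -> str:
            return "myapp.types"  # hypothetical import path

        def serialize(self, obj: "Point"):
            return (obj.x, obj.y)

        def deserialize(self, datum) -> "Point":
            return Point(datum[0], datum[1])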

duckdb/polars_io.py

Lines changed: 1 addition & 2 deletions
@@ -206,8 +206,7 @@ def _pl_tree_to_sql(tree: dict) -> str:


 def duckdb_source(relation: duckdb.DuckDBPyRelation, schema: pl.schema.Schema) -> pl.LazyFrame:
-    """A polars IO plugin for DuckDB.
-    """
+    """A polars IO plugin for DuckDB."""

     def source_generator(
         with_columns: Optional[list[str]],
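A usage note (an assumption about the surrounding API, not shown in the diff): duckdb_source wraps a DuckDB relation as a polars LazyFrame, so the scan is executed by DuckDB on collect; the import path is assumed from this file's location:

    import duckdb
    import polars as pl
    from duckdb.polars_io import duckdb_source  # assumed import path

    rel = duckdb.sql("SELECT 42 AS answer")
    lf = duckdb_source(rel, pl.Schema({"answer": pl.Int32}))  # schema must match the relation
    print(lf.collect())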

duckdb/udf.py

Lines changed: 1 addition & 2 deletions
@@ -1,6 +1,5 @@
 def vectorized(func):
-    """Decorate a function with annotated function parameters, so DuckDB can infer that the function should be provided with pyarrow arrays and should expect pyarrow array(s) as output
-    """
+    """Decorate a function with annotated function parameters, so DuckDB can infer that the function should be provided with pyarrow arrays and should expect pyarrow array(s) as output"""
     import types
     from inspect import signature
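For context (an illustration beyond this diff): a function decorated this way exchanges pyarrow arrays with DuckDB rather than scalars. A minimal sketch of the Arrow-style UDF flow, assuming DuckDB's create_function with type="arrow"; the function name is hypothetical:

    import duckdb
    import pyarrow.compute as pc
    from duckdb.typing import BIGINT

    def plus_one(col):
        # col arrives as a pyarrow array; return an array of the same length
        return pc.add(col, 1)

    con = duckdb.connect()
    con.create_function("plus_one", plus_one, [BIGINT], BIGINT, type="arrow")
    print(con.sql("SELECT plus_one(41)").fetchall())  # [(42,)]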

duckdb_packaging/pypi_cleanup.py

Lines changed: 0 additions & 3 deletions
@@ -78,17 +78,14 @@ class PyPICleanupError(Exception):
     """Base exception for PyPI cleanup operations."""


-
 class AuthenticationError(PyPICleanupError):
     """Raised when authentication fails."""


-
 class ValidationError(PyPICleanupError):
     """Raised when input validation fails."""


-
 def setup_logging(verbose: bool = False) -> None:
     """Configure logging with appropriate level and format."""
     level = logging.DEBUG if verbose else logging.INFO
