forked from apache/spark
-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathpyproject.toml
More file actions
71 lines (67 loc) · 2.39 KB
/
pyproject.toml
File metadata and controls
71 lines (67 loc) · 2.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Shared Ruff settings for the repository (applies to both linting and
# formatting). See also [tool.ruff.lint] below.
[tool.ruff]
# Paths Ruff skips entirely: build output, docs build, stub files, and
# generated/vendored sources.
# NOTE(review): the leading "*" on the python/ patterns (e.g.
# "*python/...") presumably lets them match regardless of the directory
# the check is invoked from — confirm against how dev tooling runs ruff.
exclude = [
"*/target/*",
"**/*.ipynb",
"docs/.local_ruby_bundle/",
"*python/pyspark/cloudpickle/*.py",
"*python/pyspark/ml/deepspeed/tests/*.py",
"*python/docs/build/*",
"*python/docs/source/conf.py",
"*python/.eggs/*",
"dist/*",
".git/*",
"*python/pyspark/sql/pandas/functions.pyi",
"*python/pyspark/sql/column.pyi",
"*python/pyspark/worker.pyi",
"*python/pyspark/java_gateway.pyi",
"*python/pyspark/sql/connect/proto/*",
"*python/pyspark/sql/streaming/proto/*",
"*venv*/*",
]
# Lint-rule selection. Rules not mentioned here follow Ruff's defaults.
[tool.ruff.lint]
# Rules enabled on top of Ruff's default rule set.
extend-select = [
"G010", # logging-warn
# ambiguous unicode character
"RUF001", # string
"RUF002", # docstring
"RUF003", # comment
# ambiguous unicode character end
"RUF100", # unused-noqa
]
# Rules disabled project-wide (per-path exceptions live in
# [tool.ruff.lint.per-file-ignores]).
ignore = [
"E402", # Module top level import is disabled for optional import check, etc.
# TODO
"E721", # Use isinstance for type comparison, too many for now.
"E741", # Ambiguous variables like l, I or O.
]
# Per-path exceptions: keys are file paths, values are the rule codes
# suppressed for that file only.
[tool.ruff.lint.per-file-ignores]
# E501 is ignored as shared.py is auto-generated.
"python/pyspark/ml/param/shared.py" = ["E501"]
# E501 is ignored as we should keep the json string format in error_classes.py.
"python/pyspark/errors/error_classes.py" = ["E501"]
# Examples contain some unused variables.
"examples/src/main/python/sql/datasource.py" = ["F841"]
# Black formatter configuration.
[tool.black]
# When changing the version, we have to update
# GitHub workflow version and dev/reformat-python
required-version = "23.12.1"
line-length = 100
# Double-quoted for consistency with the rest of this file; identical
# parsed value to the previous single-quoted form.
target-version = ["py39"]
# Literal (single-quoted) strings below so the regexes need no escaping.
# Format both .py and .pyi files.
include = '\.pyi?$'
# Skip the vendored cloudpickle copy and the generated error_classes.py
# (both are also excluded from Ruff above).
extend-exclude = 'cloudpickle|error_classes.py'