Skip to content

Commit 7289e7d

Browse files
authored
Update pre-commit config (#1842)
* Upgrade black style to 21.11b1 * Fix bad runtime error string formatting. This also caused flake to complain, which it now no longer does. * Fix outstanding pre-commit issues * Upgrade isort version in pre-commit hooks * Update Lucas-C pre-commit hook version * Update jumanjihouse pre-commit version * Update pre-commit hooks * Escape isort args * Do not skip flake8 in pre-commit * Fix outstanding pre-commit issues; ignore E741 * Apply isort * Avoid isort 5 warning by double-dashing args * Fix black formatting in recent commit. * Pin flake8 and flake8-print versions, skip flake8 for pre-commit run
1 parent df018d0 commit 7289e7d

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

46 files changed

+366
-183
lines changed

.github/workflows/linting.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,13 @@ jobs:
2121
python-version: "3.6"
2222
- name: Install dependencies
2323
run: |
24-
pip install flake8==3.7.9 flake8-print==3.1.4 pre-commit
24+
pip install flake8==4.0.1 flake8-print==4.0.0 pre-commit
2525
pre-commit install
2626
pre-commit run seed-isort-config || true
2727
- name: Run linting
2828
run: |
2929
flake8
3030
- name: Run pre-commit checks
31+
# skipping flake8 here (run separately above b/c pre-commit does not include flake8-print)
3132
run: |
3233
SKIP=flake8 pre-commit run --files test/**/*.py gpytorch/**/*.py

.pre-commit-config.yaml

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,7 @@
11
repos:
22
- repo: https://github.com/pre-commit/pre-commit-hooks
3-
rev: v2.4.0
3+
rev: v4.0.1
44
hooks:
5-
- id: flake8
6-
args: [--config=setup.cfg]
7-
exclude: ^(examples/*)|(docs/*)
85
- id: check-byte-order-marker
96
- id: check-case-conflict
107
- id: check-merge-conflict
@@ -14,29 +11,35 @@ repos:
1411
args: [--fix=lf]
1512
- id: trailing-whitespace
1613
- id: debug-statements
14+
- repo: https://github.com/pycqa/flake8
15+
rev: 4.0.1
16+
hooks:
17+
- id: flake8
18+
args: [--config=setup.cfg]
19+
exclude: ^(examples/*)|(docs/*)
1720
- repo: https://github.com/ambv/black
18-
rev: 19.10b0
21+
rev: 21.11b1
1922
hooks:
2023
- id: black
2124
exclude: ^(build/*)|(docs/*)|(examples/*)
2225
args: [-l 120, --target-version=py36]
2326
- repo: https://github.com/pre-commit/mirrors-isort
24-
rev: v4.3.21
27+
rev: v5.9.3
2528
hooks:
2629
- id: isort
2730
language_version: python3
2831
exclude: ^(build/*)|(docs/*)|(examples/*)
29-
args: [-w 120, -m 3, -tc, --project=gpytorch]
32+
args: [-w120, -m3, --tc, --project=gpytorch]
3033
- repo: https://github.com/jumanjihouse/pre-commit-hooks
31-
rev: 1.11.0
34+
rev: 2.1.5
3235
hooks:
3336
- id: require-ascii
3437
exclude: ^(examples/LBFGS.py)|(examples/.*\.ipynb)
3538
- id: script-must-have-extension
3639
- id: forbid-binary
3740
exclude: ^(examples/*)
3841
- repo: https://github.com/Lucas-C/pre-commit-hooks
39-
rev: v1.1.7
42+
rev: v1.1.10
4043
hooks:
4144
- id: forbid-crlf
4245
- id: forbid-tabs

docs/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,4 +17,4 @@ help:
1717
# Catch-all target: route all unknown targets to Sphinx using the new
1818
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
1919
%: Makefile
20-
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20+
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

docs/source/module.rst

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,4 +9,3 @@ gpytorch.Module
99

1010
.. autoclass:: gpytorch.Module
1111
:members:
12-

gpytorch/kernels/additive_structure_kernel.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,10 @@ def is_stationary(self) -> bool:
4242
return self.base_kernel.is_stationary
4343

4444
def __init__(
45-
self, base_kernel: Kernel, num_dims: int, active_dims: Optional[Tuple[int, ...]] = None,
45+
self,
46+
base_kernel: Kernel,
47+
num_dims: int,
48+
active_dims: Optional[Tuple[int, ...]] = None,
4649
):
4750
super(AdditiveStructureKernel, self).__init__(active_dims=active_dims)
4851
self.base_kernel = base_kernel

gpytorch/kernels/arc_kernel.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,26 +119,34 @@ def __init__(
119119
# TODO: check the errors given by interval
120120
angle_constraint = Interval(0.1, 0.9)
121121
self.register_parameter(
122-
name="raw_angle", parameter=torch.nn.Parameter(torch.zeros(*self.batch_shape, 1, self.last_dim)),
122+
name="raw_angle",
123+
parameter=torch.nn.Parameter(torch.zeros(*self.batch_shape, 1, self.last_dim)),
123124
)
124125
if angle_prior is not None:
125126
if not isinstance(angle_prior, Prior):
126127
raise TypeError("Expected gpytorch.priors.Prior but got " + type(angle_prior).__name__)
127128
self.register_prior(
128-
"angle_prior", angle_prior, lambda m: m.angle, lambda m, v: m._set_angle(v),
129+
"angle_prior",
130+
angle_prior,
131+
lambda m: m.angle,
132+
lambda m, v: m._set_angle(v),
129133
)
130134

131135
self.register_constraint("raw_angle", angle_constraint)
132136

133137
self.register_parameter(
134-
name="raw_radius", parameter=torch.nn.Parameter(torch.zeros(*self.batch_shape, 1, self.last_dim)),
138+
name="raw_radius",
139+
parameter=torch.nn.Parameter(torch.zeros(*self.batch_shape, 1, self.last_dim)),
135140
)
136141

137142
if radius_prior is not None:
138143
if not isinstance(radius_prior, Prior):
139144
raise TypeError("Expected gpytorch.priors.Prior but got " + type(radius_prior).__name__)
140145
self.register_prior(
141-
"radius_prior", radius_prior, lambda m: m.radius, lambda m, v: m._set_radius(v),
146+
"radius_prior",
147+
radius_prior,
148+
lambda m: m.radius,
149+
lambda m, v: m._set_radius(v),
142150
)
143151

144152
radius_constraint = Positive()

gpytorch/kernels/distributional_input_kernel.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,9 @@ class DistributionalInputKernel(Kernel):
2525
has_lengthscale = True
2626

2727
def __init__(
28-
self, distance_function: Callable, **kwargs,
28+
self,
29+
distance_function: Callable,
30+
**kwargs,
2931
):
3032
super(DistributionalInputKernel, self).__init__(**kwargs)
3133
if distance_function is None:

gpytorch/kernels/grid_interpolation_kernel.py

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,10 @@ def __init__(
114114
grid = create_grid(self.grid_sizes, self.grid_bounds)
115115

116116
super(GridInterpolationKernel, self).__init__(
117-
base_kernel=base_kernel, grid=grid, interpolation_mode=True, active_dims=active_dims,
117+
base_kernel=base_kernel,
118+
grid=grid,
119+
interpolation_mode=True,
120+
active_dims=active_dims,
118121
)
119122
self.register_buffer("has_initialized_grid", torch.tensor(has_initialized_grid, dtype=torch.bool))
120123

@@ -170,7 +173,10 @@ def forward(self, x1, x2, diag=False, last_dim_is_batch=False, **params):
170173
for x_min, x_max, spacing in zip(x_mins, x_maxs, grid_spacings)
171174
)
172175
grid = create_grid(
173-
self.grid_sizes, self.grid_bounds, dtype=self.grid[0].dtype, device=self.grid[0].device,
176+
self.grid_sizes,
177+
self.grid_bounds,
178+
dtype=self.grid[0].dtype,
179+
device=self.grid[0].device,
174180
)
175181
self.update_grid(grid)
176182

@@ -186,7 +192,9 @@ def forward(self, x1, x2, diag=False, last_dim_is_batch=False, **params):
186192
right_interp_indices, right_interp_values = self._compute_grid(x2, last_dim_is_batch)
187193

188194
batch_shape = _mul_broadcast_shape(
189-
base_lazy_tsr.batch_shape, left_interp_indices.shape[:-2], right_interp_indices.shape[:-2],
195+
base_lazy_tsr.batch_shape,
196+
left_interp_indices.shape[:-2],
197+
right_interp_indices.shape[:-2],
190198
)
191199
res = InterpolatedLazyTensor(
192200
base_lazy_tsr.expand(*batch_shape, *base_lazy_tsr.matrix_shape),

gpytorch/kernels/polynomial_kernel.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,11 @@ class PolynomialKernel(Kernel):
3535
"""
3636

3737
def __init__(
38-
self, power: int, offset_prior: Optional[Prior] = None, offset_constraint: Optional[Interval] = None, **kwargs,
38+
self,
39+
power: int,
40+
offset_prior: Optional[Prior] = None,
41+
offset_constraint: Optional[Interval] = None,
42+
**kwargs,
3943
):
4044
super().__init__(**kwargs)
4145
if offset_constraint is None:

gpytorch/kernels/product_structure_kernel.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,10 @@ def is_stationary(self) -> bool:
4848
return self.base_kernel.is_stationary
4949

5050
def __init__(
51-
self, base_kernel: Kernel, num_dims: int, active_dims: Optional[Tuple[int, ...]] = None,
51+
self,
52+
base_kernel: Kernel,
53+
num_dims: int,
54+
active_dims: Optional[Tuple[int, ...]] = None,
5255
):
5356
super(ProductStructureKernel, self).__init__(active_dims=active_dims)
5457
self.base_kernel = base_kernel

0 commit comments

Comments
 (0)