Skip to content

Commit 3ae569f

Browse files
authored
Update the new 2022 black syntax (#360)
Black is now out of beta and has introduced a small change in its formatting, removing whitespace around `**` operators. They also now promise to make syntax changes like this at most once a year. Seems reasonable and not a lot of work to keep up to date without pinning black.
1 parent 18c8166 commit 3ae569f

File tree

12 files changed

+20
-20
lines changed

12 files changed

+20
-20
lines changed

doc/gallery_src/blockreduce_weights_mean.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
4949
# It's important that the weights are given as 1/sigma**2 for the uncertainty
5050
# propagation. In this case, you should not use verde.variance_to_weights
5151
# because it would normalize the weights.
52-
weights = 1 / data.std_up ** 2
52+
weights = 1 / data.std_up**2
5353
reducer = vd.BlockMean(spacing, center_coordinates=True)
5454
# First produce the weighted variance weights
5555
variance_weights = reducer.filter(coordinates, data.velocity_up, weights)[-1]

doc/gallery_src/spline_weights.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@
4747
train, test = vd.train_test_split(
4848
projection(*coordinates),
4949
data.velocity_up,
50-
weights=1 / data.std_up ** 2,
50+
weights=1 / data.std_up**2,
5151
random_state=0,
5252
)
5353
# Fit the model on the training set

doc/gallery_src/vector_trend.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
# [Trend(...)]*2 as an argument to Vector. This creates references to the same
3232
# Trend instance and will mess up the fitting.
3333
trend = vd.Vector([vd.Trend(degree=2) for i in range(2)])
34-
weights = vd.variance_to_weights((data.std_east ** 2, data.std_north ** 2))
34+
weights = vd.variance_to_weights((data.std_east**2, data.std_north**2))
3535
trend.fit(
3636
coordinates=(data.longitude, data.latitude),
3737
data=(data.velocity_east, data.velocity_north),

doc/tutorials_src/vectors.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@
6969
block_coords, block_data, block_weights = reducer.filter(
7070
coordinates=proj_coords,
7171
data=(data.velocity_east, data.velocity_north),
72-
weights=(1 / data.std_east ** 2, 1 / data.std_north ** 2),
72+
weights=(1 / data.std_east**2, 1 / data.std_north**2),
7373
)
7474
print(len(block_data), len(block_weights))
7575

@@ -124,7 +124,7 @@
124124
trend.fit(
125125
coordinates=proj_coords,
126126
data=(data.velocity_east, data.velocity_north),
127-
weights=(1 / data.std_east ** 2, 1 / data.std_north ** 2),
127+
weights=(1 / data.std_east**2, 1 / data.std_north**2),
128128
)
129129

130130
###############################################################################
@@ -213,7 +213,7 @@
213213
train, test = vd.train_test_split(
214214
coordinates=proj_coords,
215215
data=(data.velocity_east, data.velocity_north),
216-
weights=(1 / data.std_east ** 2, 1 / data.std_north ** 2),
216+
weights=(1 / data.std_east**2, 1 / data.std_north**2),
217217
random_state=1,
218218
)
219219

doc/tutorials_src/weights.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ def plot_data(coordinates, velocity, weights, title_data, title_weights):
153153
# weights.
154154

155155
# We'll use 1 over the squared data uncertainty as our input weights.
156-
data["weights"] = 1 / data.std_up ** 2
156+
data["weights"] = 1 / data.std_up**2
157157

158158
# By default, BlockMean assumes that weights are not related to uncertainties
159159
coordinates, velocity, weights = mean.filter(

verde/spline.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -509,11 +509,11 @@ def warn_weighted_exact_solution(spline, weights):
509509

510510
def greens_func(east, north, mindist):
511511
"Calculate the Green's function for the Bi-Harmonic Spline"
512-
distance = np.sqrt(east ** 2 + north ** 2)
512+
distance = np.sqrt(east**2 + north**2)
513513
# The mindist factor helps avoid singular matrices when the force and
514514
# computation point are too close
515515
distance += mindist
516-
return (distance ** 2) * (np.log(distance) - 1)
516+
return (distance**2) * (np.log(distance) - 1)
517517

518518

519519
def predict_numpy(east, north, force_east, force_north, mindist, forces, result):

verde/tests/test_blockreduce.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -244,7 +244,7 @@ def test_blockmean_uncertainty_weights():
244244
uncertainty = np.ones_like(data)
245245
# Set a higher uncertainty for the first block
246246
uncertainty[:2, :2] = 2
247-
weights = 1 / uncertainty ** 2
247+
weights = 1 / uncertainty**2
248248
reducer = BlockMean(spacing=1, uncertainty=True)
249249
# Uncertainty propagation can only work if weights are given
250250
with pytest.raises(ValueError):
@@ -269,7 +269,7 @@ def test_blockmean_variance_weights():
269269
uncertainty = np.ones_like(data)
270270
# Set a higher uncertainty for the first block
271271
uncertainty[:2, :2] = 2
272-
weights = 1 / uncertainty ** 2
272+
weights = 1 / uncertainty**2
273273
reducer = BlockMean(spacing=1, uncertainty=False)
274274
block_coords, block_data, block_weights = reducer.filter(coords, data, weights)
275275
assert len(block_coords[0]) == 4

verde/tests/test_minimal.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def test_minimal_integration_2d_gps():
3636
train, test = train_test_split(
3737
coordinates=proj_coords,
3838
data=(data.velocity_east, data.velocity_north),
39-
weights=(1 / data.std_east ** 2, 1 / data.std_north ** 2),
39+
weights=(1 / data.std_east**2, 1 / data.std_north**2),
4040
random_state=1,
4141
)
4242
chain = Chain(

verde/tests/test_projections.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818

1919
def projection(longitude, latitude):
2020
"Dummy projection"
21-
return longitude ** 2, latitude ** 2
21+
return longitude**2, latitude**2
2222

2323

2424
@pytest.mark.parametrize(

verde/tests/test_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def test_kdtree():
7474
def test_grid_to_table_order():
7575
"Check that coordinates are in the right order when converting to tables"
7676
lon, lat = grid_coordinates(region=(1, 10, -10, -1), shape=(3, 4))
77-
data = lon ** 2
77+
data = lon**2
7878
# If the DataArray is created with coords in an order that doesn't match
7979
# the dims (which is valid), we were getting it wrong because we were
8080
# relying on the order of the coords instead of dims. This test would have

0 commit comments

Comments
 (0)