Commit a44f7ba (branch: dev)

1 parent: 15f4541

File tree: 8 files changed, +72 -59 lines

cf/data/dask_regrid.py

Lines changed: 4 additions & 4 deletions

@@ -1,7 +1,7 @@
 """Regridding functions used within a dask graph."""

 import numpy as np
-from cfdm.data.dask_utils import cfdm_asanyarray
+from cfdm.data.dask_utils import cfdm_to_memory


 def regrid(
@@ -175,12 +175,12 @@ def regrid(
     """
     weights, dst_mask = weights_dst_mask

-    a = cfdm_asanyarray(a)
+    a = cfdm_to_memory(a)
     if dst_mask is not None:
-        dst_mask = cfdm_asanyarray(dst_mask)
+        dst_mask = cfdm_to_memory(dst_mask)

     if ref_src_mask is not None:
-        ref_src_mask = cfdm_asanyarray(ref_src_mask)
+        ref_src_mask = cfdm_to_memory(ref_src_mask)

     # ----------------------------------------------------------------
     # Reshape the array into a form suitable for the regridding dot
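The new helper name makes the division of labour clearer: the chunk function converts its own inputs to in-memory numpy arrays, so callers can hand it lazily-stored data. A minimal sketch of that pattern, assuming only that cfdm_to_memory returns an in-memory (possibly masked) numpy array for whatever it is given; the regrid_chunk name and its arguments are illustrative, not the real signature:

    import numpy as np

    from cfdm.data.dask_utils import cfdm_to_memory  # import path as used in the diff


    def regrid_chunk(a, weights_dst_mask, ref_src_mask=None):
        """Illustrative chunk function that materialises its own inputs."""
        weights, dst_mask = weights_dst_mask

        # Convert the (possibly lazily-stored) inputs to numpy arrays
        a = cfdm_to_memory(a)
        if dst_mask is not None:
            dst_mask = cfdm_to_memory(dst_mask)

        if ref_src_mask is not None:
            ref_src_mask = cfdm_to_memory(ref_src_mask)

        # ... the regridding dot product would operate on the numpy data here ...
        return np.asanyarray(a)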

cf/data/data.py

Lines changed: 38 additions & 38 deletions

@@ -1,4 +1,4 @@
-Importre logging
+import logging
 import math
 from functools import partial, reduce
 from itertools import product
@@ -456,12 +456,12 @@ def __contains__(self, value):
                    return False

        # 'cf_contains' has its own calls to 'cfdm_to_memory', so
-        # we can set '_force_in_memory=False'.
-        value = value.to_dask_array(_force_in_memory=False)
+        # we can set '_force_to_memory=False'.
+        value = value.to_dask_array(_force_to_memory=False)

        # 'cf_contains' has its own calls to 'cfdm_to_memory', so we
-        # can set '_force_in_memory=False'.
-        dx = self.to_dask_array(_force_in_memory=False)
+        # can set '_force_to_memory=False'.
+        dx = self.to_dask_array(_force_to_memory=False)

        out_ind = tuple(range(dx.ndim))
        dx_ind = out_ind
@@ -1567,8 +1567,8 @@ def percentile(
        axes = tuple(sorted(d._parse_axes(axes)))

        # 'cf_percentile' has its own call to 'cfdm_to_memory', so we
-        # can set '_force_in_memory=False'.
-        dx = d.to_dask_array(_force_in_memory=False)
+        # can set '_force_to_memory=False'.
+        dx = d.to_dask_array(_force_to_memory=False)
        dtype = dx.dtype
        shape = dx.shape

@@ -2129,8 +2129,8 @@ def _asdatetime(self, inplace=False):

        if not d._isdatetime():
            # 'cf_rt2dt' has its own call to 'cfdm_to_memory', so we
-            # can set '_force_in_memory=False'.
-            dx = d.to_dask_array(_force_in_memory=False)
+            # can set '_force_to_memory=False'.
+            dx = d.to_dask_array(_force_to_memory=False)
            dx = dx.map_blocks(cf_rt2dt, units=units, dtype=object)
            d._set_dask(dx)

@@ -2186,8 +2186,8 @@ def _asreftime(self, inplace=False):

        if d._isdatetime():
            # 'cf_dt2rt' has its own call to 'cfdm_to_memory', so we
-            # can set '_force_in_memory=False'.
-            dx = d.to_dask_array(_force_in_memory=False)
+            # can set '_force_to_memory=False'.
+            dx = d.to_dask_array(_force_to_memory=False)
            dx = dx.map_blocks(cf_dt2rt, units=units, dtype=float)
            d._set_dask(dx)

@@ -2784,8 +2784,8 @@ def _regrid(
        )

        # 'regrid' has its own calls to 'cfdm_to_memory', so we can set
-        # '_force_in_memory=False'.
-        dx = self.to_dask_array(_force_in_memory=False)
+        # '_force_to_memory=False'.
+        dx = self.to_dask_array(_force_to_memory=False)

        # Rechunk so that each chunk contains data in the form
        # expected by the regrid operator, i.e. the regrid axes all
@@ -3028,10 +3028,10 @@ def concatenate(
        copied = not copy  # to avoid making two copies in a given case

        # Get data as dask arrays and apply concatenation
-        # operation. We can set '_force_in_memory=False' because at compute
+        # operation. We can set '_force_to_memory=False' because at compute
        # time the concatenation operation does not need to access the
        # actual data.
-        dxs = [d.to_dask_array(_force_in_memory=False) for d in processed_data]
+        dxs = [d.to_dask_array(_force_to_memory=False) for d in processed_data]
        dx = da.concatenate(dxs, axis=axis)

        # Set the CFA write status
@@ -3066,7 +3066,7 @@ def concatenate(
                # If and only if any two input Data objects have
                # different __in_memory__ values, then set
                # in_memory=False on the concatenation.
-                in_memory= False
+                in_memory = False
                break

        # Set the new dask array
@@ -3464,8 +3464,8 @@ def Units(self, value):
        cf_func = partial(cf_units, from_units=old_units, to_units=value)

        # 'cf_units' has its own call to 'cfdm_to_memory', so we
-        # can set '_force_in_memory=False'.
-        dx = self.to_dask_array(_force_in_memory=False)
+        # can set '_force_to_memory=False'.
+        dx = self.to_dask_array(_force_to_memory=False)
        dx = dx.map_blocks(cf_func, dtype=dtype)

        # Setting equivalent units doesn't affect the CFA write
@@ -3508,8 +3508,8 @@ def is_masked(self):

        """
        # 'cf_is_masked' has its own call to 'cfdm_to_memory', so we
-        # can set '_force_in_memory=False'.
-        dx = self.to_dask_array(_force_in_memory=False)
+        # can set '_force_to_memory=False'.
+        dx = self.to_dask_array(_force_to_memory=False)

        out_ind = tuple(range(dx.ndim))
        dx_ind = out_ind
@@ -4215,8 +4215,8 @@ def convert_reference_time(
        d.Units = units0

        # 'cf_rt2dt' its own call to 'cfdm_to_memory', so we can set
-        # '_force_in_memory=False'.
-        dx = d.to_dask_array(_force_in_memory=False)
+        # '_force_to_memory=False'.
+        dx = d.to_dask_array(_force_to_memory=False)

        # Convert to the correct date-time objects
        dx = dx.map_blocks(cf_rt2dt, units=units0, dtype=object)
@@ -4283,10 +4283,10 @@ def get_deterministic_name(self):
        units = self._Units

        # The dask graph is never going to be computed, so we can set
-        # '_force_in_memory=False'.
+        # '_force_to_memory=False'.
        return tokenize(
            self.to_dask_array(
-                _apply_mask_hardness=False, _force_in_memory=False
+                _force_mask_hardness=False, _force_to_memory=False
            ).name,
            units.formatted(definition=True, names=True),
            units._canonical_calendar,
@@ -4324,8 +4324,8 @@ def add_file_location(self, location):
        updated = False

        # The dask graph is never going to be computed, so we can set
-        # '_force_in_memory=False'.
-        dsk = self.todict(_force_in_memory=False)
+        # '_force_to_memory=False'.
+        dsk = self.todict(_force_to_memory=False)
        for key, a in dsk.items():
            try:
                dsk[key] = a.add_file_location(location)
@@ -4338,7 +4338,7 @@ def add_file_location(self, location):
                updated = True

        if updated:
-            dx = self.to_dask_array(_force_in_memory=False)
+            dx = self.to_dask_array(_force_to_memory=False)
            dx = da.Array(dsk, dx.name, dx.chunks, dx.dtype, dx._meta)
            self._set_dask(dx, clear=self._NONE, in_memory=None)

@@ -5725,8 +5725,8 @@ def unique(self, split_every=None):
        d.soften_mask()

        # The applicable chunk function will have its own call to
-        # 'cfdm_to_memory', so we can set '_force_in_memory=False'.
-        dx = d.to_dask_array(_force_in_memory=False)
+        # 'cfdm_to_memory', so we can set '_force_to_memory=False'.
+        dx = d.to_dask_array(_force_to_memory=False)
        dx = Collapse().unique(dx, split_every=split_every)

        d._set_dask(dx)
@@ -6240,8 +6240,8 @@ def file_locations(self):
        out = set()

        # The dask graph is never going to be computed, so we can set
-        # '_force_in_memory=False'.
-        for key, a in self.todict(_force_in_memory=False).items():
+        # '_force_to_memory=False'.
+        for key, a in self.todict(_force_to_memory=False).items():
            try:
                out.update(a.file_locations())
            except AttributeError:
@@ -6801,8 +6801,8 @@ def del_file_location(self, location):
        updated = False

        # The dask graph is never going to be computed, so we can set
-        # '_force_in_memory=False'.
-        dsk = self.todict(_force_in_memory=False)
+        # '_force_to_memory=False'.
+        dsk = self.todict(_force_to_memory=False)
        for key, a in dsk.items():
            try:
                dsk[key] = a.del_file_location(location)
@@ -6815,7 +6815,7 @@ def del_file_location(self, location):
                updated = True

        if updated:
-            dx = self.to_dask_array(_force_in_memory=False)
+            dx = self.to_dask_array(_force_to_memory=False)
            dx = da.Array(dsk, dx.name, dx.chunks, dx.dtype, dx._meta)
            self._set_dask(dx, clear=self._NONE, in_memory=None)

@@ -7852,8 +7852,8 @@ def where(
        # hardness has been applied.
        #
        # 'cf_where' has its own calls to 'cfdm_to_memory', so we can
-        # set '_force_in_memory=False'.
-        dx = d.to_dask_array(_force_in_memory=False)
+        # set '_force_to_memory=False'.
+        dx = d.to_dask_array(_force_to_memory=False)

        units = d.Units

@@ -7869,8 +7869,8 @@ def where(
        condition = type(self).asdata(condition)
        condition = where_broadcastable(d, condition, "condition")
        # 'cf_where' has its own calls to 'cfdm_to_memory', so we can
-        # set '_force_in_memory=False'.
-        condition = condition.to_dask_array(_force_in_memory=False)
+        # set '_force_to_memory=False'.
+        condition = condition.to_dask_array(_force_to_memory=False)

        # If x or y is self then change it to None. This prevents an
        # unnecessary copy; and, at compute time, an unncessary numpy
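Several of these methods (get_deterministic_name, add_file_location, file_locations, del_file_location) only inspect the graph and never compute it, so neither mask hardness nor in-memory conversion matters there. A rough sketch of that usage, assuming only the renamed keywords shown in the diff; the helper name is illustrative:

    from dask.base import tokenize


    def graph_token(d):
        """Illustrative: hash a cf.Data object's graph without computing it."""
        # The graph is never computed, so both flags can be switched off,
        # as in Data.get_deterministic_name above.
        dx = d.to_dask_array(_force_mask_hardness=False, _force_to_memory=False)
        return tokenize(dx.name)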

cf/data/utils.py

Lines changed: 3 additions & 3 deletions

@@ -417,10 +417,10 @@ def collapse(
        kwargs["ddof"] = ddof

    # The applicable chunk function will have its own call to
-    # 'cfdm_to_memory', so we can set '_force_in_memory=False'. Also,
-    # setting _force_in_memory=False will ensure that any active
+    # 'cfdm_to_memory', so we can set '_force_to_memory=False'. Also,
+    # setting _force_to_memory=False will ensure that any active
    # storage operations are not compromised.
-    dx = d.to_dask_array(_force_in_memory=False)
+    dx = d.to_dask_array(_force_to_memory=False)
    dx = func(dx, **kwargs)
    d._set_dask(dx)
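The collapse helper shows the other recurring pattern in this commit: the chunk function materialises its own input, so the graph is fetched lazily, transformed, and put back on the Data object. A minimal sketch under that assumption; apply_collapse and its arguments are illustrative, while to_dask_array and _set_dask are as used in the diff:

    def apply_collapse(d, func, **kwargs):
        """Illustrative: apply a dask collapse whose chunk function calls
        'cfdm_to_memory' itself, leaving any active-storage reductions intact."""
        # No need to force the chunks into memory before compute time
        dx = d.to_dask_array(_force_to_memory=False)
        dx = func(dx, **kwargs)
        d._set_dask(dx)
        return d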

cf/read_write/netcdf/netcdfwrite.py

Lines changed: 3 additions & 3 deletions

@@ -748,8 +748,8 @@ def _cfa_write_non_standard_terms(
            # missing data.
            #
            # '_cfa_unique' has its own call to 'cfdm_to_memory', so
-            # we can set '_asanyarray=False'.
-            dx = data.to_dask_array(_asanyarray=False)
+            # we can set '_force_to_memory=False'.
+            dx = data.to_dask_array(_force_to_memory=False)
            dx_ind = tuple(range(dx.ndim))
            out_ind = dx_ind
            dx = da.blockwise(
@@ -963,7 +963,7 @@ def _cfa_aggregation_instructions(self, data, cfvar):
        # ------------------------------------------------------------
        dtype = np.dtype(np.int32)
        if (
-            max(data.to_dask_array(_asanyarray=False).chunksize)
+            max(data.to_dask_array(_force_to_memory=False).chunksize)
            > np.iinfo(dtype).max
        ):
            dtype = np.dtype(np.int64)
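The second hunk picks an integer dtype from the data's chunk sizes: int32 unless any chunk dimension is too large for it, in which case int64. A small sketch of that check, assuming data is a cf.Data object; the function name is illustrative:

    import numpy as np


    def chunk_index_dtype(data):
        """Illustrative: widen from int32 to int64 only when needed."""
        dtype = np.dtype(np.int32)
        # The graph is only inspected for its chunk sizes, never computed
        dx = data.to_dask_array(_force_to_memory=False)
        if max(dx.chunksize) > np.iinfo(dtype).max:
            dtype = np.dtype(np.int64)
        return dtype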

cf/test/individual_tests.sh

Lines changed: 18 additions & 5 deletions

@@ -5,9 +5,6 @@ do
     echo "Running $file"
     python $file
     rc=$?
-#    if [[ $rc != 0 ]]; then
-#        exit $rc
-#    fi
 done

 file=setup_create_field.py
@@ -18,14 +15,30 @@ if [[ $rc != 0 ]]; then
     exit $rc
 fi

+style="lots"
+
 for file in test_*.py
 do
     echo "Running $file"
     python $file
     rc=$?
     if [[ $rc != 0 ]]; then
-        exit $rc
-        # echo -e "\n\n$file FAILED \n\n"
+        if [[ "$file" == "test_style.py" ]] ; then
+            style="none"
+        else
+            exit $rc
+            # echo -e "\n\n$file FAILED \n\n"
+        fi
     fi
 done

+echo
+if [[ "$style" == "none" ]] ; then
+    echo "------------------------------------------"
+    echo "All tests passed, APART FROM test_style.py"
+    echo "------------------------------------------"
+else
+    echo "================"
+    echo "All tests passed"
+    echo "================"
+fi

cf/test/test_Data.py

Lines changed: 3 additions & 3 deletions

@@ -4540,7 +4540,7 @@ def test_Data_cull_graph(self):
            len(
                dict(
                    d.to_dask_array(
-                        _apply_mask_hardness=False, _asanyarray=False
+                        _force_mask_hardness=False, _force_to_memory=False
                    ).dask
                )
            ),
@@ -4553,7 +4553,7 @@ def test_Data_cull_graph(self):
            len(
                dict(
                    d.to_dask_array(
-                        _apply_mask_hardness=False, _asanyarray=False
+                        _force_mask_hardness=False, _force_to_memory=False
                    ).dask
                )
            ),
@@ -4725,7 +4725,7 @@ def test_Data_file_location(self):
    def test_Data_todict(self):
        """Test Data.todict."""
        d = cf.Data([1, 2, 3, 4], chunks=2)
-        key = d.to_dask_array(_apply_mask_hardness=False).name
+        key = d.to_dask_array(_force_mask_hardness=False).name

        x = d.todict()
        self.assertIsInstance(x, dict)

cf/test/test_DimensionCoordinate.py

Lines changed: 1 addition & 1 deletion

@@ -614,7 +614,7 @@ def test_DimensiconCoordinate_persist(self):
        self.assertEqual(
            len(
                e.data.to_dask_array(
-                    _apply_mask_hardness=False, _asanyarray=False
+                    _force_mask_hardness=False, _force_to_memory=False
                ).dask.layers
            ),
            1,

cf/test/test_Field.py

Lines changed: 2 additions & 2 deletions

@@ -2767,7 +2767,7 @@ def test_Field_persist(self):
        self.assertGreater(
            len(
                f.data.to_dask_array(
-                    _apply_mask_hardness=False, _asanyarray=False
+                    _force_mask_hardness=False, _force_to_memory=False
                ).dask.layers
            ),
            2,
@@ -2778,7 +2778,7 @@ def test_Field_persist(self):
        self.assertEqual(
            len(
                g.data.to_dask_array(
-                    _apply_mask_hardness=False, _asanyarray=False
+                    _force_mask_hardness=False, _force_to_memory=False
                ).dask.layers
            ),
            1,
