Skip to content

Commit c32e235

Browse files
authored
Merge pull request #133 from Climate-REF/fix-pycmec-attributes
fix the validation error when 'attributes' value is a dict
2 parents 18f92b4 + ad43a86 commit c32e235

5 files changed

Lines changed: 106 additions & 14 deletions

File tree

changelog/133.fix.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Fixed the validation error when 'attributes' value is a dict

packages/ref-core/src/cmip_ref_core/pycmec/metric.py

Lines changed: 16 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -157,18 +157,24 @@ def _check_nested_dict_keys(cls, nested: dict[Any, Any], metdims: dict[Any, Any]
157157
raise ValueError("Error in dicts of Results")
158158

159159
for key, value in nested.items():
160-
if isinstance(value, dict) and level < len(metdims[MetricCV.JSON_STRUCTURE.value]) - 1:
160+
if key == MetricCV.ATTRIBUTES.value:
161+
continue
162+
163+
elif isinstance(value, dict) and level < len(metdims[MetricCV.JSON_STRUCTURE.value]) - 1:
161164
cls._check_nested_dict_keys(value, metdims, level + 1)
162165
elif isinstance(value, dict):
163-
StrNumDict(value)
166+
tmp = dict(value)
167+
if MetricCV.ATTRIBUTES.value in tmp:
168+
tmp.pop(MetricCV.ATTRIBUTES.value)
169+
StrNumDict(tmp)
164170

165171
@field_validator("root", mode="after")
166172
@classmethod
167173
def _validate_results(cls, rlt: Any, info: ValidationInfo) -> Any:
168174
"""Validate a MetricResults object"""
169175
if not isinstance(info.context, MetricDimensions):
170176
s = "\nTo validate MetricResults object, MetricDimensions is needed,\n"
171-
s += "please use model_validate(Results, context=MetricDimensions to instantiate\n"
177+
s += "please use model_validate(Results, context=MetricDimensions) to instantiate\n"
172178
raise ValueError(s)
173179
else:
174180
# results = rlt.root
@@ -295,7 +301,11 @@ def _fill(cls, mdict: dict[Any, Any], mdims: dict[Any, Any], level: int = 0) ->
295301
mdict[key] = {}
296302

297303
for key, value in mdict.items():
298-
if isinstance(value, dict) and level < len(mdims[MetricCV.JSON_STRUCTURE.value]) - 1:
304+
if (
305+
isinstance(value, dict)
306+
and level < len(mdims[MetricCV.JSON_STRUCTURE.value]) - 1
307+
and key != MetricCV.ATTRIBUTES.value
308+
):
299309
cls._fill(value, mdims, level + 1)
300310

301311
@classmethod
@@ -321,8 +331,8 @@ def merge(cls, metric_obj1: Any, metric_obj2: Any) -> Self:
321331

322332
merged_obj_dims = MetricDimensions.merge_dimension(mobj1.DIMENSIONS, mobj2.DIMENSIONS)
323333

324-
result1 = mobj2.RESULTS
325-
result2 = mobj1.RESULTS
334+
result1 = mobj1.RESULTS
335+
result2 = mobj2.RESULTS
326336
merged_obj_rlts = cls._merge(dict(result1), result2)
327337

328338
cls._fill(merged_obj_rlts, merged_obj_dims.root)

packages/ref-core/tests/unit/pycmec/cmec_testdata/cmec_metric_sample.json

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,13 @@
3333
"Hydrology Cycle": {
3434
"overall score": 0.26,
3535
"bias": 0.70
36+
},
37+
"attributes":{
38+
"resolution": "low",
39+
"attr0": "1980-2015",
40+
"attr1":{},
41+
"attr2":[],
42+
"attr3": -999
3643
}
3744
},
3845
"CESM2": {
@@ -43,18 +50,44 @@
4350
"Hydrology Cycle": {
4451
"overall score": 0.61,
4552
"bias": 0.18
53+
},
54+
"attributes":{
55+
"resolution": "low",
56+
"attr0": "1980-2015",
57+
"attr1":{},
58+
"attr2":[],
59+
"attr3": -999
4660
}
4761
},
4862
"IPSL-CM5A-LR": {
4963
"Ecosystem and Carbon Cycle": {
5064
"overall score": 0.08,
5165
"bias": 0.92,
52-
"rmse": 0.34
66+
"rmse": 0.34,
67+
"attributes":{
68+
"overall score": "ILAMB scoring system",
69+
"bias": "mean bias",
70+
"rmse": "root mean square error"
71+
}
5372
},
5473
"Hydrology Cycle": {
5574
"overall score": 0.67,
5675
"rmse": 0.68
76+
},
77+
"attributes":{
78+
"attr1":{
79+
"attr2":{
80+
"attr3":{}
81+
}
82+
}
5783
}
84+
},
85+
"attributes":{
86+
"attr0": "1980-2015",
87+
"attr1":{},
88+
"attr2":[],
89+
"attr3": -999,
90+
"attr4":"first level"
5891
}
5992
}
6093
}

packages/ref-core/tests/unit/pycmec/test_cmec_metric.py

Lines changed: 54 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -198,18 +198,50 @@ def test_metric_merge():
198198
"GFDL-ESM2M": {
199199
"NinoSstDiversity_2": -75,
200200
"BiasTauxLonRmse": 5.959564210237568,
201-
}
201+
"attributes": {
202+
"NinoSstDiversity_2": "Nino SST diversity",
203+
"BiasTauxLonRmse": "Bias Taux RMSE",
204+
},
205+
},
206+
"attributes": {
207+
"package": "pmp",
208+
"NinoSstDiversity_2": {},
209+
},
202210
},
203211
}
204212
dict_ilamb = {
205213
"DIMENSIONS": {
206214
"json_structure": ["model", "metric"],
207-
"model": {"E3SM": {"name": "E3SM"}, "CESM": {"name": "CESM"}},
215+
"model": {
216+
"E3SM": {"name": "E3SM"},
217+
"CESM": {"name": "CESM"},
218+
"GFDL-ESM2M": {"name": "GFDL-ESM2M"},
219+
},
208220
"metric": {"carbon": {"name": "carbon"}},
209221
},
210222
"RESULTS": {
211-
"E3SM": {"carbon": {"overall score": 0.11, "bias": 0.56}},
212-
"CESM": {"carbon": {"overall score": 0.05, "bias": 0.72}},
223+
"E3SM": {
224+
"carbon": {
225+
"overall score": 0.11,
226+
"bias": 0.56,
227+
"attributes": {
228+
"score": "ILAMB scoring system",
229+
},
230+
},
231+
},
232+
"CESM": {
233+
"carbon": {"overall score": 0.05, "bias": 0.72},
234+
},
235+
"GFDL-ESM2M": {
236+
"carbon": {"overall score": 0.35, "bias": 0.37},
237+
"attributes": {
238+
"score": "ILAMB scoring system",
239+
},
240+
},
241+
"attributes": {
242+
"package": "ilamb",
243+
"overall score": {},
244+
},
213245
},
214246
}
215247

@@ -229,7 +261,13 @@ def test_metric_merge():
229261
},
230262
"RESULTS": {
231263
"E3SM": {
232-
"carbon": {"overall score": 0.11, "bias": 0.56},
264+
"carbon": {
265+
"overall score": 0.11,
266+
"bias": 0.56,
267+
"attributes": {
268+
"score": "ILAMB scoring system",
269+
},
270+
},
233271
"NinoSstDiversity_2": {},
234272
"BiasTauxLonRmse": {},
235273
},
@@ -241,7 +279,17 @@ def test_metric_merge():
241279
"GFDL-ESM2M": {
242280
"NinoSstDiversity_2": -75,
243281
"BiasTauxLonRmse": 5.959564210237568,
244-
"carbon": {},
282+
"attributes": {
283+
"NinoSstDiversity_2": "Nino SST diversity",
284+
"BiasTauxLonRmse": "Bias Taux RMSE",
285+
"score": "ILAMB scoring system",
286+
},
287+
"carbon": {"overall score": 0.35, "bias": 0.37},
288+
},
289+
"attributes": {
290+
"package": "ilamb",
291+
"NinoSstDiversity_2": {},
292+
"overall score": {},
245293
},
246294
},
247295
"PROVENANCE": None,

packages/ref-metrics-pmp/src/cmip_ref_metrics_pmp/pmp_driver.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ def process_json_result(
8787

8888
# Remove the "attributes" key from the RESULTS
8989
# This isn't standard CMEC output, but it is what PMP produces
90-
results = _remove_nested_key(json_result["RESULTS"], "attributes")
90+
results = json_result["RESULTS"]
9191

9292
cmec_metric["RESULTS"] = results
9393
cmec_metric["DIMENSIONS"] = dimensions

0 commit comments

Comments (0)