Skip to content

Commit f19c08b

Browse files
Feature: support fans v0.4 (#33)
* bump fans version
* remove pointless entry-points
* def input generator tool
* use input generator tool
* change macroscale_loading type to List
* code cleanup
* filename -> filepath
* move results input to metadata.options and add results_prefix input under metadata.options
* satisfy ruff
1 parent dd85e04 commit f19c08b

File tree

5 files changed

+67
-84
lines changed

5 files changed

+67
-84
lines changed

pyproject.toml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -23,15 +23,15 @@ dependencies = [
2323
]
2424

2525
# Entry Points
26-
[project.entry-points."aiida.data"]
27-
"fans" = "aiida_fans.data:FANSParameters"
26+
# [project.entry-points."aiida.data"]
27+
# "fans" = "aiida_fans.data:FANSParameters"
2828
[project.entry-points."aiida.calculations"]
2929
"fans.stashed" = "aiida_fans.calculations:FansStashedCalculation"
3030
"fans.fragmented" = "aiida_fans.calculations:FansFragmentedCalculation"
3131
[project.entry-points."aiida.parsers"]
3232
"fans" = "aiida_fans.parsers:FansParser"
33-
[project.entry-points."aiida.cmdline.data"]
34-
"fans" = "aiida_fans.cli:data_cli"
33+
# [project.entry-points."aiida.cmdline.data"]
34+
# "fans" = "aiida_fans.cli:data_cli"
3535

3636
# Build System
3737
[build-system]
@@ -64,7 +64,7 @@ dependencies = {aiida-fans = "==0.1.5"}
6464
# [tool.pixi.feature.aiida]
6565
# dependencies = {aiida-core = "2.6.*"}
6666
[tool.pixi.feature.fans]
67-
dependencies = {fans = "0.3.*"}
67+
dependencies = {fans = "0.4.*"}
6868
[tool.pixi.feature.ruff]
6969
dependencies = {ruff = "*"}
7070
tasks = {fmt = "ruff check", dummy = "echo dummy", my-dummy="echo my-dummy"}

src/aiida_fans/calculations.py

Lines changed: 13 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,11 @@
88
from aiida.common.folders import Folder
99
from aiida.engine import CalcJob
1010
from aiida.engine.processes.process_spec import CalcJobProcessSpec
11-
from aiida.orm import ArrayData, Dict, Float, Int, List, SinglefileData, Str
11+
from aiida.orm import Dict, Float, Int, List, SinglefileData, Str
1212
from h5py import File as h5File
1313

14+
from aiida_fans.helpers import make_input_dict
15+
1416

1517
class FansCalcBase(CalcJob):
1618
"""Base class of all calculations using FANS."""
@@ -20,9 +22,8 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
2022
"""Define inputs, outputs, and exit codes of the calculation."""
2123
super().define(spec)
2224

23-
# Metadata
25+
# Default Metadata
2426
spec.inputs["metadata"]["label"].default = "FANS"
25-
# spec.inputs["metadata"]["dry_run"].default = True
2627
## Processing Power
2728
spec.inputs["metadata"]["options"]["withmpi"].default = True
2829
spec.inputs["metadata"]["options"]["resources"].default = {
@@ -35,6 +36,10 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
3536
## Parser
3637
spec.inputs["metadata"]["options"]["parser_name"].default = "fans"
3738

39+
# Custom Metadata
40+
spec.input("metadata.options.results_prefix", valid_type=str, default="")
41+
spec.input("metadata.options.results", valid_type=list, default=[])
42+
3843
# Input Ports
3944
## Microstructure Definition
4045
spec.input_namespace("microstructure")
@@ -53,9 +58,7 @@ def define(cls, spec: CalcJobProcessSpec) -> None:
5358
spec.input("error_parameters.type", valid_type=Str)
5459
spec.input("error_parameters.tolerance", valid_type=Float)
5560
## Macroscale Loading Conditions
56-
spec.input("macroscale_loading", valid_type=ArrayData)
57-
## Results Specification
58-
spec.input("results", valid_type=List)
61+
spec.input("macroscale_loading", valid_type=List)
5962

6063
# Output Ports
6164
spec.output("output", valid_type=SinglefileData)
@@ -107,28 +110,8 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo:
107110
copyfileobj(source, target)
108111

109112
# input.json as dict
110-
input_dict = {
111-
## Microstructure Definition
112-
"ms_filename": str(ms_filepath), # path to stashed microstructure
113-
"ms_datasetname": self.inputs.microstructure.datasetname.value,
114-
"ms_L": self.inputs.microstructure.L.get_list(),
115-
## Problem Type and Material Model
116-
"problem_type": self.inputs.problem_type.value,
117-
"matmodel": self.inputs.matmodel.value,
118-
"material_properties": self.inputs.material_properties.get_dict(),
119-
## Solver Settings
120-
"method": self.inputs.method.value,
121-
"n_it": self.inputs.n_it.value,
122-
"error_parameters": {
123-
"measure": self.inputs.error_parameters.measure.value,
124-
"type": self.inputs.error_parameters.type.value,
125-
"tolerance": self.inputs.error_parameters.tolerance.value
126-
},
127-
## Macroscale Loading Conditions
128-
"macroscale_loading": [a[1].tolist() for a in self.inputs.macroscale_loading.get_iterarrays()],
129-
## Results Specification
130-
"results": self.inputs.results.get_list()
131-
}
113+
input_dict = make_input_dict(self)
114+
input_dict["microstructure"]["filepath"] = str(ms_filepath)
132115
# write input.json to working directory
133116
with folder.open(self.options.input_filename, "w", "utf8") as json:
134117
dump(input_dict, json, indent=4)
@@ -154,28 +137,8 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo:
154137
h5_src.copy(datasetname, h5_dest, name=datasetname)
155138

156139
# input.json as dict
157-
input_dict = {
158-
## Microstructure Definition
159-
"ms_filename": "microstructure.h5", # path to fragmented microstructure
160-
"ms_datasetname": self.inputs.microstructure.datasetname.value,
161-
"ms_L": self.inputs.microstructure.L.get_list(),
162-
## Problem Type and Material Model
163-
"problem_type": self.inputs.problem_type.value,
164-
"matmodel": self.inputs.matmodel.value,
165-
"material_properties": self.inputs.material_properties.get_dict(),
166-
## Solver Settings
167-
"method": self.inputs.method.value,
168-
"n_it": self.inputs.n_it.value,
169-
"error_parameters": {
170-
"measure": self.inputs.error_parameters.measure.value,
171-
"type": self.inputs.error_parameters.type.value,
172-
"tolerance": self.inputs.error_parameters.tolerance.value
173-
},
174-
## Macroscale Loading Conditions
175-
"macroscale_loading": [a[1].tolist() for a in self.inputs.macroscale_loading.get_iterarrays()],
176-
## Results Specification
177-
"results": self.inputs.results.get_list()
178-
}
140+
input_dict = make_input_dict(self)
141+
input_dict["microstructure"]["filepath"] = "microstructure.h5"
179142
# write input.json to working directory
180143
with folder.open(self.options.input_filename, "w", "utf8") as json:
181144
dump(input_dict, json, indent=4)

src/aiida_fans/helpers.py

Lines changed: 30 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,30 +1,38 @@
1-
"""Tools required by aiida_fans."""
1+
"""Tools required by aiida-fans."""
22

3-
import json
3+
from typing import Any
44

5-
from aiida.orm import ArrayData, Dict, Float, Int, List, SinglefileData, Str
5+
from aiida.engine import CalcJob
66
from numpy import allclose, ndarray
77

88

9-
class InputEncoder(json.JSONEncoder):
10-
"""Prepares a dictionary of calcjob inputs for json representation."""
11-
12-
def default(self, obj):
13-
"""Converts aiida datatypes to their python counterparts."""
14-
match obj:
15-
case Str() | Int() | Float():
16-
return obj.value
17-
case List():
18-
return obj.get_list()
19-
case Dict():
20-
return obj.get_dict()
21-
case ArrayData():
22-
return [a[1].tolist() for a in obj.get_iterarrays()] #! Caution: may be disordered
23-
case SinglefileData():
24-
return obj.filename
25-
case _:
26-
# Let the base class default method raise the TypeError
27-
return super().default(obj)
9+
def make_input_dict(job: CalcJob) -> dict[str, Any]:
10+
"""Prepares a dictionary that maps to an input.json from calcjob inputs."""
11+
return {
12+
## Microstructure Definition
13+
"microstructure": {
14+
"filepath": None, # path to stashed microstructure, must be overwritten by impl
15+
"datasetname": job.inputs.microstructure.datasetname.value,
16+
"L": job.inputs.microstructure.L.get_list()
17+
},
18+
"results_prefix": job.inputs.metadata.options.results_prefix,
19+
## Problem Type and Material Model
20+
"problem_type": job.inputs.problem_type.value,
21+
"matmodel": job.inputs.matmodel.value,
22+
"material_properties": job.inputs.material_properties.get_dict(),
23+
## Solver Settings
24+
"method": job.inputs.method.value,
25+
"n_it": job.inputs.n_it.value,
26+
"error_parameters": {
27+
"measure": job.inputs.error_parameters.measure.value,
28+
"type": job.inputs.error_parameters.type.value,
29+
"tolerance": job.inputs.error_parameters.tolerance.value
30+
},
31+
## Macroscale Loading Conditions
32+
"macroscale_loading": job.inputs.macroscale_loading.get_list(),
33+
## Results Specification
34+
"results": job.inputs.metadata.options.results
35+
}
2836

2937
def arraydata_equal(first: dict[str, ndarray], second: dict[str, ndarray]) -> bool:
3038
"""Return whether two dicts of arrays are roughly equal."""

src/aiida_fans/parsers.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,11 @@ def parse(self, **kwargs) -> ExitCode | None:
2626
return self.exit_codes.ERROR_MISSING_OUTPUT
2727

2828
with h5File(output_path) as h5:
29-
results = h5[self.node.inputs.microstructure.datasetname.value]
29+
results = h5[
30+
self.node.inputs.microstructure.datasetname.value + \
31+
"_results/" + \
32+
self.node.get_option('results_prefix')
33+
]
3034
results.visititems(self.parse_h5)
3135

3236
if self.results_dict:

src/aiida_fans/utils.py

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from aiida_fans.helpers import arraydata_equal
1111

1212

13-
def aiida_type(value : Any) -> type[Data]:
13+
def aiida_type(value: Any) -> type[Data]:
1414
"""Find the corresponding AiiDA datatype for a variable with pythonic type.
1515
1616
Args:
@@ -39,7 +39,7 @@ def aiida_type(value : Any) -> type[Data]:
3939
case _:
4040
raise NotImplementedError(f"Received an input of value: {value} with type: {type(value)}")
4141

42-
def fetch(label : str, value : Any) -> list[Node]:
42+
def fetch(label: str, value: Any) -> list[Node]:
4343
"""Return a list of nodes matching the label and value provided.
4444
4545
Args:
@@ -61,12 +61,16 @@ def fetch(label : str, value : Any) -> list[Node]:
6161
else:
6262
array_nodes = []
6363
for array_node in nodes:
64-
array_value = {k:v for k, v in [(name, array_node.get_array(name)) for name in array_node.get_arraynames()]}
64+
array_value = {
65+
k: v for k, v in [
66+
(name, array_node.get_array(name)) for name in array_node.get_arraynames() # type: ignore
67+
]
68+
}
6569
if arraydata_equal(value, array_value):
6670
array_nodes.append(array_node)
6771
return array_nodes
6872

69-
def generate(label : str, value : Any) -> Node:
73+
def generate(label: str, value: Any) -> Node:
7074
"""Return a single node with the label and value provided.
7175
7276
Uses an existing node when possible, but otherwise creates one instead.
@@ -89,7 +93,7 @@ def generate(label : str, value : Any) -> Node:
8993
else:
9094
raise RuntimeError
9195

92-
def convert(ins : dict[str, Any], path : list[str] = []):
96+
def convert(ins: dict[str, Any], path: list[str] = []):
9397
"""Takes a dictionary of inputs and converts the values to their respective Nodes.
9498
9599
Args:
@@ -104,7 +108,7 @@ def convert(ins : dict[str, Any], path : list[str] = []):
104108
else:
105109
ins[k] = generate(".".join([*path, k]), v)
106110

107-
def compile_query(ins : dict[str,Any], qb : QueryBuilder) -> None:
111+
def compile_query(ins: dict[str,Any], qb: QueryBuilder) -> None:
108112
"""Interate over the converted input dictionary and append to the QueryBuilder for each node.
109113
110114
Args:
@@ -175,6 +179,10 @@ def execute_fans(
175179
print("ERROR: Calculation strategy must be either 'Fragmented' or 'Stashed'.")
176180
raise ValueError
177181

182+
# move results_prefix and results items to metadata.options
183+
inputs.setdefault("metadata", {}).setdefault("options", {})["results_prefix"] = inputs.pop("results_prefix", "")
184+
inputs.setdefault("metadata", {}).setdefault("options", {})["results"] = inputs.pop("results", [])
185+
178186
# fetch the inputs if possible or otherwise create them
179187
convert(inputs)
180188

0 commit comments

Comments
 (0)