
Commit 4bea2b6

【PPSCI Export&Infer No.2】Add export & inference for DeepONet (#901)
* add DeepONet export and infer
* update docstring of geometry
* Update deeponet.py
* Update deeponet.py
1 parent 3f26f48 commit 4bea2b6

4 files changed: +122 −73 lines

docs/zh/examples/deeponet.md

Lines changed: 12 additions & 0 deletions
````diff
@@ -26,6 +26,18 @@
     python deeponet.py mode=eval EVAL.pretrained_model_path=https://paddle-org.bj.bcebos.com/paddlescience/models/deeponet/deeponet_pretrained.pdparams
     ```
 
+=== "模型导出命令"
+
+    ``` sh
+    python deeponet.py mode=export
+    ```
+
+=== "模型推理命令"
+
+    ``` sh
+    python deeponet.py mode=infer
+    ```
+
 | 预训练模型 | 指标 |
 |:--| :--|
 | [deeponet_pretrained.pdparams](https://paddle-org.bj.bcebos.com/paddlescience/models/deeponet/deeponet_pretrained.pdparams) | loss(G_eval): 0.00003<br>L2Rel.G(G_eval): 0.01799 |
````

examples/operator_learning/conf/deeponet.yaml

Lines changed: 18 additions & 0 deletions
```diff
@@ -60,3 +60,21 @@ TRAIN:
 EVAL:
   pretrained_model_path: null
   eval_with_no_grad: true
+
+# inference settings
+INFER:
+  pretrained_model_path: "https://paddle-org.bj.bcebos.com/paddlescience/models/deeponet/deeponet_pretrained.pdparams"
+  export_path: ./inference/deeponet
+  pdmodel_path: ${INFER.export_path}.pdmodel
+  pdiparams_path: ${INFER.export_path}.pdiparams
+  device: gpu
+  engine: native
+  precision: fp32
+  onnx_path: ${INFER.export_path}.onnx
+  ir_optim: true
+  min_subgraph_size: 10
+  gpu_mem: 4000
+  gpu_id: 0
+  max_batch_size: 128
+  num_cpu_threads: 4
+  batch_size: 128
```
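The `${INFER.export_path}` entries are Hydra/OmegaConf interpolations, so the `.pdmodel`, `.pdiparams`, and `.onnx` paths follow `export_path` automatically. A minimal standalone sketch of that resolution behaviour (not part of the commit, shown only to illustrate how the config resolves):

```python
from omegaconf import OmegaConf

# Standalone sketch: ${INFER.export_path} is resolved against the config tree,
# so the model/param paths track export_path without being written out twice.
cfg = OmegaConf.create(
    {
        "INFER": {
            "export_path": "./inference/deeponet",
            "pdmodel_path": "${INFER.export_path}.pdmodel",
            "pdiparams_path": "${INFER.export_path}.pdiparams",
        }
    }
)
print(OmegaConf.to_container(cfg, resolve=True)["INFER"])
# {'export_path': './inference/deeponet',
#  'pdmodel_path': './inference/deeponet.pdmodel',
#  'pdiparams_path': './inference/deeponet.pdiparams'}
```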

examples/operator_learning/deeponet.py

Lines changed: 59 additions & 61 deletions
```diff
@@ -89,66 +89,10 @@ def train(cfg: DictConfig):
     # evaluate after finished training
     solver.eval()
 
-    # visualize prediction for different functions u and corresponding G(u)
-    dtype = paddle.get_default_dtype()
-
-    def generate_y_u_G_ref(
-        u_func: Callable, G_u_func: Callable
-    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
-        """Generate discretized data of given function u and corresponding G(u).
-
-        Args:
-            u_func (Callable): Function u.
-            G_u_func (Callable): Function G(u).
+    def predict_func(input_dict):
+        return solver.predict(input_dict, return_numpy=True)[cfg.MODEL.G_key]
 
-        Returns:
-            Tuple[np.ndarray, np.ndarray, np.ndarray]: Discretized data of u, y and G(u).
-        """
-        x = np.linspace(0, 1, cfg.MODEL.num_loc, dtype=dtype).reshape(
-            [1, cfg.MODEL.num_loc]
-        )
-        u = u_func(x)
-        u = np.tile(u, [cfg.NUM_Y, 1])
-
-        y = np.linspace(0, 1, cfg.NUM_Y, dtype=dtype).reshape([cfg.NUM_Y, 1])
-        G_ref = G_u_func(y)
-        return u, y, G_ref
-
-    func_u_G_pair = [
-        # (title_string, func_u, func_G(u)), s.t. dG/dx == u and G(u)(0) = 0
-        (r"$u=\cos(x), G(u)=sin(x$)", lambda x: np.cos(x), lambda y: np.sin(y)),  # 1
-        (
-            r"$u=sec^2(x), G(u)=tan(x$)",
-            lambda x: (1 / np.cos(x)) ** 2,
-            lambda y: np.tan(y),
-        ),  # 2
-        (
-            r"$u=sec(x)tan(x), G(u)=sec(x) - 1$",
-            lambda x: (1 / np.cos(x) * np.tan(x)),
-            lambda y: 1 / np.cos(y) - 1,
-        ),  # 3
-        (
-            r"$u=1.5^x\ln{1.5}, G(u)=1.5^x-1$",
-            lambda x: 1.5**x * np.log(1.5),
-            lambda y: 1.5**y - 1,
-        ),  # 4
-        (r"$u=3x^2, G(u)=x^3$", lambda x: 3 * x**2, lambda y: y**3),  # 5
-        (r"$u=4x^3, G(u)=x^4$", lambda x: 4 * x**3, lambda y: y**4),  # 6
-        (r"$u=5x^4, G(u)=x^5$", lambda x: 5 * x**4, lambda y: y**5),  # 7
-        (r"$u=6x^5, G(u)=x^6$", lambda x: 5 * x**4, lambda y: y**5),  # 8
-        (r"$u=e^x, G(u)=e^x-1$", lambda x: np.exp(x), lambda y: np.exp(y) - 1),  # 9
-    ]
-
-    os.makedirs(os.path.join(cfg.output_dir, "visual"), exist_ok=True)
-    for i, (title, u_func, G_func) in enumerate(func_u_G_pair):
-        u, y, G_ref = generate_y_u_G_ref(u_func, G_func)
-        G_pred = solver.predict({"u": u, "y": y}, return_numpy=True)["G"]
-        plt.plot(y, G_pred, label=r"$G(u)(y)_{ref}$")
-        plt.plot(y, G_ref, label=r"$G(u)(y)_{pred}$")
-        plt.legend()
-        plt.title(title)
-        plt.savefig(os.path.join(cfg.output_dir, "visual", f"func_{i}_result.png"))
-        plt.clf()
+    plot(cfg, predict_func)
 
 
 def evaluate(cfg: DictConfig):
```
```diff
@@ -189,6 +133,50 @@ def evaluate(cfg: DictConfig):
     )
     solver.eval()
 
+    def predict_func(input_dict):
+        return solver.predict(input_dict, return_numpy=True)[cfg.MODEL.G_key]
+
+    plot(cfg, predict_func)
+
+
+def export(cfg: DictConfig):
+    # set model
+    model = ppsci.arch.DeepONet(**cfg.MODEL)
+
+    # initialize solver
+    solver = ppsci.solver.Solver(
+        model,
+        pretrained_model_path=cfg.INFER.pretrained_model_path,
+    )
+
+    # export model
+    from paddle.static import InputSpec
+
+    input_spec = [
+        {
+            model.input_keys[0]: InputSpec(
+                [None, 1000], "float32", name=model.input_keys[0]
+            ),
+            model.input_keys[1]: InputSpec(
+                [None, 1], "float32", name=model.input_keys[1]
+            ),
+        }
+    ]
+    solver.export(input_spec, cfg.INFER.export_path)
+
+
+def inference(cfg: DictConfig):
+    from deploy import python_infer
+
+    predictor = python_infer.GeneralPredictor(cfg)
+
+    def predict_func(input_dict):
+        return next(iter(predictor.predict(input_dict).values()))
+
+    plot(cfg, predict_func)
+
+
+def plot(cfg: DictConfig, predict_func: Callable):
     # visualize prediction for different functions u and corresponding G(u)
     dtype = paddle.get_default_dtype()
 
```
```diff
@@ -242,13 +230,17 @@ def generate_y_u_G_ref(
     os.makedirs(os.path.join(cfg.output_dir, "visual"), exist_ok=True)
     for i, (title, u_func, G_func) in enumerate(func_u_G_pair):
         u, y, G_ref = generate_y_u_G_ref(u_func, G_func)
-        G_pred = solver.predict({"u": u, "y": y}, return_numpy=True)["G"]
+        G_pred = predict_func({"u": u, "y": y})
         plt.plot(y, G_pred, label=r"$G(u)(y)_{ref}$")
         plt.plot(y, G_ref, label=r"$G(u)(y)_{pred}$")
         plt.legend()
         plt.title(title)
         plt.savefig(os.path.join(cfg.output_dir, "visual", f"func_{i}_result.png"))
+        logger.message(
+            f"Saved result of function {i} to {cfg.output_dir}/visual/func_{i}_result.png"
+        )
         plt.clf()
+        plt.close()
 
 
 @hydra.main(version_base=None, config_path="./conf", config_name="deeponet.yaml")
```
```diff
@@ -257,8 +249,14 @@ def main(cfg: DictConfig):
         train(cfg)
     elif cfg.mode == "eval":
         evaluate(cfg)
+    elif cfg.mode == "export":
+        export(cfg)
+    elif cfg.mode == "infer":
+        inference(cfg)
     else:
-        raise ValueError(f"cfg.mode should in ['train', 'eval'], but got '{cfg.mode}'")
+        raise ValueError(
+            f"cfg.mode should in ['train', 'eval', 'export', 'infer'], but got '{cfg.mode}'"
+        )
 
 
 if __name__ == "__main__":
```
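For reference, `export` above writes the static graph to `INFER.export_path` (by default `./inference/deeponet.pdmodel` / `.pdiparams`), and `GeneralPredictor` appears to wrap Paddle Inference around those files, judging by the `INFER` keys in the config. The snippet below is a rough, hand-written sketch of the equivalent raw Paddle Inference calls; the file paths and the `u`/`y` input names follow the export code above, but the snippet itself is illustrative and not part of the commit.

```python
import numpy as np
from paddle import inference

# Illustrative only: load the files written by `python deeponet.py mode=export`
# (paths assume the default INFER.export_path of ./inference/deeponet).
config = inference.Config(
    "./inference/deeponet.pdmodel", "./inference/deeponet.pdiparams"
)
config.enable_use_gpu(4000, 0)  # mirrors INFER.gpu_mem / INFER.gpu_id
predictor = inference.create_predictor(config)

# Branch input u: 1000 sensor values per sample; trunk input y: one query coordinate.
num_y = 1000
u = np.tile(np.cos(np.linspace(0, 1, 1000, dtype="float32")), [num_y, 1])
y = np.linspace(0, 1, num_y, dtype="float32").reshape([num_y, 1])
predictor.get_input_handle("u").copy_from_cpu(u)
predictor.get_input_handle("y").copy_from_cpu(y)

predictor.run()
G_pred = predictor.get_output_handle(predictor.get_output_names()[0]).copy_to_cpu()
print(G_pred.shape)  # expected: (1000, 1)
```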

ppsci/geometry/geometry.py

Lines changed: 33 additions & 12 deletions
```diff
@@ -25,6 +25,7 @@
 
 import numpy as np
 import paddle
+from typing_extensions import Literal
 
 from ppsci.utils import logger
 from ppsci.utils import misc
```
```diff
@@ -129,17 +130,22 @@ def uniform_points(self, n: int, boundary: bool = True) -> np.ndarray:
     def sample_interior(
         self,
         n: int,
-        random: str = "pseudo",
-        criteria: Optional[Callable] = None,
+        random: Literal["pseudo", "Halton", "LHS"] = "pseudo",
+        criteria: Optional[Callable[..., np.ndarray]] = None,
         evenly: bool = False,
         compute_sdf_derivatives: bool = False,
     ) -> Dict[str, np.ndarray]:
         """Sample random points in the geometry and return those meet criteria.
 
         Args:
             n (int): Number of points.
-            random (str): Random method. Defaults to "pseudo".
-            criteria (Optional[Callable]): Criteria function. Defaults to None.
+            random (Literal["pseudo", "Halton", "LHS"]): Random method. Defaults to "pseudo".
+                pseudo: Pseudo random.
+                Halton: Halton sequence.
+                LHS: Latin Hypercube Sampling.
+            criteria (Optional[Callable[..., np.ndarray]]): Criteria function. Given
+                coords from different dimensions, return a boolean array with shape [n,].
+                Defaults to None.
             evenly (bool): Evenly sample points. Defaults to False.
             compute_sdf_derivatives (bool): Compute SDF derivatives. Defaults to False.
 
```
226232
def sample_boundary(
227233
self,
228234
n: int,
229-
random: str = "pseudo",
230-
criteria: Optional[Callable] = None,
235+
random: Literal["pseudo", "Halton", "LHS"] = "pseudo",
236+
criteria: Optional[Callable[..., np.ndarray]] = None,
231237
evenly: bool = False,
232238
) -> Dict[str, np.ndarray]:
233239
"""Compute the random points in the geometry and return those meet criteria.
234240
235241
Args:
236242
n (int): Number of points.
237-
random (str): Random method. Defaults to "pseudo".
238-
criteria (Optional[Callable]): Criteria function. Defaults to None.
243+
random (Literal["pseudo", "Halton", "LHS"]): Random method. Defaults to "pseudo".
244+
pseudo: Pseudo random.
245+
Halton: Halton sequence.
246+
LHS: Latin Hypercube Sampling.
247+
criteria (Optional[Callable[..., np.ndarray]]): Criteria function. Given
248+
coords from differnet dimension and return a boolean array with shape [n,].
249+
Defaults to None.
239250
evenly (bool): Evenly sample points. Defaults to False.
240251
241252
Returns:
@@ -332,12 +343,17 @@ def sample_boundary(
332343
return {**x_dict, **normal_dict}
333344

334345
@abc.abstractmethod
335-
def random_points(self, n: int, random: str = "pseudo") -> np.ndarray:
346+
def random_points(
347+
self, n: int, random: Literal["pseudo", "Halton", "LHS"] = "pseudo"
348+
) -> np.ndarray:
336349
"""Compute the random points in the geometry.
337350
338351
Args:
339352
n (int): Number of points.
340-
random (str): Random method. Defaults to "pseudo".
353+
random (Literal["pseudo", "Halton", "LHS"]): Random method. Defaults to "pseudo".
354+
pseudo: Pseudo random.
355+
Halton: Halton sequence.
356+
LHS: Latin Hypercube Sampling.
341357
342358
Returns:
343359
np.ndarray: Random points in the geometry. The shape is [N, D].
@@ -379,12 +395,17 @@ def uniform_boundary_points(self, n: int) -> np.ndarray:
379395
return self.random_boundary_points(n)
380396

381397
@abc.abstractmethod
382-
def random_boundary_points(self, n: int, random: str = "pseudo") -> np.ndarray:
398+
def random_boundary_points(
399+
self, n: int, random: Literal["pseudo", "Halton", "LHS"] = "pseudo"
400+
) -> np.ndarray:
383401
"""Compute the random points on the boundary.
384402
385403
Args:
386404
n (int): Number of points.
387-
random (str): Random method. Defaults to "pseudo".
405+
random (Literal["pseudo", "Halton", "LHS"]): Random method. Defaults to "pseudo".
406+
pseudo: Pseudo random.
407+
Halton: Halton sequence.
408+
LHS: Latin Hypercube Sampling.
388409
389410
Returns:
390411
np.ndarray: Random points on the boundary. The shape is [N, D].
