Commit 0c4d562

[NPU][MLU] Fix test_batch_norm & test_LeNet_MNIST & test_custom_pass (#1626)
1 parent e256d5c commit 0c4d562

6 files changed: 274 additions, 232 deletions


backends/mlu/tests/test_LeNet_MNIST.py

Lines changed: 9 additions & 11 deletions
@@ -13,10 +13,10 @@
 # limitations under the License.
 
 import os
-import shutil
 import time
 import argparse
 import datetime
+import tempfile
 import numpy as np
 
 import paddle
@@ -67,13 +67,9 @@ def test(epoch_id, test_loader, model, cost):
     )
 
 
-def infer(model_dir):
-    # model file
-    params_file = os.path.join(model_dir, "model.pdiparams")
-    model_file = os.path.join(model_dir, "model.pdmodel")
-
+def infer(model_dir, prefix):
     # create config
-    config = paddle_infer.Config(model_file, params_file)
+    config = paddle_infer.Config(model_dir, prefix)
     config.enable_custom_device("mlu")
 
     # create predictor
@@ -203,11 +199,13 @@ def main(args):
         model,
         input_spec=[paddle.static.InputSpec(shape=[None, 1, 28, 28], dtype="float32")],
     )
-    paddle.jit.save(model, "output/model")
 
-    # infernece and clear
-    infer("output")
-    shutil.rmtree("output")
+    with tempfile.TemporaryDirectory() as temp_dir:
+        prefix = "test_LeNet_MNIST"
+        paddle.jit.save(model, os.path.join(temp_dir, prefix))
+
+        # inference and clear
+        infer(temp_dir, prefix)
 
 
 class AverageMeter(object):
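
The pattern this diff introduces — save under a self-cleaning temporary directory, then point the inference Config at that directory plus a filename prefix — recurs in the files below. A minimal, self-contained sketch of the flow (TinyNet is a hypothetical stand-in for the LeNet model; the commit itself only touches the test scripts):

import os
import tempfile

import paddle
import paddle.inference as paddle_infer


class TinyNet(paddle.nn.Layer):
    # hypothetical stand-in; one parameter so a .pdiparams file is produced
    def __init__(self):
        super().__init__()
        self.alpha = paddle.create_parameter(shape=[1], dtype="float32")

    def forward(self, x):
        return x * self.alpha


model = paddle.jit.to_static(
    TinyNet(),
    input_spec=[paddle.static.InputSpec(shape=[None, 1, 28, 28], dtype="float32")],
)

with tempfile.TemporaryDirectory() as temp_dir:
    prefix = "test_LeNet_MNIST"
    # writes <temp_dir>/<prefix> model and parameter files
    paddle.jit.save(model, os.path.join(temp_dir, prefix))

    # directory + prefix form, replacing Config(model_file, params_file)
    config = paddle_infer.Config(temp_dir, prefix)
    predictor = paddle_infer.create_predictor(config)
# no shutil.rmtree needed: the context manager removes the directory

Compared with the old fixed "output" directory, a temporary directory keeps parallel test runs from clobbering each other and guarantees cleanup even when inference raises.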

backends/mlu/tests/unittests/test_batch_norm_op_mlu.py

Lines changed: 75 additions & 74 deletions
@@ -478,81 +478,82 @@ def test_with_place(place, data_layout, shape):
             ]
             ground_truth = {name: var_dict[name] for name in var_names}
 
-            program = base.Program()
-            with base.program_guard(program):
-                block = program.global_block()
-                for name in ground_truth:
-                    block.create_var(
-                        name=name, dtype="float32", shape=ground_truth[name].shape
+            with paddle.pir_utils.OldIrGuard():
+                program = base.Program()
+                with base.program_guard(program):
+                    block = program.global_block()
+                    for name in ground_truth:
+                        block.create_var(
+                            name=name, dtype="float32", shape=ground_truth[name].shape
+                        )
+                    inputs = {
+                        "X": block.var("x"),
+                        "Scale": block.var("scale"),
+                        "Bias": block.var("bias"),
+                        "Mean": block.var("mean"),
+                        "Variance": block.var("variance"),
+                    }
+                    attrs = {
+                        "epsilon": epsilon,
+                        "is_test": False,
+                        "data_layout": data_layout,
+                        "use_mkldnn": False,
+                        "fuse_with_relu": self.fuse_with_relu,
+                        "use_global_stats": self.use_global_stats,
+                    }
+                    if self.use_momentum_variable:
+                        inputs["MomentumTensor"] = block.var("momentum_var")
+                    else:
+                        attrs["momentum"] = momentum
+
+                    outputs = {
+                        "Y": block.var("y"),
+                        "MeanOut": block.var("mean"),  # share memory
+                        "VarianceOut": block.var("variance"),  # share memory
+                        "SavedMean": block.var("saved_mean"),
+                        "SavedVariance": block.var("saved_variance"),
+                    }
+                    block.create_var(name="reserve_space", dtype="float32")
+                    outputs["ReserveSpace"] = block.var("reserve_space")
+                    bn_op = block.append_op(
+                        type="batch_norm", inputs=inputs, outputs=outputs, attrs=attrs
+                    )
+                    block.create_var(name="y@GRAD", dtype="float32", shape=y.shape)
+
+                    # generate backward op_desc
+                    grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(
+                        bn_op.desc, self.no_grad_set, []
+                    )
+                    grad_op_desc = grad_op_desc_list[0]
+                    new_op_desc = block.desc.append_op()
+                    new_op_desc.copy_from(grad_op_desc)
+                    for var_name in grad_op_desc.output_arg_names():
+                        block.desc.var(var_name.encode("ascii"))
+                    grad_op_desc.infer_var_type(block.desc)
+                    grad_op_desc.infer_shape(block.desc)
+                    for arg in grad_op_desc.output_arg_names():
+                        grad_var = block.desc.find_var(arg.encode("ascii"))
+                        grad_var.set_dtype(core.VarDesc.VarType.FP32)
+
+                    program._sync_with_cpp()
+
+                    exe = base.Executor(place)
+                    out = exe.run(
+                        program,
+                        feed={
+                            name: var_dict[name]
+                            for name in [
+                                "x",
+                                "scale",
+                                "bias",
+                                "mean",
+                                "variance",
+                                "y@GRAD",
+                                "momentum_var",
+                            ]
+                        },
+                        fetch_list=self.fetch_list,
                     )
-                inputs = {
-                    "X": block.var("x"),
-                    "Scale": block.var("scale"),
-                    "Bias": block.var("bias"),
-                    "Mean": block.var("mean"),
-                    "Variance": block.var("variance"),
-                }
-                attrs = {
-                    "epsilon": epsilon,
-                    "is_test": False,
-                    "data_layout": data_layout,
-                    "use_mkldnn": False,
-                    "fuse_with_relu": self.fuse_with_relu,
-                    "use_global_stats": self.use_global_stats,
-                }
-                if self.use_momentum_variable:
-                    inputs["MomentumTensor"] = block.var("momentum_var")
-                else:
-                    attrs["momentum"] = momentum
-
-                outputs = {
-                    "Y": block.var("y"),
-                    "MeanOut": block.var("mean"),  # share memory
-                    "VarianceOut": block.var("variance"),  # share memory
-                    "SavedMean": block.var("saved_mean"),
-                    "SavedVariance": block.var("saved_variance"),
-                }
-                block.create_var(name="reserve_space", dtype="float32")
-                outputs["ReserveSpace"] = block.var("reserve_space")
-                bn_op = block.append_op(
-                    type="batch_norm", inputs=inputs, outputs=outputs, attrs=attrs
-                )
-                block.create_var(name="y@GRAD", dtype="float32", shape=y.shape)
-
-                # generate backward op_desc
-                grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(
-                    bn_op.desc, self.no_grad_set, []
-                )
-                grad_op_desc = grad_op_desc_list[0]
-                new_op_desc = block.desc.append_op()
-                new_op_desc.copy_from(grad_op_desc)
-                for var_name in grad_op_desc.output_arg_names():
-                    block.desc.var(var_name.encode("ascii"))
-                grad_op_desc.infer_var_type(block.desc)
-                grad_op_desc.infer_shape(block.desc)
-                for arg in grad_op_desc.output_arg_names():
-                    grad_var = block.desc.find_var(arg.encode("ascii"))
-                    grad_var.set_dtype(core.VarDesc.VarType.FP32)
-
-                program._sync_with_cpp()
-
-                exe = base.Executor(place)
-                out = exe.run(
-                    program,
-                    feed={
-                        name: var_dict[name]
-                        for name in [
-                            "x",
-                            "scale",
-                            "bias",
-                            "mean",
-                            "variance",
-                            "y@GRAD",
-                            "momentum_var",
-                        ]
-                    },
-                    fetch_list=self.fetch_list,
-                )
 
             for id, name in enumerate(self.fetch_list):
                 if name == "variance":

backends/mlu/tests/unittests/test_custom_pass_mlu.py

Lines changed: 37 additions & 15 deletions
@@ -14,6 +14,7 @@
 
 from __future__ import print_function, division
 
+import tempfile
 import os
 import numpy as np
 import unittest
@@ -33,33 +34,54 @@ def replace(x, y, z):
     return pattern, replace
 
 
-@paddle.jit.to_static(
-    input_spec=[
-        paddle.static.InputSpec([None, 32], "float32", "x"),
-        paddle.static.InputSpec([None, 32], "float32", "y"),
-        paddle.static.InputSpec([None, 32], "float32", "z"),
-    ]
-)
-def func(x, y, z):
-    return x + y + z
+class TestNet(paddle.nn.Layer):
+    def __init__(self):
+        super(TestNet, self).__init__()
+        if paddle.framework.use_pir_api():
+            # In PIR, the .pdiparams file is necessary when loading a model.
+            # At least one parameter is needed for paddle.jit.save to generate it.
+            self.alpha = paddle.create_parameter(shape=[1], dtype="float32")
 
-
-MODLE_FILE = "./saved_model"
+    @paddle.jit.to_static(
+        full_graph=True,
+        input_spec=[
+            paddle.static.InputSpec([None, 32], "float32", "x"),
+            paddle.static.InputSpec([None, 32], "float32", "y"),
+            paddle.static.InputSpec([None, 32], "float32", "z"),
+        ],
+    )
+    def forward(self, x, y, z):
+        return x + y + z
 
 
 class TestCustomPass(unittest.TestCase):
     def setUp(self):
+        self.temp_dir = tempfile.TemporaryDirectory()
+        self.prefix = "test_custom_pass"
+
         for lib in os.listdir(os.getenv("CUSTOM_DEVICE_ROOT")):
             if lib.endswith(".so"):
                 paddle.utils.cpp_extension.extension_utils.load_op_meta_info_and_register_op(
                     lib
                 )
-        paddle.jit.save(func, MODLE_FILE)
+
+        paddle.disable_static()
+        net = TestNet()
+        paddle.enable_static()
+        paddle.jit.save(net, os.path.join(self.temp_dir.name, self.prefix))
+
+    def tearDown(self):
+        self.temp_dir.cleanup()
 
     def test_my_add_n(self):
-        config = paddle.inference.Config()
-        config.set_prog_file(MODLE_FILE + ".pdmodel")
-        config.enable_memory_optim()
+        if paddle.framework.use_pir_api():
+            config = paddle.inference.Config(self.temp_dir.name, self.prefix)
+        else:
+            config = paddle.inference.Config()
+            config.set_prog_file(
+                os.path.join(self.temp_dir.name, self.prefix + ".pdmodel")
+            )
+            config.enable_memory_optim()
         config.enable_custom_device("mlu")
        pass_builder = config.pass_builder()
         pass_builder.append_pass("generate_add_n")
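
Two fixture-level ideas from this hunk recur across the commit: a per-test TemporaryDirectory that tearDown always cleans up, and an inference Config built differently under PIR versus legacy IR. A condensed sketch of just that fixture (SavedModelFixture and make_config are hypothetical names, not part of the commit):

import os
import tempfile
import unittest

import paddle


class SavedModelFixture(unittest.TestCase):
    def setUp(self):
        # one directory per test case; cleaned up even if the test fails
        self.temp_dir = tempfile.TemporaryDirectory()
        self.prefix = "my_model"

    def tearDown(self):
        self.temp_dir.cleanup()

    def make_config(self):
        if paddle.framework.use_pir_api():
            # PIR loads a saved model by directory + prefix
            return paddle.inference.Config(self.temp_dir.name, self.prefix)
        # legacy IR points at the .pdmodel program file explicitly
        config = paddle.inference.Config()
        config.set_prog_file(os.path.join(self.temp_dir.name, self.prefix + ".pdmodel"))
        config.enable_memory_optim()
        return config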

backends/npu/tests/test_LeNet_MNIST.py

Lines changed: 8 additions & 11 deletions
@@ -13,10 +13,10 @@
 # limitations under the License.
 
 import os
-import shutil
 import time
 import argparse
 import datetime
+import tempfile
 import numpy as np
 
 import paddle
@@ -67,13 +67,9 @@ def test(epoch_id, test_loader, model, cost):
     )
 
 
-def infer(model_dir):
-    # model file
-    params_file = os.path.join(model_dir, "model.pdiparams")
-    model_file = os.path.join(model_dir, "model.pdmodel")
-
+def infer(model_dir, prefix):
     # create config
-    config = paddle_infer.Config(model_file, params_file)
+    config = paddle_infer.Config(model_dir, prefix)
     config.enable_custom_device("npu")
 
     # create predictor
@@ -203,11 +199,12 @@ def main(args):
         model,
         input_spec=[paddle.static.InputSpec(shape=[None, 1, 28, 28], dtype="float32")],
    )
-    paddle.jit.save(model, "output/model")
+    with tempfile.TemporaryDirectory() as temp_dir:
+        prefix = "test_LeNet_MNIST"
+        paddle.jit.save(model, os.path.join(temp_dir, prefix))
 
-    # infernece and clear
-    infer("output")
-    shutil.rmtree("output")
+        # inference and clear
+        infer(temp_dir, prefix)
 
 
 class AverageMeter(object):
