Skip to content

Commit 428c02c

Browse files
authored
[Bug Fix] Move the log with "[Processing]" hint earlier and support outputs of which the type is list of list. (#448)
* Move the log with "[Processing]" hint earlier. * Flatten all the output tensors to a list when the returned outputs contain list of list.
1 parent 9188178 commit 428c02c

File tree

4 files changed

+75
-68
lines changed

4 files changed

+75
-68
lines changed

graph_net/paddle/test_compiler.py

Lines changed: 51 additions & 41 deletions
Original file line number · Diff line number · Diff line change
@@ -4,10 +4,6 @@
44
from pathlib import Path
55
import sys
66
import os
7-
from dataclasses import dataclass
8-
from contextlib import contextmanager
9-
import time
10-
import math
117
import numpy as np
128
import random
139
import platform
@@ -62,7 +58,7 @@ def get_hardward_name(args):
6258
)
6359
)
6460
)
65-
except Exception as e:
61+
except Exception:
6662
pass
6763
elif args.device == "cpu":
6864
hardware = platform.processor()
@@ -128,7 +124,7 @@ def get_static_model(args, model):
128124
backend=None,
129125
)
130126
static_model.eval()
131-
program = static_model.forward.concrete_program.main_program
127+
program = static_model.forward.concrete_program.main_program # noqa
132128
return static_model
133129

134130

@@ -225,47 +221,56 @@ def measure_performance(model_call, args, compiler, profile=False):
225221

226222

227223
def check_outputs(args, expected_out, compiled_out):
228-
if isinstance(expected_out, paddle.Tensor):
229-
expected_out = [expected_out]
230-
if isinstance(compiled_out, paddle.Tensor):
231-
compiled_out = [compiled_out]
232-
233-
eager_dtypes = [None] * len(expected_out)
234-
for i, tensor in enumerate(expected_out):
235-
eager_dtypes[i] = (
236-
str(tensor.dtype).replace("paddle.", "") if tensor is not None else "None"
237-
)
238-
239-
compiled_dtypes = [None] * len(compiled_out)
240-
for i, tensor in enumerate(compiled_out):
241-
compiled_dtypes[i] = (
242-
str(tensor.dtype).replace("paddle.", "") if tensor is not None else "None"
243-
)
244-
224+
def _flatten_outputs_to_list(outs):
225+
flattened_outs = outs
226+
if isinstance(outs, paddle.Tensor):
227+
flattened_outs = [outs]
228+
else:
229+
flattened_outs = [
230+
x
231+
for out in outs
232+
for x in (out if isinstance(out, (tuple, list)) else (out,))
233+
]
234+
return flattened_outs
235+
236+
expected_out = _flatten_outputs_to_list(expected_out)
237+
compiled_out = _flatten_outputs_to_list(compiled_out)
238+
239+
def _get_output_dtypes(outs):
240+
dtypes = [
241+
str(tensor.dtype).replace("paddle.", "")
242+
if isinstance(tensor, paddle.Tensor)
243+
else None
244+
for i, tensor in enumerate(outs)
245+
]
246+
return dtypes
247+
248+
eager_dtypes = _get_output_dtypes(expected_out)
249+
compiled_dtypes = _get_output_dtypes(compiled_out)
245250
type_match = test_compiler_util.check_output_datatype(
246251
args, eager_dtypes, compiled_dtypes
247252
)
248253

249-
eager_shapes = [None] * len(expected_out)
250-
for i, tensor in enumerate(expected_out):
251-
eager_shapes[i] = tensor.shape if tensor is not None else None
252-
253-
compiled_shapes = [None] * len(compiled_out)
254-
for i, tensor in enumerate(compiled_out):
255-
compiled_shapes[i] = tensor.shape if tensor is not None else None
254+
def _get_output_shapes(outs):
255+
shapes = [
256+
tensor.shape if isinstance(tensor, paddle.Tensor) else None
257+
for i, tensor in enumerate(outs)
258+
]
259+
return shapes
256260

261+
eager_shapes = _get_output_shapes(expected_out)
262+
compiled_shapes = _get_output_shapes(compiled_out)
257263
shape_match = test_compiler_util.check_output_shape(
258264
args, eager_shapes, compiled_shapes
259265
)
260266

261-
def transfer_to_float(origin_outputs):
267+
def _transfer_to_float(origin_outputs):
262268
outputs = []
263269
for item in origin_outputs:
264-
if (
265-
item is not None
266-
and isinstance(item, paddle.Tensor)
267-
and item.dtype not in [paddle.float32, paddle.float64]
268-
):
270+
if isinstance(item, paddle.Tensor) and item.dtype not in [
271+
paddle.float32,
272+
paddle.float64,
273+
]:
269274
item = item.astype("float32")
270275
outputs.append(item)
271276
return outputs
@@ -278,8 +283,8 @@ def transfer_to_float(origin_outputs):
278283
cmp_equal_func=get_cmp_equal,
279284
)
280285

281-
expected_out_fp32 = transfer_to_float(expected_out)
282-
compiled_out_fp32 = transfer_to_float(compiled_out)
286+
expected_out_fp32 = _transfer_to_float(expected_out)
287+
compiled_out_fp32 = _transfer_to_float(compiled_out)
283288
test_compiler_util.check_allclose(
284289
args,
285290
expected_out_fp32,
@@ -308,11 +313,16 @@ def check_and_print_gpu_utilization(compiler):
308313

309314

310315
def test_single_model(args):
316+
model_path = os.path.normpath(args.model_path)
317+
test_compiler_util.print_with_log_prompt(
318+
"[Processing]", model_path, args.log_prompt
319+
)
320+
311321
compiler = get_compiler_backend(args)
312322
check_and_print_gpu_utilization(compiler)
313323

314-
input_dict = get_input_dict(args.model_path)
315-
model = get_model(args.model_path)
324+
input_dict = get_input_dict(model_path)
325+
model = get_model(model_path)
316326
model.eval()
317327

318328
test_compiler_util.print_basic_config(
@@ -341,7 +351,7 @@ def test_single_model(args):
341351
compiled_time_stats = {}
342352
try:
343353
print("Run model in compiled mode.", file=sys.stderr, flush=True)
344-
input_spec = get_input_spec(args.model_path)
354+
input_spec = get_input_spec(model_path)
345355
compiled_model = compiler(model, input_spec)
346356
compiled_out, compiled_time_stats = measure_performance(
347357
lambda: compiled_model(**input_dict), args, compiler, profile=False

graph_net/paddle/test_reference_device.py

Lines changed: 10 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -1,14 +1,9 @@
11
import argparse
2-
import importlib.util
32
import paddle
4-
import time
5-
import numpy as np
6-
import random
73
import os
84
from pathlib import Path
95
from contextlib import redirect_stdout, redirect_stderr
106
import json
11-
import re
127
import sys
138
import traceback
149

@@ -28,18 +23,23 @@ def get_reference_output_path(reference_dir, model_path):
2823

2924

3025
def test_single_model(args):
31-
ref_log = get_reference_log_path(args.reference_dir, args.model_path)
32-
ref_dump = get_reference_output_path(args.reference_dir, args.model_path)
26+
model_path = os.path.normpath(args.model_path)
27+
ref_log = get_reference_log_path(args.reference_dir, model_path)
28+
ref_dump = get_reference_output_path(args.reference_dir, model_path)
3329
print(f"Reference log path: {ref_log}", file=sys.stderr, flush=True)
3430
print(f"Reference outputs path: {ref_dump}", file=sys.stderr, flush=True)
3531

3632
with open(ref_log, "w", encoding="utf-8") as log_f:
3733
with redirect_stdout(log_f), redirect_stderr(log_f):
34+
test_compiler_util.print_with_log_prompt(
35+
"[Processing]", model_path, args.log_prompt
36+
)
37+
3838
compiler = test_compiler.get_compiler_backend(args)
3939
test_compiler.check_and_print_gpu_utilization(compiler)
4040

41-
input_dict = test_compiler.get_input_dict(args.model_path)
42-
model = test_compiler.get_model(args.model_path)
41+
input_dict = test_compiler.get_input_dict(model_path)
42+
model = test_compiler.get_model(model_path)
4343
model.eval()
4444

4545
test_compiler_util.print_with_log_prompt(
@@ -55,7 +55,7 @@ def test_single_model(args):
5555
success = False
5656
time_stats = {}
5757
try:
58-
input_spec = test_compiler.get_input_spec(args.model_path)
58+
input_spec = test_compiler.get_input_spec(model_path)
5959
compiled_model = compiler(model, input_spec)
6060
outputs, time_stats = test_compiler.measure_performance(
6161
lambda: compiled_model(**input_dict),

graph_net/paddle/test_target_device.py

Lines changed: 14 additions & 15 deletions
Original file line number · Diff line number · Diff line change
@@ -1,12 +1,6 @@
11
import argparse
2-
import importlib.util
3-
import time
4-
import numpy as np
5-
import random
62
import os
7-
from pathlib import Path
83
import json
9-
import re
104
import sys
115
import traceback
126

@@ -64,11 +58,17 @@ def update_args_and_set_seed(args, model_path):
6458

6559

6660
def test_single_model(args):
61+
model_path = os.path.normpath(args.model_path)
62+
test_compiler_util.print_with_log_prompt(
63+
"[Processing]", model_path, args.log_prompt
64+
)
65+
args = update_args_and_set_seed(args, model_path)
66+
6767
compiler = test_compiler.get_compiler_backend(args)
6868
test_compiler.check_and_print_gpu_utilization(compiler)
6969

70-
input_dict = test_compiler.get_input_dict(args.model_path)
71-
model = test_compiler.get_model(args.model_path)
70+
input_dict = test_compiler.get_input_dict(model_path)
71+
model = test_compiler.get_model(model_path)
7272
model.eval()
7373

7474
test_compiler_util.print_basic_config(
@@ -80,7 +80,7 @@ def test_single_model(args):
8080
success = False
8181
time_stats = {}
8282
try:
83-
input_spec = test_compiler.get_input_spec(args.model_path)
83+
input_spec = test_compiler.get_input_spec(model_path)
8484
compiled_model = compiler(model, input_spec)
8585
outputs, time_stats = test_compiler.measure_performance(
8686
lambda: compiled_model(**input_dict), args, compiler, profile=False
@@ -95,17 +95,17 @@ def test_single_model(args):
9595

9696
test_compiler_util.print_running_status(args, success)
9797

98-
model_name = test_compiler_util.get_model_name(args.model_path)
99-
if test_compiler_util.get_subgraph_tag(args.model_path):
100-
model_name += "_" + test_compiler_util.get_subgraph_tag(args.model_path)
98+
model_name = test_compiler_util.get_model_name(model_path)
99+
if test_compiler_util.get_subgraph_tag(model_path):
100+
model_name += "_" + test_compiler_util.get_subgraph_tag(model_path)
101101

102102
ref_dump = test_reference_device.get_reference_output_path(
103-
args.reference_dir, args.model_path
103+
args.reference_dir, model_path
104104
)
105105
ref_out = paddle.load(str(ref_dump))
106106

107107
ref_log = test_reference_device.get_reference_log_path(
108-
args.reference_dir, args.model_path
108+
args.reference_dir, model_path
109109
)
110110
ref_time_stats = parse_time_stats_from_reference_log(ref_log)
111111

@@ -170,7 +170,6 @@ def main(args):
170170
test_compiler.init_env(args)
171171

172172
if path_utils.is_single_model_dir(args.model_path):
173-
args = update_args_and_set_seed(args, args.model_path)
174173
test_single_model(args)
175174
else:
176175
test_multi_models(args)

graph_net/test_compiler_util.py

Lines changed: 0 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -143,8 +143,6 @@ def print_with_log_prompt(key, value, log_prompt):
143143

144144
def print_basic_config(args, hardware_name, compile_framework_version):
145145
model_path = os.path.normpath(args.model_path)
146-
print_with_log_prompt("[Processing]", model_path, args.log_prompt)
147-
148146
model_name = get_model_name(model_path)
149147
print_with_log_prompt("[Config] model:", model_name, args.log_prompt)
150148

0 commit comments

Comments (0)