
Commit 8505e78

nghielme and jmitrevs authored
Added support for QONNX Resize node ingestion and tested with tiny UNet model (#1122)
* Added support for the `Resize` node from QONNX models
* Added a test on a tiny UNet model to exercise the `Resize` node
* pre-commit restyling
* Aesthetic fix
* Second aesthetic fix
* Added one test on a simpler model, `branched_model_ch_last.onnx`, extracted from the UNet model
* Example models commit updated
* An empty list is now appended to the shape of each input of the considered node when the input is empty
* Cleaned up some code and added removal of the RoI input from the `Resize` node
* Reverted some unneeded changes
* Added some minor checks related to the sizes parameter
* Minor fix
* Minor modification of the error message
* Minor fixes

---------

Co-authored-by: Jovan Mitrevski <[email protected]>
1 parent c320f50 commit 8505e78

File tree

6 files changed: +201 −15 lines changed


example-models

hls4ml/converters/onnx/reshape.py

Lines changed: 23 additions & 1 deletion
@@ -1,4 +1,4 @@
-from hls4ml.converters.onnx_to_hls import onnx_handler
+from hls4ml.converters.onnx_to_hls import get_onnx_attribute, onnx_handler
 
 
 @onnx_handler('Transpose')
@@ -36,3 +36,25 @@ def parse_flatten_layer(node, input_names, input_shapes, graph):
     layer['target_shape'] = [-1]  # does not contain batch dimension
 
     return layer
+
+
+@onnx_handler('Resize')
+def parse_resize_layer(node, input_names, input_shapes, graph):
+    layer = {}
+    layer['name'] = node.name
+    layer['class_name'] = 'Resize'
+    layer['inputs'] = input_names
+    layer['outputs'] = list(node.output)
+    layer['in_height'] = input_shapes[0][2]
+    layer['in_width'] = input_shapes[0][1]
+    layer['out_width'] = input_shapes[0][1]
+    layer['out_height'] = input_shapes[0][2]
+    layer['n_chan'] = input_shapes[0][3]
+    layer['algorithm'] = get_onnx_attribute(node, 'mode')
+    # The following is used in the initialize() method;
+    # a better solution would probably be a channels-last parameter at the QONNX level
+    layer['data_format'] = (
+        'channels_last' if any(node.domain == 'qonnx.custom_op.channels_last' for node in graph.node) else 'channels_first'
+    )
+
+    return layer
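
For context, a minimal sketch of the Resize layout this handler expects: three inputs, with the RoI initializer present but empty and the scale factors supplied as a constant. The tensor names, shapes, and scale ordering below are illustrative, not taken from the commit:

    import numpy as np
    import onnx
    from onnx import TensorProto, helper

    # Resize with three inputs (data, RoI, scales); RoI is present but left empty
    resize = helper.make_node('Resize', inputs=['x', 'roi', 'scales'], outputs=['y'], mode='nearest')
    graph = helper.make_graph(
        [resize],
        'resize_example',
        inputs=[helper.make_tensor_value_info('x', TensorProto.FLOAT, [1, 8, 8, 3])],
        outputs=[helper.make_tensor_value_info('y', TensorProto.FLOAT, [1, 16, 16, 3])],
        initializer=[
            helper.make_tensor('roi', TensorProto.FLOAT, [0], []),  # empty RoI
            helper.make_tensor('scales', TensorProto.FLOAT, [4], np.array([1.0, 2.0, 2.0, 1.0], dtype=np.float32)),
        ],
    )
    onnx.checker.check_model(helper.make_model(graph))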

hls4ml/model/layers.py

Lines changed: 60 additions & 13 deletions
@@ -1147,20 +1147,67 @@ class Resize(Layer):
     def initialize(self):
         inp = self.get_input_variable()
 
-        if self.get_attr('data_format') == 'channels_last':
-            if len(inp.shape) == 2:  # 1D -> width + chan
-                shape = [self.get_attr('out_width'), self.get_attr('n_chan')]
-                dims = [f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
-            elif len(inp.shape) == 3:  # 2D -> height + width + chan
-                shape = [self.get_attr('out_height'), self.get_attr('out_width'), self.get_attr('n_chan')]
-                dims = [f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
+        if len(self.inputs) > 1:
+            # To be ingested correctly by hls4ml, the QONNX Resize node should have 3 inputs, with RoI left empty
+            if len(self.inputs) == 2:
+                raise Exception(
+                    'The Resize node has exactly 2 inputs. '
+                    'This means either a version 10 node is being used '
+                    'or the resize operation is driven by the RoI parameter alone; '
+                    'neither case is supported in hls4ml'
+                )
+            if len(self.inputs) == 4:
+                raise Exception('The sizes parameter is not supported by hls4ml. Use scales instead')
+            # get the scales of the Resize node from the QONNX frontend
+            # see the documentation here: https://onnx.ai/onnx/operators/onnx__Resize.html
+            scales_idx = 2 if len(self.inputs) == 3 or len(self.inputs) == 4 else 1
+            scales = self.get_input_node(self.inputs[scales_idx]).get_attr('value')
+            if len(scales) == 4:  # Resize 2D
+                self.set_attr('out_width', int(self.get_attr('in_width') * scales[1]))
+                self.set_attr('out_height', int(self.get_attr('in_height') * scales[2]))
+                self.set_attr('n_chan', int(self.get_attr('n_chan') * scales[3]))
+            elif len(scales) == 3:  # Resize 1D
+                self.set_attr('out_width', int(self.get_attr('in_width') * scales[1]))
+                self.set_attr('n_chan', int(self.get_attr('n_chan') * scales[2]))
+            else:
+                raise Exception('Only Resize 1D and Resize 2D are supported in hls4ml')
+            if self.get_attr('data_format') == 'channels_last':
+                if len(inp.shape) == 2:  # 1D -> width + chan
+                    shape = [int(self.get_attr('out_width')), int(self.get_attr('n_chan'))]
+                    dims = [f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
+                elif len(inp.shape) == 3:  # 2D -> height + width + chan
+                    shape = [
+                        int(self.get_attr('out_height')),
+                        int(self.get_attr('out_width')),
+                        int(self.get_attr('n_chan')),
+                    ]
+                    dims = [f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
+            else:
+                if len(inp.shape) == 2:  # 1D -> width + chan
+                    shape = [int(self.get_attr('n_chan')), int(self.get_attr('out_width'))]
+                    dims = [f'N_CHAN_{self.index}', f'OUT_WIDTH_{self.index}']
+                elif len(inp.shape) == 3:  # 2D -> height + width + chan
+                    shape = [
+                        int(self.get_attr('n_chan')),
+                        int(self.get_attr('out_height')),
+                        int(self.get_attr('out_width')),
+                    ]
+                    dims = [f'N_CHAN_{self.index}', f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}']
         else:
-            if len(inp.shape) == 2:  # 1D -> width + chan
-                shape = [self.get_attr('n_chan'), self.get_attr('out_width')]
-                dims = [f'N_CHAN_{self.index}', f'OUT_WIDTH_{self.index}']
-            elif len(inp.shape) == 3:  # 2D -> height + width + chan
-                shape = [self.get_attr('n_chan'), self.get_attr('out_height'), self.get_attr('out_width')]
-                dims = [f'N_CHAN_{self.index}', f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}']
+            if self.get_attr('data_format') == 'channels_last':
+                if len(inp.shape) == 2:  # 1D -> width + chan
+                    shape = [self.get_attr('out_width'), self.get_attr('n_chan')]
+                    dims = [f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
+                elif len(inp.shape) == 3:  # 2D -> height + width + chan
+                    shape = [self.get_attr('out_height'), self.get_attr('out_width'), self.get_attr('n_chan')]
+                    dims = [f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}', f'N_CHAN_{self.index}']
+            else:
+                if len(inp.shape) == 2:  # 1D -> width + chan
+                    shape = [self.get_attr('n_chan'), self.get_attr('out_width')]
+                    dims = [f'N_CHAN_{self.index}', f'OUT_WIDTH_{self.index}']
+                elif len(inp.shape) == 3:  # 2D -> height + width + chan
+                    shape = [self.get_attr('n_chan'), self.get_attr('out_height'), self.get_attr('out_width')]
+                    dims = [f'N_CHAN_{self.index}', f'OUT_HEIGHT_{self.index}', f'OUT_WIDTH_{self.index}']
 
         self.add_output_variable(shape, dims, precision=inp.type.precision)
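
As a worked example of the new scales handling above (numbers are illustrative; the index layout follows the parsed attributes, where scales[1] scales the width, scales[2] the height, and scales[3] the channels):

    # a 2D resize of an 8x8, 3-channel input with scales [1.0, 2.0, 2.0, 1.0]
    scales = [1.0, 2.0, 2.0, 1.0]
    in_width, in_height, n_chan = 8, 8, 3

    out_width = int(in_width * scales[1])    # 16
    out_height = int(in_height * scales[2])  # 16
    n_chan = int(n_chan * scales[3])         # 3, the channel count is unchanged

Since len(scales) == 4, this takes the Resize 2D branch; a 3-element scales vector would take the Resize 1D branch instead.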

hls4ml/model/optimizer/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -34,6 +34,7 @@
     'parse_qonnx',
     [
         'reshape_constant',
+        'resize_remove_constants',
         'quant_constant_parameters',
         'quant_to_activation',
         'fuse_quant_with_constant',
hls4ml/model/optimizer/passes/resize_remove_constants.py

Lines changed: 38 additions & 0 deletions
@@ -0,0 +1,38 @@
+from warnings import warn
+
+from hls4ml.model.layers import Constant, Resize
+from hls4ml.model.optimizer import OptimizerPass
+
+
+class ResizeRemoveConstants(OptimizerPass):
+    """
+    This optimizer removes the RoI and Scales constant inputs from Resize nodes; if left in place, they cause issues in hls4ml.
+    """
+
+    def match(self, node):
+        is_match = isinstance(node, Resize) and len(node.inputs) > 1
+        return is_match
+
+    def transform(self, model, node):
+        """
+        Remove the RoI and Scales Constant inputs from the Resize node.
+        """
+        # see the documentation here: https://onnx.ai/onnx/operators/onnx__Resize.html
+        roi_index = 1
+        scales_idx = 2
+        scales_node = node.get_input_node(node.inputs[scales_idx])
+        node.inputs[scales_idx] = ''
+        if not isinstance(scales_node, Constant):
+            raise RuntimeError("Non-constant shape inputs are not supported")
+        model.remove_node(scales_node, rewire=False)
+        # The RoI input is always at position 1 when present
+        roi_node = node.get_input_node(node.inputs[roi_index])
+        if roi_node.get_attr('value'):
+            warn('The RoI value vector is not empty; note that RoI is not supported in hls4ml', stacklevel=2)
+        node.inputs[roi_index] = ''
+        if not isinstance(roi_node, Constant):
+            raise RuntimeError("Non-constant RoI inputs are not supported")
+        model.remove_node(roi_node, rewire=False)
+        # Clean all the '' inputs
+        node.inputs = list(filter(None, node.inputs))
+        return True
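
Schematically, the pass reduces a matched Resize node's input list to the data tensor alone. A small self-contained sketch of the bookkeeping (tensor names are illustrative):

    # Before the pass, the Resize node carries three inputs: data, RoI constant, scales constant
    inputs = ['conv1_out', 'resize_roi', 'resize_scales']

    # The pass blanks the RoI and scales entries after detaching their Constant nodes,
    # then drops the empty strings, exactly as in the filter(None, ...) call above
    inputs[1] = ''
    inputs[2] = ''
    inputs = list(filter(None, inputs))
    assert inputs == ['conv1_out']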

test/pytest/test_qonnx.py

Lines changed: 78 additions & 0 deletions
@@ -101,6 +101,32 @@ def sep_conv_model():
     return model
 
 
+@pytest.fixture(scope='module')
+def branched_model():
+    """
+    Load a branched model using separable convs, already channels-last and cleaned
+    """
+    dl_file = str(example_model_path / "onnx/branched_model_ch_last.onnx")
+    assert os.path.isfile(dl_file)
+
+    model = ModelWrapper(dl_file)
+
+    return model
+
+
+@pytest.fixture(scope='module')
+def tiny_unet_model():
+    """
+    Load the tiny UNet model, already channels-last and cleaned
+    """
+    dl_file = str(example_model_path / "onnx/tiny_unet_ch_last.onnx")
+    assert os.path.isfile(dl_file)
+
+    model = ModelWrapper(dl_file)
+
+    return model
+
+
 @pytest.fixture(scope='module')
 def two_layer_keras_model():
     """
@@ -309,6 +335,58 @@ def test_sep_conv(sep_conv_model, backend):
     np.testing.assert_allclose(y_qonnx.ravel(), y_hls4ml.ravel(), atol=1e-2, rtol=1)
 
 
+@pytest.mark.parametrize('backend', ['Vitis'])
+def test_branched_model(branched_model, backend):
+    model = branched_model
+    ishape = tuple(model.get_tensor_shape(model.graph.input[0].name))
+    X = np.random.uniform(low=0, high=1, size=np.prod(ishape)).reshape(ishape)
+    X = (np.round(X * 2**16) * 2**-16).astype(np.float32)
+    idict = {model.graph.input[0].name: X}
+    y_qonnx = oxe.execute_onnx(model, idict)[model.graph.output[0].name]
+
+    config = hls4ml.utils.config.config_from_onnx_model(
+        model, granularity='name', backend=backend, default_precision='fixed<32,16>'
+    )
+    hls_model = hls4ml.converters.convert_from_onnx_model(
+        model,
+        output_dir=str(test_root_path / f'hls4mlprj_qonnx_branched_model_{backend}'),
+        io_type='io_stream',
+        backend=backend,
+        hls_config=config,
+    )
+    hls_model.compile()
+    y_hls4ml = hls_model.predict(np.ascontiguousarray(X))
+
+    np.testing.assert_array_equal(y_qonnx.ravel(), y_hls4ml.ravel())
+
+
+@pytest.mark.parametrize('backend', ['Vitis'])
+def test_tiny_unet_model(tiny_unet_model, backend):
+
+    model = tiny_unet_model
+    ishape = tuple(model.get_tensor_shape(model.graph.input[0].name))
+    X = np.random.uniform(low=0, high=1, size=np.prod(ishape)).reshape(ishape)
+    X = (np.round(X * 2**16) * 2**-16).astype(np.float32)
+    idict = {model.graph.input[0].name: X}
+    y_qonnx = oxe.execute_onnx(model, idict)[model.graph.output[0].name]
+
+    config = hls4ml.utils.config.config_from_onnx_model(
+        model, granularity='name', backend=backend, default_precision='fixed<32,16>'
+    )
+
+    hls_model = hls4ml.converters.convert_from_onnx_model(
+        model,
+        output_dir=str(test_root_path / f'hls4mlprj_qonnx_tiny_unet_model_{backend}'),
+        io_type='io_stream',
+        backend=backend,
+        hls_config=config,
+    )
+    hls_model.compile()
+    y_hls4ml = hls_model.predict(np.ascontiguousarray(X))
+
+    np.testing.assert_array_equal(y_qonnx.ravel(), y_hls4ml.ravel())
+
+
 @pytest.mark.parametrize(
     'model_name',
     [
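
A note on the input preparation shared by both new tests: snapping X to a grid of 2**-16 makes each value exactly representable in the fixed<32,16> precision used for the hls4ml model (32 total bits, 16 integer bits, hence 16 fractional bits), which is what allows the bit-exact assert_array_equal comparison. A minimal standalone illustration:

    import numpy as np

    X = np.random.uniform(low=0, high=1, size=16).astype(np.float32)
    X_q = (np.round(X * 2**16) * 2**-16).astype(np.float32)

    # round-to-nearest on a 2**-16 grid moves each value by at most half a step
    assert np.all(np.abs(X - X_q) <= 2**-17)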
