
Commit 50c5a3b

Merge branch 'main' into quartus_streaming_conv
2 parents: 6fd7f56 + b180fe1

4 files changed: +37 -8 lines

hls4ml/backends/vivado/vivado_backend.py

Lines changed: 32 additions & 1 deletion
@@ -14,6 +14,7 @@
 from hls4ml.backends import FPGABackend
 from hls4ml.backends.fpga.fpga_types import APTypeConverter, HLSTypeConverter, VivadoArrayVariableConverter
 from hls4ml.report import parse_vivado_report
+from hls4ml.utils.fixed_point_utils import ceil_log2

 class VivadoBackend(FPGABackend):
     def __init__(self):
@@ -129,7 +130,7 @@ def build(self, model, reset=False, csim=True, synth=True, cosim=False, validati

     def _validate_conv_strategy(self, layer):
         if layer.model.config.model_strategy.lower() != 'resource':
-            print('WARNING: Cannot use "Latency" model strategy for {} layer. Switching to "Resource" strategy.')
+            print(f'WARNING: Cannot use "Latency" model strategy for {layer.name} layer. Switching to "Resource" strategy.')
             layer.model.config.model_strategy = 'Resource'

     @layer_optimizer(Layer)
@@ -251,6 +252,36 @@ def init_depconv2d(self, layer):
         layer.set_attr('n_partitions', 1) #TODO Once we have SeparableConv implementation for io_parallel this should be set properly
         layer.set_attr('implementation', layer.model.config.get_conv_implementation(layer).lower())

+    def _set_pooling_accum_t(self, layer, pool_size):
+        extra_bits = ceil_log2(pool_size)
+        accum_t = layer.get_attr('accum_t')
+        accum_t.precision.fractional += extra_bits
+        accum_t.precision.integer += extra_bits
+
+    @layer_optimizer(Pooling1D)
+    def init_pooling1d(self, layer):
+        pool_size = layer.get_attr('pool_width')
+        self._set_pooling_accum_t(layer, pool_size)
+
+        layer.set_attr('implementation', layer.model.config.get_conv_implementation(layer).lower())
+
+    @layer_optimizer(Pooling2D)
+    def init_pooling2d(self, layer):
+        pool_size = layer.get_attr('pool_height') * layer.get_attr('pool_width')
+        self._set_pooling_accum_t(layer, pool_size)
+
+        layer.set_attr('implementation', layer.model.config.get_conv_implementation(layer).lower())
+
+    @layer_optimizer(GlobalPooling1D)
+    def init_global_pooling1d(self, layer):
+        pool_size = layer.get_attr('n_in')
+        self._set_pooling_accum_t(layer, pool_size)
+
+    @layer_optimizer(GlobalPooling2D)
+    def init_global_pooling2d(self, layer):
+        pool_size = layer.get_attr('in_height') * layer.get_attr('in_width')
+        self._set_pooling_accum_t(layer, pool_size)
+
     @layer_optimizer(Activation)
     def init_activation(self, layer):
         if 'table_t' not in layer.attributes:
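
The new _set_pooling_accum_t helper widens the accumulator by ceil_log2(pool_size) bits on both sides of the binary point: summing pool_size inputs can grow the integer part by that many bits, and dividing the sum by pool_size for average pooling needs the same number of extra fractional bits. Below is a minimal standalone sketch of that rule, assuming ceil_log2(n) behaves like math.ceil(math.log2(n)); FixedPrecision and widen_pooling_accum are hypothetical stand-ins for hls4ml's precision type and the helper above, not part of the library.

import math
from dataclasses import dataclass

@dataclass
class FixedPrecision:
    # Hypothetical stand-in for the precision object on layer.get_attr('accum_t'),
    # which exposes integer and fractional bit widths.
    integer: int
    fractional: int

def widen_pooling_accum(precision, pool_size):
    # Grow both sides of the binary point by ceil(log2(pool_size)) bits,
    # mirroring what _set_pooling_accum_t does in the diff above.
    extra_bits = math.ceil(math.log2(pool_size))
    return FixedPrecision(precision.integer + extra_bits,
                          precision.fractional + extra_bits)

# A 2x2 average pool over an ap_fixed<16,6>-style accumulator gains 2 bits each way.
print(widen_pooling_accum(FixedPrecision(integer=6, fractional=10), pool_size=4))
# FixedPrecision(integer=8, fractional=12)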

hls4ml/model/layers.py

Lines changed: 0 additions & 2 deletions
@@ -573,7 +573,6 @@ def initialize(self):
         dims = ['N_FILT_{}'.format(self.index), 'N_OUTPUTS_{}'.format(self.index)]
         self.add_output_variable(shape, dims)
         self.set_attr('pool_op', self.get_attr('class_name').split('Pooling')[0])
-        self.set_attr('implementation', self.model.config.get_conv_implementation(self).lower())

 class Pooling2D(Layer):
     _expected_attributes = [
@@ -607,7 +606,6 @@ def initialize(self):
         dims = ['N_FILT_{}'.format(self.index), 'OUT_HEIGHT_{}'.format(self.index), 'OUT_WIDTH_{}'.format(self.index)]
         self.add_output_variable(shape, dims)
         self.set_attr('pool_op', self.get_attr('class_name').split('Pooling')[0])
-        self.set_attr('implementation', self.model.config.get_conv_implementation(self).lower())

 class GlobalPooling1D(Layer):
     _expected_attributes = [

hls4ml/templates/vivado/nnet_utils/nnet_pooling_stream.h

Lines changed: 4 additions & 4 deletions
@@ -169,7 +169,7 @@ void compute_pool_buffer_2d(
     static int sX = 0; // stride X
     static int sY = 0; // stride Y

-    typename data_T::value_type pool_window[CONFIG_T::pool_height * CONFIG_T::pool_width];
+    typename CONFIG_T::accum_t pool_window[CONFIG_T::pool_height * CONFIG_T::pool_width];
     #pragma HLS ARRAY_PARTITION variable=pool_window complete

     static typename data_T::value_type kernel_data[CONFIG_T::pool_height * CONFIG_T::pool_width * CONFIG_T::n_filt];
@@ -192,7 +192,7 @@ void compute_pool_buffer_2d(
         }

         // Compute Pooling
-        res_pack[i_ic] = reduce_pool<typename data_T::value_type, CONFIG_T::pool_height * CONFIG_T::pool_width, CONFIG_T>(pool_window);
+        res_pack[i_ic] = reduce_pool<typename CONFIG_T::accum_t, CONFIG_T::pool_height * CONFIG_T::pool_width, CONFIG_T>(pool_window);
     }

     // Write to output
@@ -378,7 +378,7 @@ void compute_pool_buffer_1d(
     static int pX = 0;
     static int sX = 0;

-    typename data_T::value_type pool_window[CONFIG_T::pool_width];
+    typename CONFIG_T::accum_t pool_window[CONFIG_T::pool_width];
     #pragma HLS ARRAY_PARTITION variable=pool_window complete

     static typename data_T::value_type kernel_data[CONFIG_T::pool_width * CONFIG_T::n_filt];
@@ -402,7 +402,7 @@ void compute_pool_buffer_1d(
         }

         // Compute Pooling
-        res_pack[i_ic] = reduce_pool<typename data_T::value_type, CONFIG_T::pool_width, CONFIG_T>(pool_window);
+        res_pack[i_ic] = reduce_pool<typename CONFIG_T::accum_t, CONFIG_T::pool_width, CONFIG_T>(pool_window);
     }

     // Write to output
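
Switching pool_window and the reduce_pool call from data_T::value_type to CONFIG_T::accum_t lets the window buffer hold partial sums that no longer fit in the input precision. A toy plain-Python model of the effect (floats with quantization and saturation, no HLS types; saturate, window, int_bits, frac_bits are illustrative names, not hls4ml code):

import math

def saturate(value, int_bits, frac_bits):
    # Quantize and clamp to a signed fixed-point range (toy model, not ap_fixed).
    step = 2.0 ** -frac_bits
    top = 2.0 ** (int_bits - 1) - step
    bottom = -2.0 ** (int_bits - 1)
    return min(max(round(value / step) * step, bottom), top)

window = [1.75, 1.5, 1.75, 1.25]           # 2x2 pool window; values fit an ap_fixed<6,2>-like range
int_bits, frac_bits = 2, 4
extra = math.ceil(math.log2(len(window)))  # 2 extra bits for a 4-element window

narrow_sum = wide_sum = 0.0
for x in window:
    narrow_sum = saturate(narrow_sum + x, int_bits, frac_bits)              # accumulate at input precision
    wide_sum = saturate(wide_sum + x, int_bits + extra, frac_bits + extra)  # accumulate at widened accum_t

print(narrow_sum / len(window))  # 0.484375 -- the running sum saturated at 1.9375
print(wide_sum / len(window))    # 1.5625   -- exact average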

test/pytest/test_cnn_mnist.py

Lines changed: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ def keras_model(mnist_data):
 def test_mnist_cnn(keras_model, mnist_data, backend, io_type, strategy):
     x_train, y_train, x_test, y_test = mnist_data

-    hls_config = hls4ml.utils.config_from_keras_model(keras_model, granularity='name', default_precision='ap_fixed<32, 9>')
+    hls_config = hls4ml.utils.config_from_keras_model(keras_model, granularity='name')
     hls_config['Model']['Strategy'] = strategy
     hls_config['LayerName']['softmax']['Strategy'] = 'Stable'
     output_dir = str(test_root_path / 'hls4mlprj_cnn_mnist_{}_{}_{}'.format(backend, io_type, strategy))
