Commit 481a142

support activation layers
1 parent c35d4b6 commit 481a142

2 files changed: +114 -0 lines changed

hls4ml/converters/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@
 from hls4ml.converters.keras_to_hls import get_supported_keras_layers  # noqa: F401
 from hls4ml.converters.keras_to_hls import parse_keras_model  # noqa: F401
 from hls4ml.converters.keras_to_hls import keras_to_hls, register_keras_layer_handler
+from hls4ml.converters.keras_v3_to_hls import parse_keras_v3_model  # noqa: F401
 
 # from hls4ml.converters.pytorch_to_hls import parse_pytorch_model  # noqa: F401
 from hls4ml.model import ModelGraph
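
The one-line change above re-exports the Keras v3 parser at the package level (the # noqa: F401 marks it as an intentional re-export). A minimal sketch of the import path this enables; the function's signature and return value are not shown in this commit, so nothing about them is assumed here:

    # New top-level re-export added by this commit:
    from hls4ml.converters import parse_keras_v3_model

    # Equivalent to importing from the submodule directly:
    from hls4ml.converters.keras_v3_to_hls import parse_keras_v3_model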

hls4ml/converters/keras_v3/core.py

Lines changed: 113 additions & 0 deletions
@@ -1,3 +1,4 @@
+import inspect
 import typing
 from typing import Any, Sequence
 
@@ -89,3 +90,115 @@ def handle(
 
         config['class_name'] = class_name
         return config
+
+
+@register
+class KV3ActivationHandler(KerasV3LayerHandler):
+    handles = ('keras.src.layers.activations.activation.Activation',)
+
+    def handle(
+        self,
+        layer: 'keras.layers.Activation',
+        in_tensors: Sequence['KerasTensor'],
+        out_tensors: Sequence['KerasTensor'],
+    ):
+        import keras
+
+        config = {}
+        config.update(self.default_config)
+
+        activation = getattr(layer, 'activation', keras.activations.linear)
+        match activation:
+            case keras.activations.softmax:
+                class_name = 'Softmax'
+                config['axis'] = -1
+            case keras.activations.hard_sigmoid:
+                class_name = 'HardActivation'
+            case keras.activations.leaky_relu:
+                class_name = 'LeakyReLU'
+                signature = inspect.signature(keras.activations.leaky_relu)
+                config['activ_param'] = signature.parameters['negative_slope'].default
+            case keras.activations.elu:
+                class_name = 'ELU'
+                signature = inspect.signature(keras.activations.elu)
+                config['activ_param'] = signature.parameters['alpha'].default
+            case _:
+                class_name = 'Activation'
+
+        config['activation'] = activation.__name__
+        config['class_name'] = class_name
+        return (config,)
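
A keras.layers.Activation layer only stores the activation callable, not the arguments it was built with, so the handler above falls back to the defaults declared on the activation function itself via inspect.signature (the match statement used here needs Python 3.10 or newer). A small standalone sketch of that lookup; the printed defaults depend on the installed Keras 3 version:

    import inspect

    import keras

    # Read the default parameter values off the activation functions,
    # mirroring what the handler does for leaky_relu/elu wrapped in Activation.
    leaky_default = inspect.signature(keras.activations.leaky_relu).parameters['negative_slope'].default
    elu_default = inspect.signature(keras.activations.elu).parameters['alpha'].default
    print(leaky_default, elu_default)  # typically 0.2 and 1.0, depending on Keras version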
+
+
+@register
+class KV3ReLUHandler(KerasV3LayerHandler):
+    handles = (
+        'keras.src.layers.activations.leaky_relu.LeakyReLU',
+        'keras.src.layers.activations.prelu.PReLU',
+        'keras.src.layers.activations.relu.ReLU',
+    )
+
+    def handle(
+        self,
+        layer: 'keras.layers.ReLU',
+        in_tensors: Sequence['KerasTensor'],
+        out_tensors: Sequence['KerasTensor'],
+    ):
+        config = {}
+        config.update(self.default_config)
+
+        if layer.__class__.__name__ == 'ReLU':
+            config['class_name'] = 'Activation'
+            config['activation'] = 'relu'
+            return config
+
+        if layer.__class__.__name__ == 'PReLU':
+            config['class_name'] = 'PReLU'
+            config['param_data'] = np.array(layer.alpha)
+            config['activation'] = 'prelu'
+        else:
+            config['class_name'] = 'LeakyReLU'
+            config['activ_param'] = float(layer.negative_slope)
+            config['activation'] = 'leaky_relu'
+
+        return (config,)
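
Unlike the Activation wrapper, the dedicated layers carry their parameters as attributes, which is what this handler reads: negative_slope on LeakyReLU and the trained alpha weights on PReLU. A short sketch of those attributes, assuming a recent Keras 3 install:

    import keras
    import numpy as np

    leaky = keras.layers.LeakyReLU(negative_slope=0.1)
    print(float(leaky.negative_slope))   # 0.1, copied into config['activ_param']

    prelu = keras.layers.PReLU()
    prelu.build((None, 8))               # alpha weights are created at build time
    print(np.array(prelu.alpha).shape)   # per-channel slopes, stored as param_data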
+
+
+@register
+class KV3SoftmaxHandler(KerasV3LayerHandler):
+    handles = ('keras.src.layers.activations.softmax.Softmax',)
+
+    def handle(
+        self,
+        layer: 'keras.layers.Softmax',
+        in_tensors: Sequence['KerasTensor'],
+        out_tensors: Sequence['KerasTensor'],
+    ):
+        config = {}
+        config.update(self.default_config)
+
+        config['class_name'] = 'Softmax'
+        config['axis'] = layer.axis
+        config['activation'] = 'softmax'
+
+        return (config,)
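
The dedicated Softmax layer exposes its reduction axis directly, so this handler forwards layer.axis rather than hard-coding -1 the way the Activation path does. A tiny sketch, assuming a recent Keras 3 install:

    import keras

    print(keras.layers.Softmax().axis)        # -1 by default
    print(keras.layers.Softmax(axis=1).axis)  # forwarded into config['axis']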
+
+
+@register
+class KV3HardActivationHandler(KerasV3LayerHandler):
+    handles = ('keras.src.layers.activations.elu.ELU',)
+
+    def handle(
+        self,
+        layer: 'keras.layers.ELU',
+        in_tensors: Sequence['KerasTensor'],
+        out_tensors: Sequence['KerasTensor'],
+    ):
+        config = {}
+        config.update(self.default_config)
+
+        config['class_name'] = 'ELU'
+        config['activ_param'] = float(layer.alpha)
+        config['activation'] = 'elu'
+
+        return (config,)
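
Taken together, the four handlers cover the generic Activation wrapper plus the dedicated activation layers. A toy model like the following sketch would touch each registered class when parsed through the Keras v3 path; how that path is invoked end to end (for example via hls4ml.converters.convert_from_keras_model) is outside this commit, so that part is an assumption:

    import keras

    # Hypothetical toy model exercising every layer class registered above.
    inp = keras.Input(shape=(16,))
    x = keras.layers.Dense(16)(inp)
    x = keras.layers.Activation('elu')(x)               # KV3ActivationHandler
    x = keras.layers.LeakyReLU(negative_slope=0.1)(x)   # KV3ReLUHandler
    x = keras.layers.ELU(alpha=0.5)(x)                  # KV3HardActivationHandler (ELU)
    out = keras.layers.Softmax(axis=-1)(x)               # KV3SoftmaxHandler
    model = keras.Model(inp, out)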

0 commit comments