@@ -85,6 +85,11 @@ def dataset_generator2(self):
85
85
for x , y in zip (self .x_train2 , self .y_train2 ):
86
86
yield np .array ([x ]), np .array ([y ])
87
87
88
+ def _batch (self , dims , batch_size ):
89
+ if dims [0 ] is None :
90
+ dims [0 ] = batch_size
91
+ return dims
92
+
88
93
def end_to_end_testing (self , original_model , clusters_check = None ):
89
94
"""Test End to End clustering."""
90
95
@@ -225,6 +230,79 @@ def testSparsityIsPreservedDuringTraining(self):
225
230
nr_of_unique_weights_after ,
226
231
clustering_params ["number_of_clusters" ])
227
232
233
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def testEndToEndSequential(self):
  """Test End to End clustering - sequential model."""
  model = keras.Sequential([
      layers.Dense(5, input_shape=(5,)),
      layers.Dense(5),
  ])

  def check_clusters(stripped_model):
    # The first dense kernel must end up with at most
    # `number_of_clusters` distinct weight values.
    kernel = stripped_model.get_weights()[0]
    distinct_weights = set(kernel.reshape(-1,).tolist())
    self.assertLessEqual(
        len(distinct_weights), self.params["number_of_clusters"])

  self.end_to_end_testing(model, check_clusters)
250
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def testEndToEndConv1DAndConv1DTranspose(self):
  """Test End to End clustering - model with Conv1D and Conv1DTranspose."""
  # Small functional model: a Conv1D followed by a Conv1DTranspose, both
  # bias-free so the kernels are the only weights to cluster.
  model_input = layers.Input(batch_shape=(1, 16))
  conv_out = layers.Conv1D(10, 16, 4, padding="valid", use_bias=False)(
      tf.expand_dims(model_input, axis=-1))
  deconv_out = layers.Conv1DTranspose(
      1, 16, 4, padding="valid", use_bias=False)(conv_out)
  model = keras.models.Model(inputs=model_input, outputs=[deconv_out])

  def apply_clustering(layer):
    # Wrap only the convolutional layers with the clustering wrapper.
    if isinstance(layer, (keras.layers.Conv1D, keras.layers.Conv1DTranspose)):
      return cluster.cluster_weights(layer, **self.params)
    return layer

  wrapped_model = keras.models.clone_model(
      model,
      clone_function=apply_clustering,
  )

  wrapped_model.compile(
      loss=keras.losses.categorical_crossentropy,
      optimizer="adam",
      metrics=["accuracy"]
  )
  # A single training step on random data is enough to exercise clustering.
  wrapped_model.fit(
      np.random.randn(*self._batch(model.input.get_shape().as_list(), 16)),
      np.random.randn(*self._batch(model.output.get_shape().as_list(), 16)),
      steps_per_epoch=1)
  stripped_model = cluster.strip_clustering(wrapped_model)

  for layer_index, expected_name in ((2, 'conv1d'), (3, 'conv1d_transpose')):
    layer = stripped_model.layers[layer_index]
    self.assertEqual(layer.name, expected_name)
    distinct_weights = np.unique(layer.weights[0].numpy().flatten())
    self.assertLessEqual(
        len(distinct_weights), self.params["number_of_clusters"])
288
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def testStripClusteringSequentialModelWithRegulariser(self):
  """Test End to End clustering - sequential model with a regularizer.

  Verifies that a kernel regularizer on a layer does not prevent the
  clustering wrappers from being stripped, and that the stripped model's
  weights are clustered down to at most `number_of_clusters` unique
  values. (The previous docstring claimed a config check, but the test
  exercises the full end-to-end clustering path like its siblings; the
  run_all_keras_modes decorator is added for consistency with them.)
  """
  original_model = keras.Sequential([
      layers.Dense(5, input_shape=(5,)),
      layers.Dense(5, kernel_regularizer=tf.keras.regularizers.L1(0.01)),
  ])

  def clusters_check(stripped_model):
    # First dense kernel: the number of distinct weights must not
    # exceed the configured number of clusters.
    weights_as_list = stripped_model.get_weights()[0].reshape(-1,).tolist()
    unique_weights = set(weights_as_list)
    self.assertLessEqual(len(unique_weights), self.params["number_of_clusters"])

  self.end_to_end_testing(original_model, clusters_check)
228
306
@keras_parameterized .run_all_keras_modes (always_skip_v1 = True )
229
307
def testEndToEndFunctional (self ):
230
308
"""Test End to End clustering - functional model."""
0 commit comments