@@ -206,20 +206,19 @@ def create_tensorflow_build_flags(self, models):
         for model in models:
             if self.is_tensorflow(model["classifier_config"]["classifier"]):
                 if self.nn_inference_engine == "nnom":
-                    #TODO: NNoM
+                    # TODO: NNoM
                     pass
 
                 if self.nn_inference_engine == "tf_micro":
                     return tf_micro_cflags
 
-
         return ""
 
     def create_kb_model_tf_micro_binary(self, models):
         for model in models:
             if self.is_tensorflow(model["classifier_config"]["classifier"]):
                 if self.nn_inference_engine == "nnom":
-                    #TODO: NNoM
+                    # TODO: NNoM
                     pass
 
                 if self.nn_inference_engine == "tf_micro":
@@ -233,6 +232,9 @@ def create_sml_classification_result_info(self, models_data):
         model_fill["TensorFlow Lite for Microcontrollers"] = (
             "{\n\ttf_micro_model_results_object(kb_models[model_index].classifier_id, (model_results_t *)model_results);\n}"
         )
+        model_fill["Neural Network"] = (
+            "{\n\ttf_micro_model_results_object(kb_models[model_index].classifier_id, (model_results_t *)model_results);\n}"
+        )
 
         model_fill["Decision Tree Ensemble"] = (
             "{\n\ttree_ensemble_model_results_object(kb_models[model_index].classifier_id, (model_results_t *)model_results);\n}"
@@ -256,6 +258,20 @@ def create_sml_classification_result_print_info(self, models_data):
 
 """
 
+        model_fill[
+            "Neural Network"
+        ] = """{
+    sml_classification_result_info(model_index, &model_result);\n
+    pbuf += sprintf(pbuf, ",\\"ModelDebug\\":[");
+    for (int32_t i=0; i<model_result.num_outputs; i++)
+    {
+        pbuf += sprintf(pbuf, "%f, ", model_result.output_tensor[i]);
+    }
+    pbuf += sprintf(pbuf, "]");
+}
+
+"""
+
         return self.create_case_fill_template_classifier_type(models_data, model_fill)
 
     def create_classifier_structures(self, classifier_types, kb_models):
@@ -264,16 +280,17 @@ def create_classifier_structures(self, classifier_types, kb_models):
         formated_classifier_types = [
             x.lower().replace(" ", "_") for x in classifier_types
         ]
-
+
         def is_neural_netwrok(classifier_type):
-            return (classifier_type == "tensorflow_lite_for_microcontrollers" or classifier_type == 'neural_network')
+            return (
+                classifier_type == "tensorflow_lite_for_microcontrollers"
+                or classifier_type == "neural_network"
+            )
 
         formated_classifier_types = [
             x if not is_neural_netwrok(x) else self.nn_inference_engine
             for x in formated_classifier_types
         ]
-
-
 
         logger.info(
             {
@@ -313,12 +330,18 @@ def get_classifier_type_map(self, classifier_config):
             return 3
         elif classifier_type in ["Bonsai"]:
             return 4
-        elif classifier_type in ["TF Micro", "TensorFlow Lite for Microcontrollers", "Neural Network"]:
+        elif classifier_type in [
+            "TF Micro",
+            "TensorFlow Lite for Microcontrollers",
+            "Neural Network",
+        ]:
             return 5
         elif classifier_type in ["Linear Regression"]:
             return 6
         else:
-            raise Exception(f"{classifier_type} not supported Classifier Type for code generation")
+            raise Exception(
+                f"{classifier_type} not supported Classifier Type for code generation"
+            )
 
     def create_debug_flagging(self):
         ret = []
@@ -354,7 +377,7 @@ def create_kb_classifier_headers(self, classifier_types):
             output.append('#include "bonsai_trained_models.h"')
 
         if self.is_tensorflow(classifier_types):
-            if self.nn_inference_engine == "nnom":
+            if self.nn_inference_engine == "nnom":
                 output.append('#include "nnom_trained_models.h"')
                 output.append('#include "nnom_middleware.h"')
 
@@ -385,7 +408,7 @@ def create_kb_classifier_header_calls_only(self, classifier_types):
 
         if self.is_tensorflow(classifier_types):
             if self.nn_inference_engine == "nnom":
-                #TODO: NNoM
+                # TODO: NNoM
                 pass
 
             if self.nn_inference_engine == "tf_micro":
@@ -424,7 +447,7 @@ def create_kb_classifier_init(self, classifier_types):
 
         if self.is_tensorflow(classifier_types):
             if self.nn_inference_engine == "nnom":
-                output.append(c_line(1, "nnom_init(nnom_classifier_rows, 0);"))
+                output.append(c_line(1, "nnom_init(nnom_classifier_rows, 0);"))
 
             if self.nn_inference_engine == "tf_micro":
                 output.append(c_line(1, "tf_micro_init(tf_micro_classifier_rows, 0);"))
@@ -482,15 +505,15 @@ def create_classifier_calls(self, models_data):
             elif model["classifier_config"]["classifier"] in [
                 "TF Micro",
                 "TensorFlow Lite for Microcontrollers",
-                "Neural Network"
+                "Neural Network",
             ]:
-                if self.nn_inference_engine == 'tf_micro':
+                if self.nn_inference_engine == "tf_micro":
                     output_str += c_line(
                         1,
                         "ret = tf_micro_simple_submit(kb_model->classifier_id, kb_model->pfeature_vector, kb_model->pmodel_results);",
                     )
-                if self.nn_inference_engine == 'nnom':
-                    output_str += c_line(
+                if self.nn_inference_engine == "nnom":
+                    output_str += c_line(
                         1,
                         "ret = nnom_simple_submit(kb_model->classifier_id, kb_model->pfeature_vector, kb_model->pmodel_results);",
                     )