Skip to content

Commit b4aa86c

Browse files
committed
first commit
0 parents  commit b4aa86c

File tree

142 files changed

+41438
-0
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

142 files changed

+41438
-0
lines changed

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
cmake-build-debug
2+
.idea

examples/SineExample/SineExample.ino

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
#include <EloquentTinyML.h>
#include "sine_model.h"

// One global predictor: 1 input, 1 output, 2048-byte tensor arena,
// loaded from the model exported in sine_model.h.
Eloquent::TinyML::TinyML<1, 1, 2048> ml(sine_model_quantized_tflite);

void setup() {
    // open the serial port so predictions can be printed
    Serial.begin(115200);
}

void loop() {
    // feed either 3.14/2 or 0, picked at random each iteration
    float features[1];

    if (random(10) > 5)
        features[0] = 3.14/2;
    else
        features[0] = 0;

    float prediction = ml.predict(features);

    Serial.println(prediction);
    delay(1000);
}

examples/SineExample/sine_model.h

Lines changed: 225 additions & 0 deletions
Large diffs are not rendered by default.

library.json

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
{
2+
"name": "EloquentTinyML",
3+
"keywords": "tinyml",
4+
"description": "Eloquent interface to Tensorflow Lite for Microcontrollers",
5+
"repository": {
6+
"type": "git",
7+
"url": "https://github.com/eloquentarduino/EloquentTinyML"
8+
},
9+
"version": "0.0.1",
10+
"authors": {
11+
"name": "Simone Salerno",
12+
"url": "https://github.com/eloquentarduino"
13+
},
14+
"frameworks": "arduino",
15+
"platforms": "*"
16+
}

library.properties

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
name=EloquentTinyML
2+
version=0.0.1
3+
author=Simone Salerno <github.com/eloquentarduino>
4+
maintainer=Simone Salerno <github.com/eloquentarduino>
5+
sentence=An eloquent interface to Tensorflow Lite for Microcontrollers
6+
paragraph=
7+
category=Other
8+
url=https://github.com/eloquentarduino/EloquentTinyML
9+
architectures=*

src/EloquentTinyML.h

Lines changed: 104 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,104 @@
1+
#pragma once
2+
3+
#include <Arduino.h>
4+
#include <math.h>
5+
#include "tensorflow/lite/version.h"
6+
#include "tensorflow/lite/micro/kernels/all_ops_resolver.h"
7+
#include "tensorflow/lite/micro/micro_error_reporter.h"
8+
#include "tensorflow/lite/micro/micro_interpreter.h"
9+
10+
11+
namespace Eloquent {
12+
namespace TinyML {
13+
14+
/**
15+
* Eloquent interface to Tensorflow Lite for Microcontrollers
16+
*
17+
* @tparam inputSize
18+
* @tparam outputSize
19+
* @tparam tensorArenaSize how much memory to allocate to the tensors
20+
*/
21+
template<size_t inputSize, size_t outputSize, size_t tensorArenaSize>
22+
class TinyML {
23+
public:
24+
/**
25+
* Contructor
26+
* @param modelData a model as exported by tinymlgen
27+
*/
28+
TinyML(unsigned char *modelData) {
29+
static tflite::MicroErrorReporter microReporter;
30+
static tflite::ops::micro::AllOpsResolver resolver;
31+
32+
reporter = &microReporter;
33+
model = tflite::GetModel(modelData);
34+
35+
// assert model version and runtime version match
36+
if (model->version() != TFLITE_SCHEMA_VERSION) {
37+
failed = true;
38+
reporter->Report(
39+
"Model provided is schema version %d not equal "
40+
"to supported version %d.",
41+
model->version(), TFLITE_SCHEMA_VERSION);
42+
43+
return;
44+
}
45+
46+
static tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, reporter);
47+
48+
if (interpreter.AllocateTensors() != kTfLiteOk) {
49+
failed = true;
50+
reporter->Report("AllocateTensors() failed");
51+
return;
52+
}
53+
54+
input = interpreter.input(0);
55+
output = interpreter.output(0);
56+
this->interpreter = &interpreter;
57+
}
58+
59+
/**
60+
* Test if the initialization completed fine
61+
*/
62+
bool initialized() {
63+
return !failed;
64+
}
65+
66+
/**
67+
* Run inference
68+
* @return output[0], so you can use it directly if it's the only output
69+
*/
70+
float predict(float *input, float *output = NULL) {
71+
// abort if initialization failed
72+
if (failed)
73+
return sqrt(-1);
74+
75+
// copy input
76+
for (size_t i = 0; i < inputSize; i++)
77+
this->input->data.f[i] = input[i];
78+
79+
if (interpreter->Invoke() != kTfLiteOk) {
80+
reporter->Report("Inference failed");
81+
82+
return sqrt(-1);
83+
}
84+
85+
// copy output
86+
if (output != NULL) {
87+
for (size_t i = 0; i < outputSize; i++)
88+
output[i] = this->output->data.f[0];
89+
}
90+
91+
return this->output->data.f[0];
92+
}
93+
94+
protected:
95+
bool failed;
96+
uint8_t tensorArena[tensorArenaSize];
97+
tflite::ErrorReporter *reporter;
98+
tflite::MicroInterpreter *interpreter;
99+
TfLiteTensor *input;
100+
TfLiteTensor *output;
101+
const tflite::Model *model;
102+
};
103+
}
104+
}

0 commit comments

Comments
 (0)