+#include <faasm/faasm.h>
+#include <faasm/input.h>
+
+#include <cstdio>
+#include <cstdlib>
+#include <memory>
+
+#include "tensorflow/lite/interpreter.h"
+#include "tensorflow/lite/kernels/register.h"
+#include "tensorflow/lite/model.h"
+#include "tensorflow/lite/optional_debug_tools.h"
+
+#define INPUT_FILE_PATH "faasm://tflite/sample_model.tflite"
+#define TFLITE_MINIMAL_CHECK(x)                                    \
+    if (!(x)) {                                                    \
+        fprintf(stderr, "Error at %s:%d\n", __FILE__, __LINE__);   \
+        exit(1);                                                   \
+    }
+
+/**
+ * Minimal example that reads a model from disk, to check that the
+ * TensorFlow Lite library was compiled and linked successfully.
+ *
+ * Inspired by:
+ * https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/examples/minimal/minimal.cc
+ */
+
+int main()
+{
+    // Load model from file
+    std::unique_ptr<tflite::FlatBufferModel> model =
+        tflite::FlatBufferModel::BuildFromFile(INPUT_FILE_PATH);
+    TFLITE_MINIMAL_CHECK(model != nullptr);
+
+    // Build the interpreter with the InterpreterBuilder.
+    tflite::ops::builtin::BuiltinOpResolver resolver;
+    tflite::InterpreterBuilder builder(*model, resolver);
+    std::unique_ptr<tflite::Interpreter> interpreter;
+    builder(&interpreter);
+    TFLITE_MINIMAL_CHECK(interpreter != nullptr);
+
+    // Allocate tensor buffers.
+    TFLITE_MINIMAL_CHECK(interpreter->AllocateTensors() == kTfLiteOk);
+    printf("=== Pre-invoke Interpreter State ===\n");
+    tflite::PrintInterpreterState(interpreter.get());
+
+    return 0;
+}
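
A natural follow-up, mirroring the upstream minimal.cc example, is to actually run inference once the tensors are allocated. The lines below are a sketch of what could be added before the return statement in main(); they assume the sample model has a single float input tensor and a single float output tensor, which may not hold for an arbitrary .tflite file.

    // Fill the (assumed) float input tensor with a dummy value
    float* input = interpreter->typed_input_tensor<float>(0);
    input[0] = 1.0f;

    // Run inference and dump the post-invoke state
    TFLITE_MINIMAL_CHECK(interpreter->Invoke() == kTfLiteOk);
    printf("=== Post-invoke Interpreter State ===\n");
    tflite::PrintInterpreterState(interpreter.get());

    // Read back the (assumed) float output tensor
    float* output = interpreter->typed_output_tensor<float>(0);
    printf("output[0] = %f\n", output[0]);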