Skip to content

Commit e50c103

Browse files
committed
samples: tflite-micro: Use FVP for test validation
What is changed? - Replaced printk with printf in the sample. Why do we need this change? - Running the sample with the FVP prints only the output below instead of the complete logs that we expect: `sender 0: Sending inference`. We get the expected output with CONFIG_LOG_IMMEDIATE=y, but that is not ideal. printf works without that config option, and it is also a better fit than printk for this sample. This change was validated using `mps3/corstone300/fvp`. Signed-off-by: Sudan Landge <[email protected]>
1 parent 15c5954 commit e50c103

File tree

2 files changed

+22
-22
lines changed

2 files changed

+22
-22
lines changed

samples/modules/tflite-micro/tflm_ethosu/src/inference_process.cpp

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst)
2828
}
2929

3030
if (src.bytes > dst.size) {
31-
printk("Tensor size mismatch (bytes): actual=%d, expected%d.\n", src.bytes,
31+
printf("Tensor size mismatch (bytes): actual=%d, expected%d.\n", src.bytes,
3232
dst.size);
3333
return true;
3434
}
@@ -112,7 +112,7 @@ bool InferenceProcess::runJob(InferenceJob &job)
112112
/* Get model handle and verify that the version is correct */
113113
const tflite::Model *model = ::tflite::GetModel(job.networkModel.data);
114114
if (model->version() != TFLITE_SCHEMA_VERSION) {
115-
printk("Model schema version unsupported: version=%" PRIu32 ", supported=%d.\n",
115+
printf("Model schema version unsupported: version=%" PRIu32 ", supported=%d.\n",
116116
model->version(), TFLITE_SCHEMA_VERSION);
117117
return true;
118118
}
@@ -126,12 +126,12 @@ bool InferenceProcess::runJob(InferenceJob &job)
126126
/* Allocate tensors */
127127
TfLiteStatus allocate_status = interpreter.AllocateTensors();
128128
if (allocate_status != kTfLiteOk) {
129-
printk("Failed to allocate tensors for inference. job=%p\n", &job);
129+
printf("Failed to allocate tensors for inference. job=%p\n", &job);
130130
return true;
131131
}
132132

133133
if (job.input.size() != interpreter.inputs_size()) {
134-
printk("Number of job and network inputs do not match. input=%zu, network=%zu\n",
134+
printf("Number of job and network inputs do not match. input=%zu, network=%zu\n",
135135
job.input.size(), interpreter.inputs_size());
136136
return true;
137137
}
@@ -142,7 +142,7 @@ bool InferenceProcess::runJob(InferenceJob &job)
142142
const TfLiteTensor *tensor = interpreter.input(i);
143143

144144
if (input.size != tensor->bytes) {
145-
printk("Input tensor size mismatch. index=%zu, input=%zu, network=%u\n", i,
145+
printf("Input tensor size mismatch. index=%zu, input=%zu, network=%u\n", i,
146146
input.size, tensor->bytes);
147147
return true;
148148
}
@@ -154,14 +154,14 @@ bool InferenceProcess::runJob(InferenceJob &job)
154154
/* Run the inference */
155155
TfLiteStatus invoke_status = interpreter.Invoke();
156156
if (invoke_status != kTfLiteOk) {
157-
printk("Invoke failed for inference. job=%s\n", job.name.c_str());
157+
printf("Invoke failed for inference. job=%s\n", job.name.c_str());
158158
return true;
159159
}
160160

161161
/* Copy output data */
162162
if (job.output.size() > 0) {
163163
if (interpreter.outputs_size() != job.output.size()) {
164-
printk("Number of job and network outputs do not match. job=%zu, network=%u\n",
164+
printf("Number of job and network outputs do not match. job=%zu, network=%u\n",
165165
job.output.size(), interpreter.outputs_size());
166166
return true;
167167
}
@@ -175,7 +175,7 @@ bool InferenceProcess::runJob(InferenceJob &job)
175175

176176
if (job.expectedOutput.size() > 0) {
177177
if (job.expectedOutput.size() != interpreter.outputs_size()) {
178-
printk("Number of job and network expected outputs do not match. job=%zu, network=%zu\n",
178+
printf("Number of job and network expected outputs do not match. job=%zu, network=%zu\n",
179179
job.expectedOutput.size(), interpreter.outputs_size());
180180
return true;
181181
}
@@ -185,15 +185,15 @@ bool InferenceProcess::runJob(InferenceJob &job)
185185
const TfLiteTensor *output = interpreter.output(i);
186186

187187
if (expected.size != output->bytes) {
188-
printk("Expected output tensor size mismatch. index=%u, expected=%zu, network=%zu\n",
188+
printf("Expected output tensor size mismatch. index=%u, expected=%zu, network=%zu\n",
189189
i, expected.size, output->bytes);
190190
return true;
191191
}
192192

193193
for (unsigned int j = 0; j < output->bytes; ++j) {
194194
if (output->data.uint8[j] !=
195195
static_cast<uint8_t *>(expected.data)[j]) {
196-
printk("Expected output tensor data mismatch. index=%u, offset=%u, expected=%02x, network=%02x\n",
196+
printf("Expected output tensor data mismatch. index=%u, offset=%u, expected=%02x, network=%02x\n",
197197
i, j, static_cast<uint8_t *>(expected.data)[j],
198198
output->data.uint8[j]);
199199
return true;

samples/modules/tflite-micro/tflm_ethosu/src/main.cpp

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ void *allocateHeap(const size_t size)
108108
uint8_t *buf = static_cast<uint8_t *>(k_malloc(size));
109109

110110
if ((buf == nullptr) || (heap == nullptr)) {
111-
printk("Heap allocation failed. heap=%p, buf=%p, size=%zu\n", heap, buf, size);
111+
printf("Heap allocation failed. heap=%p, buf=%p, size=%zu\n", heap, buf, size);
112112
exit(1);
113113
}
114114

@@ -133,17 +133,17 @@ void inferenceProcessTask(void *_name, void *heap, void *_params)
133133
xInferenceJob *job =
134134
static_cast<xInferenceJob *>(k_queue_get(params->queueHandle, Z_FOREVER));
135135

136-
printk("%s: Received inference job. job=%p\n", name->c_str(), job);
136+
printf("%s: Received inference job. job=%p\n", name->c_str(), job);
137137

138138
/* Run inference */
139139
job->status = inferenceProcess.runJob(*job);
140140

141-
printk("%s: Sending inference response. job=%p\n", name->c_str(), job);
141+
printf("%s: Sending inference response. job=%p\n", name->c_str(), job);
142142

143143
/* Return inference message */
144144
int ret = k_queue_alloc_append(job->responseQueue, job);
145145
if (0 != ret) {
146-
printk("%s: Failed to send message\n", name->c_str());
146+
printf("%s: Failed to send message\n", name->c_str());
147147
exit(1);
148148
}
149149
}
@@ -177,13 +177,13 @@ void inferenceSenderTask(void *_name, void *heap, void *_queue)
177177
{ DataPtr(expectedOutputData, sizeof(expectedOutputData)) },
178178
&senderQueue);
179179

180-
printk("%s: Sending inference. job=%p, name=%s\n", name->c_str(), &job,
180+
printf("%s: Sending inference. job=%p, name=%s\n", name->c_str(), &job,
181181
job.name.c_str());
182182

183183
/* Queue job */
184184
ret = k_queue_alloc_append(inferenceQueue, &job);
185185
if (0 != ret) {
186-
printk("%s: Failed to send message\n", name->c_str());
186+
printf("%s: Failed to send message\n", name->c_str());
187187
exit(1);
188188
}
189189
}
@@ -193,7 +193,7 @@ void inferenceSenderTask(void *_name, void *heap, void *_queue)
193193
xInferenceJob *job =
194194
static_cast<xInferenceJob *>(k_queue_get(&senderQueue, Z_FOREVER));
195195

196-
printk("%s: Received job response. job=%p, status=%u\n", name->c_str(), job,
196+
printf("%s: Received job response. job=%p, status=%u\n", name->c_str(), job,
197197
job->status);
198198

199199
totalCompletedJobs++;
@@ -229,7 +229,7 @@ int main()
229229
const size_t stackSize = 2048;
230230
k_thread_stack_t *stack = static_cast<k_thread_stack_t *>(k_malloc(stackSize));
231231
if (stack == nullptr) {
232-
printk("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
232+
printf("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
233233
exit(1);
234234
}
235235

@@ -239,7 +239,7 @@ int main()
239239
thread.id = k_thread_create(&thread.thread, stack, stackSize, inferenceSenderTask,
240240
name, heapPtr, &inferenceQueue, 3, 0, K_FOREVER);
241241
if (thread.id == 0) {
242-
printk("Failed to create 'inferenceSenderTask%i'\n", n);
242+
printf("Failed to create 'inferenceSenderTask%i'\n", n);
243243
exit(1);
244244
}
245245

@@ -252,7 +252,7 @@ int main()
252252
const size_t stackSize = 8192;
253253
k_thread_stack_t *stack = static_cast<k_thread_stack_t *>(k_malloc(stackSize));
254254
if (stack == nullptr) {
255-
printk("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
255+
printf("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
256256
exit(1);
257257
}
258258

@@ -265,7 +265,7 @@ int main()
265265
thread.id = k_thread_create(&thread.thread, stack, stackSize, inferenceProcessTask,
266266
name, heapPtr, &taskParam, 2, 0, K_FOREVER);
267267
if (thread.id == 0) {
268-
printk("Failed to create 'inferenceProcessTask%i'\n", n);
268+
printf("Failed to create 'inferenceProcessTask%i'\n", n);
269269
exit(1);
270270
}
271271

@@ -283,7 +283,7 @@ int main()
283283
/* Safety belt */
284284
k_thread_suspend(k_current_get());
285285

286-
printk("Zephyr application failed to initialise \n");
286+
printf("Zephyr application failed to initialise \n");
287287

288288
return 1;
289289
}

0 commit comments

Comments (0)