15 changes: 15 additions & 0 deletions modules/hal_ethos_u/Kconfig
@@ -26,6 +26,16 @@ config ARM_ETHOS_U65_256
bool "using Ethos-U65 with 256 macs"
config ARM_ETHOS_U65_512
bool "using Ethos-U65 with 512 macs"
+config ARM_ETHOS_U85_128
+bool "using Ethos-U85 with 128 macs"
+config ARM_ETHOS_U85_256
+bool "using Ethos-U85 with 256 macs"
+config ARM_ETHOS_U85_512
+bool "using Ethos-U85 with 512 macs"
+config ARM_ETHOS_U85_1024
+bool "using Ethos-U85 with 1024 macs"
+config ARM_ETHOS_U85_2048
+bool "using Ethos-U85 with 2048 macs"
endchoice
endmenu

@@ -37,6 +47,11 @@ config ARM_ETHOS_U_NPU_NAME
default "ethos-u65-128" if ARM_ETHOS_U65_128
default "ethos-u65-256" if ARM_ETHOS_U65_256
default "ethos-u65-512" if ARM_ETHOS_U65_512
default "ethos-u85-128" if ARM_ETHOS_U85_128
default "ethos-u85-256" if ARM_ETHOS_U85_256
default "ethos-u85-512" if ARM_ETHOS_U85_512
default "ethos-u85-1024" if ARM_ETHOS_U85_1024
default "ethos-u85-2048" if ARM_ETHOS_U85_2048
help
Name of the used Arm NPU

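For context, the new Ethos-U85 symbols extend the existing Kconfig choice, so an application or board selects exactly one MAC configuration and the ARM_ETHOS_U_NPU_NAME string follows from that selection. A minimal, hypothetical prj.conf fragment (not part of this change) illustrating the 256-MAC variant:

```
# Hypothetical application fragment (assumption, not taken from this PR):
# take the Ethos-U85 / 256 MACs option from the choice above.
# ARM_ETHOS_U_NPU_NAME then defaults to "ethos-u85-256".
CONFIG_ARM_ETHOS_U85_256=y
```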
2 changes: 1 addition & 1 deletion samples/modules/tflite-micro/hello_world/README.rst
@@ -65,7 +65,7 @@ the :envvar:`PATH` variable, then building and testing can be done with following
commands.

```
-$ west build -p auto -b mps3/corstone300/an547 samples/modules/tflite-micro/hello_world/ -T sample.tensorflow.helloworld.cmsis_nn
+$ west build -p auto -b mps3/corstone300/fvp samples/modules/tflite-micro/hello_world/ -T sample.tensorflow.helloworld.cmsis_nn
$ FVP_Corstone_SSE-300_Ethos-U55 build/zephyr/zephyr.elf
```

2 changes: 1 addition & 1 deletion samples/modules/tflite-micro/hello_world/sample.yaml
@@ -24,6 +24,6 @@ tests:
sample.tensorflow.helloworld.cmsis_nn:
tags: tensorflow
platform_allow:
-- mps3/corstone300/an547
+- mps3/corstone300/fvp
extra_configs:
- CONFIG_TENSORFLOW_LITE_MICRO_CMSIS_NN_KERNELS=y
2 changes: 1 addition & 1 deletion samples/modules/tflite-micro/tflm_ethosu/README.rst
@@ -43,5 +43,5 @@ commands.

.. code-block:: bash

-$ west build -b mps3/corstone300/an547 zephyr/samples/modules/tflite-micro/tflm_ethosu
+$ west build -b mps3/corstone300/fvp zephyr/samples/modules/tflite-micro/tflm_ethosu
$ FVP_Corstone_SSE-300_Ethos-U55 build/zephyr/zephyr.elf
2 changes: 1 addition & 1 deletion samples/modules/tflite-micro/tflm_ethosu/sample.yaml
@@ -10,4 +10,4 @@ tests:
filter: dt_compat_enabled("arm,ethos-u")
build_only: true
integration_platforms:
-- mps3/corstone300/an547
+- mps3/corstone300/fvp
20 changes: 10 additions & 10 deletions samples/modules/tflite-micro/tflm_ethosu/src/inference_process.cpp
@@ -28,7 +28,7 @@ bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst)
}

if (src.bytes > dst.size) {
printk("Tensor size mismatch (bytes): actual=%d, expected%d.\n", src.bytes,
printf("Tensor size mismatch (bytes): actual=%d, expected%d.\n", src.bytes,
dst.size);
return true;
}
@@ -112,7 +112,7 @@ bool InferenceProcess::runJob(InferenceJob &job)
/* Get model handle and verify that the version is correct */
const tflite::Model *model = ::tflite::GetModel(job.networkModel.data);
if (model->version() != TFLITE_SCHEMA_VERSION) {
printk("Model schema version unsupported: version=%" PRIu32 ", supported=%d.\n",
printf("Model schema version unsupported: version=%" PRIu32 ", supported=%d.\n",
model->version(), TFLITE_SCHEMA_VERSION);
return true;
}
@@ -126,12 +126,12 @@ bool InferenceProcess::runJob(InferenceJob &job)
/* Allocate tensors */
TfLiteStatus allocate_status = interpreter.AllocateTensors();
if (allocate_status != kTfLiteOk) {
printk("Failed to allocate tensors for inference. job=%p\n", &job);
printf("Failed to allocate tensors for inference. job=%p\n", &job);
return true;
}

if (job.input.size() != interpreter.inputs_size()) {
printk("Number of job and network inputs do not match. input=%zu, network=%zu\n",
printf("Number of job and network inputs do not match. input=%zu, network=%zu\n",
job.input.size(), interpreter.inputs_size());
return true;
}
@@ -142,7 +142,7 @@ bool InferenceProcess::runJob(InferenceJob &job)
const TfLiteTensor *tensor = interpreter.input(i);

if (input.size != tensor->bytes) {
printk("Input tensor size mismatch. index=%zu, input=%zu, network=%u\n", i,
printf("Input tensor size mismatch. index=%zu, input=%zu, network=%u\n", i,
input.size, tensor->bytes);
return true;
}
@@ -154,14 +154,14 @@ bool InferenceProcess::runJob(InferenceJob &job)
/* Run the inference */
TfLiteStatus invoke_status = interpreter.Invoke();
if (invoke_status != kTfLiteOk) {
printk("Invoke failed for inference. job=%s\n", job.name.c_str());
printf("Invoke failed for inference. job=%s\n", job.name.c_str());
return true;
}

/* Copy output data */
if (job.output.size() > 0) {
if (interpreter.outputs_size() != job.output.size()) {
printk("Number of job and network outputs do not match. job=%zu, network=%u\n",
printf("Number of job and network outputs do not match. job=%zu, network=%u\n",
job.output.size(), interpreter.outputs_size());
return true;
}
@@ -175,7 +175,7 @@ bool InferenceProcess::runJob(InferenceJob &job)

if (job.expectedOutput.size() > 0) {
if (job.expectedOutput.size() != interpreter.outputs_size()) {
printk("Number of job and network expected outputs do not match. job=%zu, network=%zu\n",
printf("Number of job and network expected outputs do not match. job=%zu, network=%zu\n",
job.expectedOutput.size(), interpreter.outputs_size());
return true;
}
@@ -185,15 +185,15 @@ bool InferenceProcess::runJob(InferenceJob &job)
const TfLiteTensor *output = interpreter.output(i);

if (expected.size != output->bytes) {
printk("Expected output tensor size mismatch. index=%u, expected=%zu, network=%zu\n",
printf("Expected output tensor size mismatch. index=%u, expected=%zu, network=%zu\n",
i, expected.size, output->bytes);
return true;
}

for (unsigned int j = 0; j < output->bytes; ++j) {
if (output->data.uint8[j] !=
static_cast<uint8_t *>(expected.data)[j]) {
printk("Expected output tensor data mismatch. index=%u, offset=%u, expected=%02x, network=%02x\n",
printf("Expected output tensor data mismatch. index=%u, offset=%u, expected=%02x, network=%02x\n",
i, j, static_cast<uint8_t *>(expected.data)[j],
output->data.uint8[j]);
return true;
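On the printk to printf switch above: printf goes through the C library's stdout rather than the kernel's direct printk path, so the sample's messages still need stdout routed to the console. A hypothetical configuration fragment (an assumption, not something this change adds) that makes that routing explicit:

```
# Hypothetical prj.conf fragment (assumption, not part of this change):
# send the C library's stdout to the Zephyr console so the printf()
# output lands on the same console that printk() previously used.
CONFIG_STDOUT_CONSOLE=y
```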
24 changes: 12 additions & 12 deletions samples/modules/tflite-micro/tflm_ethosu/src/main.cpp
@@ -108,7 +108,7 @@ void *allocateHeap(const size_t size)
uint8_t *buf = static_cast<uint8_t *>(k_malloc(size));

if ((buf == nullptr) || (heap == nullptr)) {
printk("Heap allocation failed. heap=%p, buf=%p, size=%zu\n", heap, buf, size);
printf("Heap allocation failed. heap=%p, buf=%p, size=%zu\n", heap, buf, size);
exit(1);
}

@@ -133,17 +133,17 @@ void inferenceProcessTask(void *_name, void *heap, void *_params)
xInferenceJob *job =
static_cast<xInferenceJob *>(k_queue_get(params->queueHandle, Z_FOREVER));

printk("%s: Received inference job. job=%p\n", name->c_str(), job);
printf("%s: Received inference job. job=%p\n", name->c_str(), job);

/* Run inference */
job->status = inferenceProcess.runJob(*job);

printk("%s: Sending inference response. job=%p\n", name->c_str(), job);
printf("%s: Sending inference response. job=%p\n", name->c_str(), job);

/* Return inference message */
int ret = k_queue_alloc_append(job->responseQueue, job);
if (0 != ret) {
printk("%s: Failed to send message\n", name->c_str());
printf("%s: Failed to send message\n", name->c_str());
exit(1);
}
}
@@ -177,13 +177,13 @@ void inferenceSenderTask(void *_name, void *heap, void *_queue)
{ DataPtr(expectedOutputData, sizeof(expectedOutputData)) },
&senderQueue);

printk("%s: Sending inference. job=%p, name=%s\n", name->c_str(), &job,
printf("%s: Sending inference. job=%p, name=%s\n", name->c_str(), &job,
job.name.c_str());

/* Queue job */
ret = k_queue_alloc_append(inferenceQueue, &job);
if (0 != ret) {
printk("%s: Failed to send message\n", name->c_str());
printf("%s: Failed to send message\n", name->c_str());
exit(1);
}
}
@@ -193,7 +193,7 @@ void inferenceSenderTask(void *_name, void *heap, void *_queue)
xInferenceJob *job =
static_cast<xInferenceJob *>(k_queue_get(&senderQueue, Z_FOREVER));

printk("%s: Received job response. job=%p, status=%u\n", name->c_str(), job,
printf("%s: Received job response. job=%p, status=%u\n", name->c_str(), job,
job->status);

totalCompletedJobs++;
@@ -229,7 +229,7 @@ int main()
const size_t stackSize = 2048;
k_thread_stack_t *stack = static_cast<k_thread_stack_t *>(k_malloc(stackSize));
if (stack == nullptr) {
printk("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
printf("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
exit(1);
}

@@ -239,7 +239,7 @@ int main()
thread.id = k_thread_create(&thread.thread, stack, stackSize, inferenceSenderTask,
name, heapPtr, &inferenceQueue, 3, 0, K_FOREVER);
if (thread.id == 0) {
printk("Failed to create 'inferenceSenderTask%i'\n", n);
printf("Failed to create 'inferenceSenderTask%i'\n", n);
exit(1);
}

@@ -252,7 +252,7 @@ int main()
const size_t stackSize = 8192;
k_thread_stack_t *stack = static_cast<k_thread_stack_t *>(k_malloc(stackSize));
if (stack == nullptr) {
printk("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
printf("Failed to allocate stack to 'inferenceSenderTask%i'\n", n);
exit(1);
}

@@ -265,7 +265,7 @@ int main()
thread.id = k_thread_create(&thread.thread, stack, stackSize, inferenceProcessTask,
name, heapPtr, &taskParam, 2, 0, K_FOREVER);
if (thread.id == 0) {
printk("Failed to create 'inferenceProcessTask%i'\n", n);
printf("Failed to create 'inferenceProcessTask%i'\n", n);
exit(1);
}

@@ -283,7 +283,7 @@ int main()
/* Safety belt */
k_thread_suspend(k_current_get());

printk("Zephyr application failed to initialise \n");
printf("Zephyr application failed to initialise \n");

return 1;
}
2 changes: 1 addition & 1 deletion west.yml
@@ -163,7 +163,7 @@ manifest:
groups:
- hal
- name: hal_ethos_u
-revision: 8e2cf756b474eff9a32a9bdf1775d9620f1eadcf
+revision: 50ddffca1cc700112f25ad9bc077915a0355ee5d
path: modules/hal/ethos_u
groups:
- hal