Skip to content

Commit 14ced7c

Browse files
Add onnxruntime as wasi-nn backend (#4485)
* Add onnxruntime as wasi-nn backend
* Remove global context
* Put checks under the lock
* Tensor type will not support legacy wasi-nn abi
* Manually set the imported target with namespace
1 parent cbb6d03 commit 14ced7c

File tree

6 files changed

+924
-2
lines changed

6 files changed

+924
-2
lines changed

build-scripts/config_common.cmake

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -546,7 +546,8 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
546546
# Variant backends
547547
if (NOT WAMR_BUILD_WASI_NN_TFLITE EQUAL 1 AND
548548
NOT WAMR_BUILD_WASI_NN_OPENVINO EQUAL 1 AND
549-
NOT WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1)
549+
NOT WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1 AND
550+
NOT WAMR_BUILD_WASI_NN_ONNX EQUAL 1)
550551
message (FATAL_ERROR " Need to select a backend for WASI-NN")
551552
endif ()
552553

@@ -562,6 +563,10 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
562563
message (" WASI-NN: backend llamacpp enabled")
563564
add_definitions (-DWASM_ENABLE_WASI_NN_LLAMACPP)
564565
endif ()
566+
if (WAMR_BUILD_WASI_NN_ONNX EQUAL 1)
567+
message (" WASI-NN: backend onnx enabled")
568+
add_definitions (-DWASM_ENABLE_WASI_NN_ONNX)
569+
endif ()
565570
# Variant devices
566571
if (WAMR_BUILD_WASI_NN_ENABLE_GPU EQUAL 1)
567572
message (" WASI-NN: GPU enabled")

core/iwasm/libraries/wasi-nn/README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ $ cmake -DWAMR_BUILD_WASI_NN=1 <other options> ...
2626
- `WAMR_BUILD_WASI_NN_TFLITE`. This option designates TensorFlow Lite as the backend.
2727
- `WAMR_BUILD_WASI_NN_OPENVINO`. This option designates OpenVINO as the backend.
2828
- `WAMR_BUILD_WASI_NN_LLAMACPP`. This option designates Llama.cpp as the backend.
29+
- `WAMR_BUILD_WASI_NN_ONNX`. This option designates ONNX Runtime as the backend.
2930

3031
### Wasm
3132

@@ -151,7 +152,7 @@ docker run \
151152

152153
Supported:
153154

154-
- Graph encoding: `tensorflowlite`, `openvino` and `ggml`
155+
- Graph encoding: `tensorflowlite`, `openvino`, `ggml` and `onnx`
155156
- Execution target: `cpu` for all. `gpu` and `tpu` for `tensorflowlite`.
156157
- Tensor type: `fp32`.
157158

core/iwasm/libraries/wasi-nn/cmake/Findonnxruntime.cmake

Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
# Copyright 2025 Sony Semiconductor Solutions Corporation.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Find ONNX Runtime library
#
# This module defines the following variables:
#
# ::
#
#   onnxruntime_FOUND        - True if onnxruntime is found
#   onnxruntime_INCLUDE_DIRS - Include directories for onnxruntime
#   onnxruntime_LIBRARIES    - List of libraries for onnxruntime
#   onnxruntime_VERSION      - Version of onnxruntime
#
# ::
#
# Example usage:
#
#   find_package(onnxruntime)
#   if(onnxruntime_FOUND)
#     target_link_libraries(app onnxruntime)
#   endif()

# Prefer the CMake package configuration shipped by ONNX Runtime itself.
# FIXME: This is a temporary workaround for ONNX Runtime's broken CMake config on Linux.
# See https://github.com/microsoft/onnxruntime/issues/25279
# Once the upstream issue is fixed, this conditional can be safely removed.
if(NOT CMAKE_SYSTEM_NAME STREQUAL "Linux")
  find_package(onnxruntime CONFIG QUIET)
  if(onnxruntime_FOUND)
    # The config file already set up everything; nothing left to do here.
    return()
  endif()
endif()

# Fall back to a manual search: locate the C API header ...
find_path(onnxruntime_INCLUDE_DIR
  NAMES onnxruntime_c_api.h
  PATHS
    /usr/include
    /usr/local/include
    /opt/onnxruntime/include
    $ENV{ONNXRUNTIME_ROOT}/include
    ${CMAKE_CURRENT_LIST_DIR}/../../../../..
)

# ... and the shared/static library.
find_library(onnxruntime_LIBRARY
  NAMES onnxruntime
  PATHS
    /usr/lib
    /usr/local/lib
    /opt/onnxruntime/lib
    $ENV{ONNXRUNTIME_ROOT}/lib
    ${CMAKE_CURRENT_LIST_DIR}/../../../../..
)

# Derive a version number from the ORT_API_VERSION define in the header.
if(onnxruntime_INCLUDE_DIR)
  file(STRINGS "${onnxruntime_INCLUDE_DIR}/onnxruntime_c_api.h" _ort_api_version_line
       REGEX "^#define[\t ]+ORT_API_VERSION[\t ]+[0-9]+")

  if(_ort_api_version_line)
    string(REGEX REPLACE "^#define[\t ]+ORT_API_VERSION[\t ]+([0-9]+)" "\\1"
           onnxruntime_VERSION "${_ort_api_version_line}")
  endif()
endif()

# Let CMake handle FOUND/REQUIRED/QUIET bookkeeping and version reporting.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(onnxruntime
  REQUIRED_VARS onnxruntime_LIBRARY onnxruntime_INCLUDE_DIR
  VERSION_VAR onnxruntime_VERSION
)

if(onnxruntime_FOUND)
  set(onnxruntime_LIBRARIES ${onnxruntime_LIBRARY})
  set(onnxruntime_INCLUDE_DIRS ${onnxruntime_INCLUDE_DIR})

  # Expose a namespaced imported target so consumers can simply
  # target_link_libraries(... onnxruntime::onnxruntime) and inherit
  # the include directories as usage requirements.
  if(NOT TARGET onnxruntime::onnxruntime)
    add_library(onnxruntime::onnxruntime UNKNOWN IMPORTED)
    set_target_properties(onnxruntime::onnxruntime PROPERTIES
      IMPORTED_LOCATION "${onnxruntime_LIBRARY}"
      INTERFACE_INCLUDE_DIRECTORIES "${onnxruntime_INCLUDE_DIRS}"
    )
  endif()
endif()

# Hide the raw cache entries from the default ccmake/cmake-gui view.
mark_as_advanced(onnxruntime_INCLUDE_DIR onnxruntime_LIBRARY)

core/iwasm/libraries/wasi-nn/cmake/wasi_nn.cmake

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -109,3 +109,24 @@ if(WAMR_BUILD_WASI_NN_LLAMACPP EQUAL 1)
109109

110110
install(TARGETS wasi_nn_llamacpp DESTINATION lib)
111111
endif()
112+
113+
# - onnx
114+
if(WAMR_BUILD_WASI_NN_ONNX EQUAL 1)
115+
find_package(onnxruntime REQUIRED)
116+
enable_language(CXX)
117+
118+
add_library(
119+
wasi_nn_onnx
120+
SHARED
121+
${WASI_NN_ROOT}/src/wasi_nn_onnx.cpp
122+
)
123+
124+
target_link_libraries(
125+
wasi_nn_onnx
126+
PUBLIC
127+
vmlib
128+
onnxruntime::onnxruntime
129+
)
130+
131+
install(TARGETS wasi_nn_onnx DESTINATION lib)
132+
endif()

core/iwasm/libraries/wasi-nn/src/wasi_nn.c

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
#define TFLITE_BACKEND_LIB "libwasi_nn_tflite" LIB_EXTENTION
3434
#define OPENVINO_BACKEND_LIB "libwasi_nn_openvino" LIB_EXTENTION
3535
#define LLAMACPP_BACKEND_LIB "libwasi_nn_llamacpp" LIB_EXTENTION
36+
#define ONNX_BACKEND_LIB "libwasi_nn_onnx" LIB_EXTENTION
3637

3738
/* Global variables */
3839
static korp_mutex wasi_nn_lock;
@@ -240,6 +241,17 @@ choose_a_backend()
240241
return openvino;
241242
}
242243

244+
#ifndef NDEBUG
245+
NN_WARN_PRINTF("%s", dlerror());
246+
#endif
247+
248+
handle = dlopen(ONNX_BACKEND_LIB, RTLD_LAZY);
249+
if (handle) {
250+
NN_INFO_PRINTF("Using onnx backend");
251+
dlclose(handle);
252+
return onnx;
253+
}
254+
243255
#ifndef NDEBUG
244256
NN_WARN_PRINTF("%s", dlerror());
245257
#endif
@@ -363,6 +375,8 @@ graph_encoding_to_backend_lib_name(graph_encoding encoding)
363375
return TFLITE_BACKEND_LIB;
364376
case ggml:
365377
return LLAMACPP_BACKEND_LIB;
378+
case onnx:
379+
return ONNX_BACKEND_LIB;
366380
default:
367381
return NULL;
368382
}

0 commit comments

Comments (0)