Skip to content

Commit 4932a99

Browse files
authored
Merge branch 'main' into shama/csharp_sample_update
2 parents ddd0e3e + 67598e4 commit 4932a99

File tree

75 files changed

+5508
-156
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

75 files changed

+5508
-156
lines changed

c_cxx/MNIST/MNIST.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,8 @@ struct MNIST {
4040
const char* input_names[] = {"Input3"};
4141
const char* output_names[] = {"Plus214_Output_0"};
4242

43-
session_.Run(Ort::RunOptions{nullptr}, input_names, &input_tensor_, 1, output_names, &output_tensor_, 1);
43+
Ort::RunOptions run_options;
44+
session_.Run(run_options, input_names, &input_tensor_, 1, output_names, &output_tensor_, 1);
4445
softmax(results_);
4546
result_ = std::distance(results_.begin(), std::max_element(results_.begin(), results_.end()));
4647
return result_;

c_cxx/README.md

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,7 @@ This directory contains a few C/C++ sample applications for demoing onnxruntime
1212
## Prerequisites
1313
1. Visual Studio 2015/2017/2019
1414
2. cmake(version >=3.13)
15-
3. (optional) [libpng 1.6](http://www.libpng.org/pub/png/libpng.html)
16-
17-
You may get a precompiled libpng library from [https://onnxruntimetestdata.blob.core.windows.net/models/libpng.zip](https://onnxruntimetestdata.blob.core.windows.net/models/libpng.zip)
15+
3. (optional) [libpng 1.6](https://libpng.sourceforge.io/)
1816

1917
## Install ONNX Runtime
2018
### Option 1: download a prebuilt package
@@ -31,16 +29,16 @@ build.bat --config RelWithDebInfo --build_shared_lib --parallel
3129
By default this will build a project with "C:\Program Files (x86)\onnxruntime" install destination. This is a protected folder on Windows. If you do not want to run installation with elevated privileges you will need to override the default installation location by passing extra CMake arguments. For example:
3230

3331
```
34-
build.bat --config RelWithDebInfo --build_shared_lib --parallel --cmake_extra_defines CMAKE_INSTALL_PREFIX=c:\dev\ort_install
32+
build.bat --config RelWithDebInfo --build_dir .\build --build_shared_lib --parallel --cmake_extra_defines CMAKE_INSTALL_PREFIX=c:\dev\ort_install
3533
```
3634

37-
By default products of the build on Windows go to .\build\Windows\<config> folder. In the case above it would be .\build\Windows\RelWithDebInfo.
35+
By default products of the build on Windows go to .\build\Windows\<config> folder. In the case above it would be .\build\RelWithDebInfo since the build folder is mentioned explicitly.
3836
If you did not specify alternative installation location above you would need to open an elevated command prompt to install onnxruntime.
3937
Run the following commands.
4038

4139
```
42-
cd .\Windows\RelWithDebInfo
43-
msbuild INSTALL.vcxproj /p:Configuration=RelWithDebInfo
40+
cmake --install .\build\RelWithDebInfo --config RelWithDebInfo
41+
4442
```
4543

4644
## Build the samples
@@ -63,6 +61,7 @@ Or build it using msbuild
6361

6462
```bat
6563
msbuild onnxruntime_samples.sln /p:Configuration=Debug|Release
64+
cmake --install .\build\Debug|Release --config Debug
6665
```
6766

6867
To run the samples make sure that your Install Folder Bin is in the path so your sample executable can find onnxruntime dll and libpng if you used it.

c_cxx/Snpe_EP/CMakeLists.txt

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,13 @@ ADD_EXECUTABLE(snpe_ep_sample main.cpp)
1616
if(HAVE_TENSORRT_PROVIDER_FACTORY_H)
1717
target_compile_definitions(snpe_ep_sample PRIVATE -DHAVE_TENSORRT_PROVIDER_FACTORY_H)
1818
endif()
19-
target_link_libraries(snpe_ep_sample onnxruntime)
19+
20+
set(ORT_LIBS onnxruntime)
21+
if(ANDROID)
22+
list(APPEND ORT_LIBS libc++_shared.so)
23+
endif()
24+
target_link_libraries(snpe_ep_sample ${ORT_LIBS})
25+
2026
if(MSVC)
2127
target_link_directories(snpe_ep_sample PRIVATE ${ONNXRUNTIME_ROOTDIR}/build/Windows/Release/Release)
2228
else()

c_cxx/Snpe_EP/README.md

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -47,8 +47,8 @@
4747
std::vector<const char*> options_keys = {"runtime", "buffer_type"};
4848
std::vector<const char*> options_values = {"CPU", "TF8"}; // set to TF8 if use quantized data
4949
50-
CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider_SNPE(session_options, options_keys.data(),
51-
options_values.data(), options_keys.size()));
50+
g_ort->SessionOptionsAppendExecutionProvider(session_options, "SNPE", options_keys.data(),
51+
options_values.data(), options_keys.size());
5252
```
5353
Please refer to the unit test case [Snpe_ConvertFromAbs.QuantizedModelTf8Test](https://github.com/microsoft/onnxruntime/blob/5ecfaef042380995fb15587ccf6ff77f9d3a01d2/onnxruntime/test/contrib_ops/snpe_op_test.cc#L209-L251) for more details.
5454
@@ -74,7 +74,12 @@
7474
chairs.raw -- from $SNPE_ROOT/models/inception_v3/data/cropped
7575
imagenet_slim_labels.txt -- from $SNPE_ROOT/models/inception_v3/data
7676
77-
Run snpe_ep_sample.exe, it will output:
77+
Run
78+
```
79+
snpe_ep_sample.exe --cpu chairs.raw
80+
```
81+
82+
it will output:
7883
7984
```
8085
832, 0.299591, studio couch
@@ -103,6 +108,7 @@
103108
adb push [$SNPE_ROOT]/lib/dsp/*.so /data/local/tmp/snpeexample
104109
adb push [$Onnxruntime_ROOT]/build/Android/Release/libonnxruntime.so /data/local/tmp/snpeexample
105110
adb push [$SNPE_ROOT]/models/inception_v3/data/cropped/chairs.raw /data/local/tmp/snpeexample
111+
adb push [$SNPE_ROOT]/models/inception_v3/data/imagenet_slim_labels.txt /data/local/tmp/snpeexample
106112
adb push [$SNPE_ROOT]/models/inception_v3/snpe_inception_v3.onnx /data/local/tmp/snpeexample
107113
adb push ./onnxruntime-inference-examples/c_cxx/Snpe_EP/build_android/snpe_ep_sample /data/local/tmp/snpeexample
108114
```
@@ -115,7 +121,8 @@
115121
chmod +x *
116122
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/data/local/tmp/snpeexample
117123
export PATH=$PATH:/data/local/tmp/snpeexample
118-
snpe_ep_sample
124+
snpe_ep_sample --cpu chairs.raw
125+
snpe_ep_sample --dsp chairs.raw
119126
```
120127
121128
it will output:

c_cxx/Snpe_EP/main.cpp

Lines changed: 35 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,12 +16,12 @@ bool CheckStatus(const OrtApi* g_ort, OrtStatus* status) {
1616
const char* msg = g_ort->GetErrorMessage(status);
1717
std::cerr << msg << std::endl;
1818
g_ort->ReleaseStatus(status);
19-
throw std::exception();
19+
throw Ort::Exception(msg, OrtErrorCode::ORT_EP_FAIL);
2020
}
2121
return true;
2222
}
2323

24-
void run_ort_snpe_ep() {
24+
void run_ort_snpe_ep(std::string backend, std::string input_path) {
2525
#ifdef _WIN32
2626
const wchar_t* model_path = L"snpe_inception_v3.onnx";
2727
#else
@@ -38,10 +38,10 @@ void run_ort_snpe_ep() {
3838
CheckStatus(g_ort, g_ort->SetSessionGraphOptimizationLevel(session_options, ORT_ENABLE_BASIC));
3939

4040
std::vector<const char*> options_keys = {"runtime", "buffer_type"};
41-
std::vector<const char*> options_values = {"CPU", "FLOAT"}; // set to TF8 if use quantized data
41+
std::vector<const char*> options_values = {backend.c_str(), "FLOAT"}; // set to TF8 if use quantized data
4242

43-
CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider_SNPE(session_options, options_keys.data(),
44-
options_values.data(), options_keys.size()));
43+
CheckStatus(g_ort, g_ort->SessionOptionsAppendExecutionProvider(session_options, "SNPE", options_keys.data(),
44+
options_values.data(), options_keys.size()));
4545
OrtSession* session;
4646
CheckStatus(g_ort, g_ort->CreateSession(env, model_path, session_options, &session));
4747

@@ -124,7 +124,7 @@ void run_ort_snpe_ep() {
124124
size_t input_data_length = input_data_size * sizeof(float);
125125
std::vector<float> input_data(input_data_size, 1.0);
126126

127-
std::ifstream input_raw_file("chairs.raw", std::ios::binary);
127+
std::ifstream input_raw_file(input_path, std::ios::binary);
128128
input_raw_file.seekg(0, std::ios::end);
129129
const size_t num_elements = input_raw_file.tellg() / sizeof(float);
130130
input_raw_file.seekg(0, std::ios::beg);
@@ -162,7 +162,35 @@ void run_ort_snpe_ep() {
162162
printf("%d, %f, %s \n", max_index, *max, label_table[max_index].c_str());
163163
}
164164

165+
void PrintHelp() {
166+
std::cout << "To run the sample, use the following command:" << std::endl;
167+
std::cout << "Example: ./snpe_ep_sample --cpu <path_to_raw_input>" << std::endl;
168+
std::cout << "To Run with SNPE CPU backend. Example: ./snpe_ep_sample --cpu chairs.raw" << std::endl;
169+
std::cout << "To Run with SNPE DSP backend. Example: ./snpe_ep_sample --dsp chairs.raw" << std::endl;
170+
}
171+
172+
constexpr const char* CPUBACKEDN = "--cpu";
173+
constexpr const char* DSPBACKEDN = "--dsp";
174+
165175
int main(int argc, char* argv[]) {
166-
run_ort_snpe_ep();
176+
std::string backend = "CPU";
177+
178+
if (argc != 3) {
179+
PrintHelp();
180+
return 1;
181+
}
182+
183+
if (strcmp(argv[1], CPUBACKEDN) == 0) {
184+
backend = "CPU";
185+
} else if (strcmp(argv[1], DSPBACKEDN) == 0) {
186+
backend = "DSP";
187+
} else {
188+
std::cout << "This sample only support CPU, DSP." << std::endl;
189+
PrintHelp();
190+
return 1;
191+
}
192+
std::string input_path(argv[2]);
193+
194+
run_ort_snpe_ep(backend, input_path);
167195
return 0;
168196
}

c_cxx/imagenet/CMakeLists.txt

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
# Licensed under the MIT License.
33

44
set(FS_SOURCES local_filesystem.h sync_api.h controller.h controller.cc)
5+
56
if(WIN32)
67
LIST(APPEND FS_SOURCES local_filesystem_win.cc sync_api_win.cc)
78
else()
@@ -17,17 +18,22 @@ if(JPEG_FOUND)
1718
elseif(WIN32)
1819
SET(IMAGE_SRC image_loader_wic.cc)
1920
endif()
21+
2022
add_executable(image_classifier main.cc runnable_task.h data_processing.h ${IMAGE_SRC}
21-
async_ring_buffer.h image_loader.cc image_loader.h cached_interpolation.h single_consumer.h)
23+
async_ring_buffer.h image_loader.cc image_loader.h cached_interpolation.h single_consumer.h)
24+
2225
if(JPEG_FOUND)
2326
target_compile_definitions(image_classifier PRIVATE HAVE_JPEG)
2427
SET(IMAGE_HEADERS ${JPEG_INCLUDE_DIR})
2528
SET(IMAGE_LIBS ${JPEG_LIBRARIES})
2629
endif()
30+
2731
target_include_directories(image_classifier PRIVATE ${PROJECT_SOURCE_DIR}/include ${IMAGE_HEADERS})
32+
2833
if(WIN32)
2934
target_compile_definitions(image_classifier PRIVATE WIN32_LEAN_AND_MEAN NOMINMAX)
3035
endif()
36+
3137
target_link_libraries(image_classifier PRIVATE onnxruntime slim_fs_lib ${IMAGE_LIBS})
3238

3339

c_cxx/imagenet/image_loader.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
#include "cached_interpolation.h"
99
#include "sync_api.h"
1010
#include "data_processing.h"
11-
#include <onnxruntime_c_api.h>
11+
#include <onnxruntime_cxx_api.h>
1212

1313
template <typename T>
1414
void ResizeImageInMemory(const T* input_data, float* output_data, int in_height, int in_width, int out_height,

c_cxx/imagenet/local_filesystem.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
#include <unistd.h>
2020
#include <sys/mman.h>
2121
#endif
22+
2223
#include <onnxruntime_c_api.h>
2324
void ReadFileAsString(const ORTCHAR_T* fname, void*& p, size_t& len);
2425

c_cxx/imagenet/main.cc

Lines changed: 14 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,11 @@
1212
#include <vector>
1313
#include <memory>
1414
#include <atomic>
15+
#include <optional>
1516

16-
#include "providers.h"
17-
#include "local_filesystem.h"
18-
#include "sync_api.h"
17+
#include "providers.h"
18+
#include "local_filesystem.h"
19+
#include "sync_api.h"
1920

2021
#include <onnxruntime_cxx_api.h>
2122

@@ -26,8 +27,10 @@
2627
#ifdef _WIN32
2728
#include <atlbase.h>
2829
#endif
30+
2931
using namespace std::chrono;
3032

33+
3134
class Validator : public OutputCollector<TCharString> {
3235
private:
3336
static std::vector<std::string> ReadFileToVec(const TCharString& file_path, size_t expected_line_count) {
@@ -81,20 +84,15 @@ class Validator : public OutputCollector<TCharString> {
8184
int image_size_;
8285

8386
std::mutex m_;
84-
char* input_name_ = nullptr;
85-
char* output_name_ = nullptr;
87+
std::optional<Ort::AllocatedStringPtr> input_name_;
88+
std::optional<Ort::AllocatedStringPtr> output_name_;
8689
Ort::Env& env_;
8790
const TCharString model_path_;
8891
system_clock::time_point start_time_;
8992

9093
public:
9194
int GetImageSize() const { return image_size_; }
9295

93-
~Validator() {
94-
free(input_name_);
95-
free(output_name_);
96-
}
97-
9896
void PrintResult() {
9997
if (finished_count_ == 0) return;
10098
printf("Top-1 Accuracy %f\n", ((float)top_1_correct_count_.load() / finished_count_));
@@ -124,20 +122,15 @@ class Validator : public OutputCollector<TCharString> {
124122
VerifyInputOutputCount(session_);
125123
Ort::AllocatorWithDefaultOptions ort_alloc;
126124
{
127-
char* t = session_.GetInputName(0, ort_alloc);
128-
input_name_ = my_strdup(t);
129-
ort_alloc.Free(t);
130-
t = session_.GetOutputName(0, ort_alloc);
131-
output_name_ = my_strdup(t);
132-
ort_alloc.Free(t);
125+
input_name_.emplace(session_.GetInputNameAllocated(0, ort_alloc));
126+
output_name_.emplace(session_.GetOutputNameAllocated(0, ort_alloc));
133127
}
134128

135129
Ort::TypeInfo info = session_.GetInputTypeInfo(0);
136130
auto tensor_info = info.GetTensorTypeAndShapeInfo();
137131
size_t dim_count = tensor_info.GetDimensionsCount();
138132
assert(dim_count == 4);
139-
std::vector<int64_t> dims(dim_count);
140-
tensor_info.GetDimensions(dims.data(), dims.size());
133+
std::vector<int64_t> dims = tensor_info.GetShape();
141134
if (dims[1] != dims[2] || dims[3] != 3) {
142135
throw std::runtime_error("This model is not supported by this program. input tensor need be in NHWC format");
143136
}
@@ -150,8 +143,10 @@ class Validator : public OutputCollector<TCharString> {
150143
{
151144
std::lock_guard<std::mutex> l(m_);
152145
const size_t remain = task_id_list.size();
146+
const char* input_names[] = {input_name_->get()};
147+
char* output_names[] = {output_name_->get()};
153148
Ort::Value output_tensor{nullptr};
154-
session_.Run(Ort::RunOptions{nullptr}, &input_name_, &input_tensor, 1, &output_name_, &output_tensor, 1);
149+
session_.Run(Ort::RunOptions{nullptr}, input_names, &input_tensor, 1, output_names, &output_tensor, 1);
155150
float* probs = output_tensor.GetTensorMutableData<float>();
156151
for (const auto& s : task_id_list) {
157152
float* end = probs + output_class_count_;

c_cxx/imagenet/sync_api.h

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,6 @@
88
#else
99
#include <vector>
1010
#endif
11-
#include <onnxruntime_c_api.h>
12-
#include <onnxruntime_cxx_api.h>
1311

1412
#ifdef _WIN32
1513
#define my_strtol wcstol

0 commit comments

Comments
 (0)