10 changes: 9 additions & 1 deletion c_cxx/README.md
@@ -1,4 +1,12 @@
This directory contains a few C/C++ sample applications for demonstrating onnxruntime usage:
# ORT Tutorials - C/C++ Samples

There is a suite of C/C++ samples in the [ort_tutorial directory](./ort_tutorial).
All of these samples aim to rely solely on cross-vendor ONNX Runtime APIs and should run on any platform.
Each sample showcases a specific feature of the ONNX Runtime API and illustrates how and why you should use it. The minimum ONNX Runtime version for these samples is 1.23.0.

## Other samples

This directory contains a few other C/C++ sample applications to demonstrate more specific onnxruntime usage:

1. (Windows and Linux) fns_candy_style_transfer: A C application that uses the FNS-Candy style transfer model to re-style images. It is written purely in C, no C++.
2. (Windows only) MNIST: A Windows GUI application for handwriting recognition
36 changes: 36 additions & 0 deletions c_cxx/ort_tutorial/10_ep-device-selection/CMakeLists.txt
@@ -0,0 +1,36 @@
cmake_minimum_required(VERSION 3.20)
project(winai-samples)

list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../cmake")
include(onnxruntimesetup)

add_executable(ep-device-selection
main.cpp
utils.cpp
lodepng/lodepng.cpp
)

set_target_properties(ep-device-selection PROPERTIES
CXX_STANDARD 20
CXX_EXTENSIONS OFF
)
target_link_libraries(ep-device-selection PRIVATE
onnxruntime_interface
)
target_include_directories(ep-device-selection PRIVATE
lodepng
)

set(DEPTH_ANYTHING_FP16_ONNX "depth_anything_v2_torch.float16_240.onnx")

copy_file_to_bin_dir(${DEPTH_ANYTHING_FP16_ONNX})
copy_file_to_bin_dir(Input.png)

target_compile_definitions(ep-device-selection
PRIVATE -DMODEL_FILE="${DEPTH_ANYTHING_FP16_ONNX}")
set_target_properties(ep-device-selection
PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${RUNTIME_DIRECTORY}"
LIBRARY_OUTPUT_DIRECTORY "${RUNTIME_DIRECTORY}"
RUNTIME_OUTPUT_DIRECTORY "${RUNTIME_DIRECTORY}"
)
55 changes: 55 additions & 0 deletions c_cxx/ort_tutorial/10_ep-device-selection/README.md
@@ -0,0 +1,55 @@
# EP Device Selection

Since version 1.23.0, ONNX Runtime provides an execution-provider-independent way of querying and selecting
inference devices. This typically involves three steps.

- 1. Register the execution provider libraries
```cpp
auto env = Ort::Env(ORT_LOGGING_LEVEL_WARNING);
env.RegisterExecutionProviderLibrary("openvino", ORT_TSTR("onnxruntime_providers_openvino.dll"));
env.RegisterExecutionProviderLibrary("qnn", ORT_TSTR("onnxruntime_providers_qnn.dll"));
env.RegisterExecutionProviderLibrary("nv_tensorrt_rtx", ORT_TSTR("onnxruntime_providers_nv_tensorrt_rtx.dll"));
```

- 2. Query and select Execution Provider (EP) devices

```cpp
auto ep_devices = env.GetEpDevices();
auto selected_devices = my_ep_selection_function(ep_devices);
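// my_ep_selection_function is application code; see the sketch after step 3.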

Ort::SessionOptions session_options;
session_options.AppendExecutionProvider_V2(env, selected_devices, ep_options);
// Optionally, set device policy. E.g. OrtExecutionProviderDevicePolicy_PREFER_GPU, OrtExecutionProviderDevicePolicy_PREFER_NPU, OrtExecutionProviderDevicePolicy_MAX_PERFORMANCE
session_options.SetEpSelectionPolicy(OrtExecutionProviderDevicePolicy_PREFER_GPU);
```
- 3. Use the session options to create an inference session

```cpp
Ort::Session session(env, ORT_TSTR("path/to/model.onnx"), session_options);
```
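
The `my_ep_selection_function` called in step 2 is application code. Below is a minimal sketch of such a filter. It assumes the 1.23.0 C++ API, where `Env::GetEpDevices()` returns `std::vector<Ort::ConstEpDevice>` and each entry exposes an `EpName()` accessor, and it treats `"CPUExecutionProvider"` as the name of the default CPU EP; adapt the predicate to match on vendor, EP name, or device type as needed.

```cpp
#include <string_view>
#include <vector>

// Minimal sketch: keep every device that is not backed by the default CPU EP,
// and fall back to the full list if nothing else is available.
std::vector<Ort::ConstEpDevice>
my_ep_selection_function(const std::vector<Ort::ConstEpDevice> &ep_devices) {
  std::vector<Ort::ConstEpDevice> selected;
  for (const auto &device : ep_devices) {
    if (std::string_view(device.EpName()) != "CPUExecutionProvider") {
      selected.push_back(device);
    }
  }
  return selected.empty() ? ep_devices : selected;
}
```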


## Building the sample

`cmake -B build -S . -DONNX_RUNTIME_PATH=<path/to/onnxruntime> -DTRTRTX_RUNTIME_PATH=<path/to/TRTRTX/libs> && cmake --build build --config Release`

Then run
```
./build/Release/ep-device-selection -i ./Input.png -o ./output.png
```

Run

```
./build/Release/ep-device-selection -h
```
to see the remaining command-line options that influence device selection.
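
For example, to prefer an NPU device and restrict the choice to a particular vendor (flag names as defined in `argparsing.h`; `<vendor-name>` is a placeholder):

```
./build/Release/ep-device-selection -i ./Input.png -o ./output.png -p prefer-npu --select-vendor <vendor-name>
```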

## Model

The ONNX file in this folder was generated using code from https://github.com/DepthAnything/Depth-Anything-V2 (Apache 2.0)
with weights from https://huggingface.co/depth-anything/Depth-Anything-V2-Small/ (Apache 2.0).

## Dependencies

This sample vendors a copy of https://github.com/lvandeve/lodepng (Zlib license).
160 changes: 160 additions & 0 deletions c_cxx/ort_tutorial/10_ep-device-selection/argparsing.h
@@ -0,0 +1,160 @@
#pragma once

#include "utils.h"

#include <onnxruntime/core/graph/constants.h>
#include <onnxruntime/core/session/onnxruntime_cxx_api.h>

#include <array>
#include <cstdlib>
#include <cstring>
#include <filesystem>
#include <functional>
#include <string>
#include <string_view>

struct Opts {
std::string input_image;
std::string output_image;
std::string select_vendor;
std::string select_ep;
bool enableEpContext{true};
OrtExecutionProviderDevicePolicy ep_device_policy =
OrtExecutionProviderDevicePolicy_PREFER_GPU;
};

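// Describes one command-line option: its long and short names, help text, the
// number of value arguments it consumes, and a handler that reads argv starting
// at index i + 1 and returns false on a parse error.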
struct ArgumentSpec {
const char *name;
const char *short_name;
const char *help;
int num_args;
std::function<bool(int)> lambda;
};

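// Parses argv into Opts. Falls back to Input.png / output.png next to the
// executable when no paths are given, and exits with a usage message on any
// invalid argument.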
static Opts parse_args(int argc, char **argv) {
using namespace std::string_view_literals;
Opts opts;
auto arg_specs = std::array{
// clang-format off
ArgumentSpec{
"--input", "-i", "Path to input image (*.png)", 1, [&](int i) {
opts.input_image = argv[i + 1];
if (opts.input_image.starts_with("-")) {
LOG("Path to input image can't start with -: \"{}\"",
opts.input_image.c_str());
return false;
}
return true;
}},
ArgumentSpec{
"--output", "-o", "Path where to save output image (*.png)", 1, [&](int i) {
opts.output_image = argv[i + 1];
if (opts.output_image.starts_with("-")) {
LOG("Path to output image can't start with -: \"{}\"",
opts.output_image.c_str());
return false;
}
return true;
}},
ArgumentSpec{
"--select-vendor", "-f", "Select device of provided vendor.", 1, [&](int i) {
opts.select_vendor = argv[i + 1];
if (opts.select_vendor.starts_with("-")) {
LOG("Vendor can't start with -: \"{}\"",
opts.select_vendor.c_str());
return false;
}
return true;
}},
ArgumentSpec{
"--select-ep", "-f", "Select devices that support a specific execution provider. "
"See https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/graph/constants.h for EP names."
, 1, [&](int i) {
opts.select_vendor = argv[i + 1];
if (opts.select_vendor.starts_with("-")) {
LOG("Execution provider can't start with -: \"{}\"",
opts.select_vendor.c_str());
return false;
}
return true;
}},
ArgumentSpec{
"--ep-device-policy", "-p", "Set a EP device policy: e.g. prefer-cpu, prefer-gpu, prefer-npu, max-performance, max-efficiency, min-overall-power", 1, [&](int i) {
if(argv[i+1] == "prefer-cpu"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_PREFER_CPU;
} else if(argv[i+1] == "prefer-gpu"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_PREFER_GPU;
} else if(argv[i+1] == "prefer-npu"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_PREFER_NPU;
} else if(argv[i+1] == "max-performance"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_MAX_PERFORMANCE;
} else if(argv[i+1] == "max-efficiency"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_MAX_EFFICIENCY;
} else if(argv[i+1] == "min-overall-power"sv) {
opts.ep_device_policy = OrtExecutionProviderDevicePolicy_MIN_OVERALL_POWER;
} else {
LOG("Invalid execution provider policy: \"{}\"! Choose among prefer-cpu, prefer-gpu, prefer-npu, max-performance, max-efficiency, min-overall-power", argv[i+1]);
return false;
}
return true;
}}
// clang-format on
};
auto print_usage = [&] {
LOG("");
LOG("Usage:");
LOG("{} <options>", argv[0]);
for (auto &spec : arg_specs) {
if (spec.short_name) {
LOG("\t{} {} {}", spec.name, spec.short_name, spec.help);
} else {
LOG("\t{} {}", spec.name, spec.help);
}
}
};
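// Match each argument against the specs; a matching spec consumes
// spec.num_args value arguments and its handler reports parse errors.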
for (int i = 1; i < argc; i++) {
bool arg_found = false;
for (auto &spec : arg_specs) {
if (std::strcmp(spec.name, argv[i]) == 0 ||
(spec.short_name && std::strcmp(spec.short_name, argv[i]) == 0)) {
if (i + spec.num_args < argc) {
bool ok = spec.lambda(i);
if (!ok) {
LOG("Failed to parse arguments for {}!", spec.name);
exit(EXIT_FAILURE);
}
arg_found = true;
i += spec.num_args;
break;
} else {
LOG("Not enough arguments for {} specified!", spec.name);
exit(EXIT_FAILURE);
}
}
}
if (!arg_found) {
auto arg = argv[i];
LOG("Unknown argument: {}", arg);
print_usage();
exit(EXIT_FAILURE);
}
}
if (opts.input_image.empty()) {
opts.input_image = (get_executable_path().parent_path() / "Input.png").string();
}
if (opts.output_image.empty()) {
opts.output_image = (get_executable_path().parent_path() / "output.png").string();
}
if (!std::filesystem::is_regular_file(opts.input_image)) {
LOG("Please make sure that provided input image path exists: \"{}\"!",
opts.input_image.c_str());
print_usage();
exit(EXIT_FAILURE);
}
if (!std::filesystem::is_directory(
std::filesystem::path(opts.output_image).parent_path())) {
LOG("Please make sure that the parent directory of the provided output "
"path exists: \"{}\"!",
opts.output_image.c_str());
print_usage();
exit(EXIT_FAILURE);
}
return opts;
}