@@ -29,6 +29,8 @@ struct ONNXFunctor {
 
    std::vector<const char *> input_node_names;
    std::vector<const char *> output_node_names;
+   std::vector<std::string> input_node_str;
+   std::vector<std::string> output_node_str;
 
    std::vector<float> input_tensor_values;
 
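
A note on the two new members (my reading of the change, not stated in the diff): the ORT C++ API consumes arrays of const char *, so the names must live in storage the functor owns; the std::string vectors provide that storage and the existing const char * vectors simply point into it. A minimal ownership sketch, with a hypothetical node name:

   // Illustrative only: the std::string owns the characters; the raw pointer
   // handed to ONNX Runtime aliases it and stays valid while the string lives.
   std::vector<std::string> owned_names{"dense_input"};             // hypothetical name
   std::vector<const char *> name_ptrs{owned_names.back().c_str()};

With a single entry per vector, as in this functor, no later reallocation can invalidate the stored pointer.
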
@@ -53,19 +55,25 @@ struct ONNXFunctor {
       // std::cout << "benchmarking model " << model_path << std::endl;
       session = std::make_shared<Ort::Session>(env, model_path.c_str(), session_options);
 
-
 
-      Ort::AllocatorWithDefaultOptions allocator;
-      input_node_names.push_back(session->GetInputName(0, allocator));
-      output_node_names.push_back(session->GetOutputName(0, allocator));
 
+      Ort::AllocatorWithDefaultOptions allocator;
+#if ORT_API_VERSION > 12
+      input_node_str.push_back(session->GetInputNameAllocated(0, allocator).get());
+      output_node_str.push_back(session->GetOutputNameAllocated(0, allocator).get());
+#else
+      input_node_str.push_back(session->GetInputName(0, allocator));
+      output_node_str.push_back(session->GetOutputName(0, allocator));
+#endif
+      input_node_names.push_back(input_node_str.back().c_str());
+      output_node_names.push_back(output_node_str.back().c_str());
       // Getting the shapes
 
       input_node_dims = session->GetInputTypeInfo(0).GetTensorTypeAndShapeInfo().GetShape();
       output_node_dims = session->GetOutputTypeInfo(0).GetTensorTypeAndShapeInfo().GetShape();
 
       // Calculating the dimension of the input tensor
-
+
 
       size_t input_tensor_size = std::accumulate(input_node_dims.begin(), input_node_dims.end(), 1, std::multiplies<int>());
       // std::vector<float> input_tensor_values(input_tensor_size);
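
For context: GetInputName/GetOutputName were deprecated in ONNX Runtime 1.13 (ORT_API_VERSION 13) in favour of GetInputNameAllocated/GetOutputNameAllocated, which return an Ort::AllocatedStringPtr that frees the name buffer when it goes out of scope. Copying the result into a std::string, as the hunk above does, keeps a valid pointer around for the later Run calls. A self-contained sketch of the same pattern (the model path "model.onnx" is a placeholder, and a char-based path, i.e. a non-Windows build, is assumed):

   #include <onnxruntime_cxx_api.h>
   #include <string>
   #include <vector>

   int main()
   {
      Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "name-sketch");
      Ort::SessionOptions opts;
      Ort::Session session(env, "model.onnx", opts); // placeholder model path
      Ort::AllocatorWithDefaultOptions allocator;

      std::string input_name, output_name;
   #if ORT_API_VERSION > 12
      // The AllocatedStringPtr releases its buffer on destruction, so copy immediately.
      input_name = session.GetInputNameAllocated(0, allocator).get();
      output_name = session.GetOutputNameAllocated(0, allocator).get();
   #else
      input_name = session.GetInputName(0, allocator);
      output_name = session.GetOutputName(0, allocator);
   #endif
      std::vector<const char *> input_names{input_name.c_str()};
      std::vector<const char *> output_names{output_name.c_str()};
      return 0;
   }
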
@@ -94,7 +102,7 @@ struct ONNXFunctor {
       inputArray[off + 5] = x5;
       inputArray[off + 6] = x6;
 
-
+
 
       auto output_tensors = session->Run(Ort::RunOptions{nullptr}, input_node_names.data(), &inputTensor, 1, output_node_names.data(), 1);
       float *floatarr = output_tensors.front().GetTensorMutableData<float>();
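
The inputTensor consumed by the Run call above has to be an Ort::Value wrapping the flat float buffer. The patch does not touch that step, but here is a hedged sketch of how such a tensor is typically built and run; the helper name and the {1, N} shape (N = 7 to mirror the seven inputArray entries) are assumptions, not the benchmark's exact code:

   // Illustrative helper, not part of the patch: wrap a flat float buffer in an
   // Ort::Value and run a single-input / single-output session.
   #include <onnxruntime_cxx_api.h>
   #include <array>
   #include <cstdint>
   #include <vector>

   std::vector<float> RunOnce(Ort::Session &session, const char *input_name,
                              const char *output_name, std::vector<float> &values)
   {
      std::array<int64_t, 2> shape{1, static_cast<int64_t>(values.size())}; // e.g. {1, 7}
      auto mem = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
      Ort::Value input = Ort::Value::CreateTensor<float>(mem, values.data(), values.size(),
                                                         shape.data(), shape.size());
      auto outputs = session.Run(Ort::RunOptions{nullptr}, &input_name, &input, 1, &output_name, 1);
      float *data = outputs.front().GetTensorMutableData<float>();
      size_t n = outputs.front().GetTensorTypeAndShapeInfo().GetElementCount();
      return std::vector<float>(data, data + n);
   }
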
@@ -130,8 +138,10 @@ void BM_RDF_ONNX_Inference(benchmark::State &state)
    auto fileName = "Higgs_data_full.root";
    // file is available at "https://cernbox.cern.ch/index.php/s/YuSHwTXBa0UBEhD/download";
    // do curl https://cernbox.cern.ch/index.php/s/XaPBtaGrnN38wU0 -o Higgs_data_full.root
+   // https://cernbox.cern.ch/s/vLOqclhWirZEWpj
+   std::string directLink = "https://cernbox.cern.ch/remote.php/dav/public-files/vLOqclhWirZEWpj/Higgs_data_full.root";
    if (gSystem->AccessPathName(fileName)) {
-      std::string cmd = "curl https://cernbox.cern.ch/index.php/s/YuSHwTXBa0UBEhD/download -o ";
+      std::string cmd = "curl " + directLink + " -o ";
       cmd += fileName;
       gSystem->Exec(cmd.c_str());
    }
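
Since TSystem::Exec reports the status of the shell command it ran, a slightly more defensive version of this download guard could detect a failed curl call instead of benchmarking against a missing or truncated file. A sketch, not part of the patch, reusing the same fileName and directLink:

   // Illustrative variant: stop early if the download did not produce the file.
   // Assumes TError.h for Error(); TSystem.h is already needed for gSystem.
   if (gSystem->AccessPathName(fileName)) { // true when the file is *not* accessible
      std::string cmd = "curl " + directLink + " -o " + fileName;
      if (gSystem->Exec(cmd.c_str()) != 0 || gSystem->AccessPathName(fileName)) {
         Error("BM_RDF_ONNX_Inference", "failed to download %s", fileName);
         return;
      }
   }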