Skip to content

Commit 5cf6f97

Browse files
authored
Support Models with Image Denotation Input (#209)
* Added support for models with image denotation * Added bind-image support for models with image denotation * Added mnist model and mnist input files * Model tensor inputs should have channel count 3 or 1 when binding an image * Added mnist tests * Removed ModelBinding.h * Addressed PR comments * Fixed the is-image / is-CSV input check
1 parent 4554ed6 commit 5cf6f97

File tree

10 files changed

+351
-378
lines changed

10 files changed

+351
-378
lines changed

Testing/WinMLRunnerTest/WinMLRunnerTest.cpp

Lines changed: 47 additions & 127 deletions
Original file line numberDiff line numberDiff line change
@@ -212,9 +212,10 @@ namespace WinMLRunnerTest
212212
return true;
213213
}
214214

215-
TEST_CLASS(GarbageInputTest){ public : TEST_CLASS_INITIALIZE(SetupClass){
216-
// Make test_folder_input folder before starting the tests
217-
std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
215+
TEST_CLASS(GarbageInputTest) {
216+
public: TEST_CLASS_INITIALIZE(SetupClass) {
217+
// Make test_folder_input folder before starting the tests
218+
std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
218219
system(mkFolderCommand.c_str());
219220

220221
std::vector<std::string> models = { "SqueezeNet.onnx", "keras_Add_ImageNet_small.onnx" };
@@ -226,7 +227,7 @@ namespace WinMLRunnerTest
226227
copyCommand += model;
227228
copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
228229
system(copyCommand.c_str());
229-
}
230+
}
230231
} // namespace WinMLRunnerTest
231232

232233
TEST_CLASS_CLEANUP(CleanupClass)
@@ -284,18 +285,6 @@ namespace WinMLRunnerTest
284285
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
285286
}
286287

287-
TEST_METHOD(GarbageInputCpuClientDeviceCpuBoundRGBImage)
288-
{
289-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
290-
const std::wstring command =
291-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
292-
L"-CPUBoundInput", L"-RGB", L"-CreateDeviceOnClient" });
293-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
294-
295-
// We need to expect one more line because of the header
296-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
297-
}
298-
299288
TEST_METHOD(GarbageInputCpuWinMLDeviceCpuBoundRGBImage)
300289
{
301290
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -308,18 +297,6 @@ namespace WinMLRunnerTest
308297
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
309298
}
310299

311-
TEST_METHOD(GarbageInputCpuClientDeviceCpuBoundBGRImage)
312-
{
313-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
314-
const std::wstring command =
315-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
316-
L"-CPUBoundInput", L"-BGR", L"-CreateDeviceOnClient" });
317-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
318-
319-
// We need to expect one more line because of the header
320-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
321-
}
322-
323300
TEST_METHOD(GarbageInputCpuWinMLDeviceCpuBoundBGRImage)
324301
{
325302
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -332,18 +309,6 @@ namespace WinMLRunnerTest
332309
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
333310
}
334311

335-
TEST_METHOD(GarbageInputCpuClientDeviceCpuBoundTensor)
336-
{
337-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
338-
const std::wstring command =
339-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
340-
L"-CPUBoundInput", L"-tensor", L"-CreateDeviceOnClient" });
341-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
342-
343-
// We need to expect one more line because of the header
344-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
345-
}
346-
347312
TEST_METHOD(GarbageInputCpuWinMLDeviceCpuBoundTensor)
348313
{
349314
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -356,18 +321,6 @@ namespace WinMLRunnerTest
356321
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
357322
}
358323

359-
TEST_METHOD(GarbageInputCpuClientDeviceGpuBoundRGBImage)
360-
{
361-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
362-
const std::wstring command =
363-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
364-
L"-GPUBoundInput", L"-RGB", L"-CreateDeviceOnClient" });
365-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
366-
367-
// We need to expect one more line because of the header
368-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
369-
}
370-
371324
TEST_METHOD(GarbageInputCpuWinMLDeviceGpuBoundRGBImage)
372325
{
373326
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -380,18 +333,6 @@ namespace WinMLRunnerTest
380333
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
381334
}
382335

383-
TEST_METHOD(GarbageInputCpuClientDeviceGpuBoundBGRImage)
384-
{
385-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
386-
const std::wstring command =
387-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
388-
L"-GPUBoundInput", L"-BGR", L"-CreateDeviceOnClient" });
389-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
390-
391-
// We need to expect one more line because of the header
392-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
393-
}
394-
395336
TEST_METHOD(GarbageInputCpuWinMLDeviceGpuBoundBGRImage)
396337
{
397338
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -404,18 +345,6 @@ namespace WinMLRunnerTest
404345
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
405346
}
406347

407-
TEST_METHOD(GarbageInputCpuClientDeviceGpuBoundTensor)
408-
{
409-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
410-
const std::wstring command =
411-
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", L"-CPU",
412-
L"-GPUBoundInput", L"-tensor", L"-CreateDeviceOnClient" });
413-
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
414-
415-
// We need to expect one more line because of the header
416-
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
417-
}
418-
419348
TEST_METHOD(GarbageInputCpuWinMLDeviceGpuBoundTensor)
420349
{
421350
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -572,32 +501,6 @@ namespace WinMLRunnerTest
572501
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount());
573502
}
574503

575-
TEST_METHOD(GarbageInputAllPermutations)
576-
{
577-
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
578-
const std::wstring command = BuildCommand({
579-
EXE_PATH,
580-
L"-model",
581-
modelPath,
582-
L"-PerfOutput",
583-
OUTPUT_PATH,
584-
L"-perf",
585-
L"-CPU",
586-
L"-GPU",
587-
L"-CreateDeviceOnClient",
588-
L"-CreateDeviceInWinML",
589-
L"-CPUBoundInput",
590-
L"-GPUBoundInput",
591-
L"-RGB",
592-
L"-BGR",
593-
L"-tensor"
594-
});
595-
Assert::AreEqual(S_OK, RunProc((wchar_t *)command.c_str()));
596-
597-
// We need to expect one more line because of the header
598-
Assert::AreEqual(static_cast<size_t>(25), GetOutputCSVLineCount());
599-
}
600-
601504
TEST_METHOD(RunAllModelsInFolderGarbageInput)
602505
{
603506
const std::wstring command = BuildCommand({ EXE_PATH, L"-folder", INPUT_FOLDER_PATH, L"-PerfOutput", OUTPUT_PATH, L"-perf" });
@@ -606,31 +509,6 @@ namespace WinMLRunnerTest
606509
// We need to expect one more line because of the header
607510
Assert::AreEqual(static_cast<size_t>(5), GetOutputCSVLineCount());
608511
}
609-
610-
TEST_METHOD(RunAllModelsInFolderGarbageInputWithAllPermutations)
611-
{
612-
const std::wstring command = BuildCommand({
613-
EXE_PATH,
614-
L"-folder",
615-
INPUT_FOLDER_PATH,
616-
L"-PerfOutput",
617-
OUTPUT_PATH,
618-
L"-perf",
619-
L"-CPU",
620-
L"-GPU",
621-
L"-CreateDeviceOnClient",
622-
L"-CreateDeviceInWinML",
623-
L"-CPUBoundInput",
624-
L"-GPUBoundInput",
625-
L"-RGB",
626-
L"-BGR",
627-
L"-tensor"
628-
});
629-
Assert::AreEqual(S_OK, RunProc((wchar_t *)command.c_str()));
630-
631-
// We need to expect one more line because of the header
632-
Assert::AreEqual(static_cast<size_t>(49), GetOutputCSVLineCount());
633-
}
634512
};
635513

636514
TEST_CLASS(ImageInputTest)
@@ -697,6 +575,27 @@ namespace WinMLRunnerTest
697575
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_CPU.csv",
698576
TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
699577
}
578+
579+
TEST_METHOD(ProvidedImageInputOnlyGpuSaveTensorImageDenotation)
580+
{
581+
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
582+
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
583+
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
584+
L"-SaveTensorData", L"First", TENSOR_DATA_PATH, L"-GPU" });
585+
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
586+
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv",
587+
TENSOR_DATA_PATH + L"\\Plus214_Output_0GpuIteration1.csv"));
588+
}
589+
TEST_METHOD(ProvidedImageInputOnlyCpuSaveTensorImageDenotation)
590+
{
591+
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
592+
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
593+
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
594+
L"-SaveTensorData", L"First", TENSOR_DATA_PATH, L"-CPU" });
595+
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
596+
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_CPU.csv",
597+
TENSOR_DATA_PATH + L"\\Plus214_Output_0CpuIteration1.csv"));
598+
}
700599
TEST_METHOD(ProvidedImageInputOnlyGpuSaveTensorFp16)
701600
{
702601
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
@@ -787,6 +686,27 @@ namespace WinMLRunnerTest
787686
Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_CPU.csv",
788687
TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
789688
}
689+
690+
TEST_METHOD(ProvidedCSVInputOnlyGpuSaveTensorImageDenotation)
691+
{
692+
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
693+
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.csv";
694+
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
695+
L"-SaveTensorData", L"First", TENSOR_DATA_PATH, L"-GPU" });
696+
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
697+
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv",
698+
TENSOR_DATA_PATH + L"\\Plus214_Output_0GpuIteration1.csv"));
699+
}
700+
TEST_METHOD(ProvidedCSVInputOnlyCpuSaveTensorImageDenotation)
701+
{
702+
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
703+
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.csv";
704+
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
705+
L"-SaveTensorData", L"First", TENSOR_DATA_PATH, L"-CPU" });
706+
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
707+
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_CPU.csv",
708+
TENSOR_DATA_PATH + L"\\Plus214_Output_0CpuIteration1.csv"));
709+
}
790710
};
791711

792712
TEST_CLASS(ConcurrencyTest)

Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -368,6 +368,39 @@
368368
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</DeploymentContent>
369369
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
370370
</Content>
371+
<Content Include="OutputTensorData\Mnist_8_input_CPU.csv">
372+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</ExcludedFromBuild>
373+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</DeploymentContent>
374+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</ExcludedFromBuild>
375+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</DeploymentContent>
376+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
377+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</DeploymentContent>
378+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
379+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</DeploymentContent>
380+
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
381+
</Content>
382+
<Content Include="OutputTensorData\Mnist_8_input_GPU.csv">
383+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</ExcludedFromBuild>
384+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</DeploymentContent>
385+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</ExcludedFromBuild>
386+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</DeploymentContent>
387+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
388+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</DeploymentContent>
389+
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
390+
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</DeploymentContent>
391+
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
392+
</Content>
393+
</ItemGroup>
394+
<ItemGroup>
395+
<CopyFileToFolders Include="..\..\SharedContent\media\mnist_28.csv">
396+
<FileType>Document</FileType>
397+
</CopyFileToFolders>
398+
<CopyFileToFolders Include="..\..\SharedContent\models\mnist.onnx">
399+
<FileType>Document</FileType>
400+
</CopyFileToFolders>
401+
</ItemGroup>
402+
<ItemGroup>
403+
<CopyFileToFolders Include="..\..\SharedContent\media\mnist_28.png" />
371404
</ItemGroup>
372405
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
373406
<ImportGroup Label="ExtensionTargets">

Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj.filters

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,4 +33,15 @@
3333
<Filter>SharedContent</Filter>
3434
</Content>
3535
</ItemGroup>
36+
<ItemGroup>
37+
<CopyFileToFolders Include="..\..\SharedContent\media\mnist_28.png">
38+
<Filter>SharedContent</Filter>
39+
</CopyFileToFolders>
40+
<CopyFileToFolders Include="..\..\SharedContent\media\mnist_28.csv">
41+
<Filter>SharedContent</Filter>
42+
</CopyFileToFolders>
43+
<CopyFileToFolders Include="..\..\SharedContent\models\mnist.onnx">
44+
<Filter>SharedContent</Filter>
45+
</CopyFileToFolders>
46+
</ItemGroup>
3647
</Project>

Tools/WinMLRunner/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ Required command-Line arguments:
3535
-GPU : run model on default GPU
3636
-GPUHighPerformance : run model on GPU with highest performance
3737
-GPUMinPower : run model on GPU with the least power
38-
-CreateDeviceOnClient : create the device on the client and pass it to WinML
38+
-CreateDeviceOnClient : create the D3D device on the client and pass it to WinML to create session
3939
-CreateDeviceInWinML : create the device inside WinML
4040
-CPUBoundInput : bind the input to the CPU
4141
-GPUBoundInput : bind the input to the GPU

0 commit comments

Comments
 (0)