@@ -32,13 +32,16 @@ static HRESULT RunProc(LPWSTR commandLine)
3232 return HRESULT_FROM_WIN32 (exitCode);
3333}
3434
35+ // Use this test method definition if the test needs access to METHOD_NAME inside the test body
36+ #define TEST_METHOD_WITH_NAME(methodName) TEST_METHOD(methodName) { const std::wstring METHOD_NAME(L#methodName);
37+
3538namespace WinMLRunnerTest
3639{
3740 static const std::wstring CURRENT_PATH = FileHelper::GetModulePath();
3841 static const std::wstring EXE_PATH = CURRENT_PATH + L"WinMLRunner.exe";
3942 static const std::wstring INPUT_FOLDER_PATH = CURRENT_PATH + L"test_folder_input";
4043 static const std::wstring OUTPUT_PATH = CURRENT_PATH + L"test_output.csv";
41- static const std::wstring TENSOR_DATA_PATH = CURRENT_PATH + L"TestResult";
44+ static const std::wstring TENSOR_DATA_PATH = CURRENT_PATH + L"TestResults";
4245
4346 static std::wstring BuildCommand (std::initializer_list<std::wstring>&& arguments)
4447 {
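The TEST_METHOD_WITH_NAME macro added above wraps the framework's TEST_METHOD, opens the method body, and binds a METHOD_NAME wide string to the stringized test name so each test can build a per-test output folder; the test body supplies its own closing brace. A minimal, framework-free sketch of the same pattern (DEMO_TEST_WITH_NAME and MyTensorTest are illustrative names, not part of this change):

    #include <iostream>
    #include <string>

    // Same shape as TEST_METHOD_WITH_NAME: declare the function, open its body,
    // and bind METHOD_NAME to the stringized name; the body closes the brace.
    #define DEMO_TEST_WITH_NAME(name) void name() { const std::wstring METHOD_NAME(L#name);

    DEMO_TEST_WITH_NAME(MyTensorTest)
        // e.g. derive a per-test output folder from METHOD_NAME
        std::wcout << L"TestResults\\" << METHOD_NAME << L"\n";  // prints TestResults\MyTensorTest
    }

    int main() { MyTensorTest(); }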
@@ -217,8 +220,7 @@ namespace WinMLRunnerTest
217220 return true ;
218221 }
219222
220- TEST_CLASS (GarbageInputTest) {
221- public: TEST_CLASS_INITIALIZE (SetupClass) {
223+ TEST_CLASS (GarbageInputTest) { public: TEST_CLASS_INITIALIZE (SetupClass) {
222224 // Make test_folder_input folder before starting the tests
223225 std::string mkFolderCommand = "mkdir " + std::string (INPUT_FOLDER_PATH.begin (), INPUT_FOLDER_PATH.end ());
224226 system (mkFolderCommand.c_str ());
@@ -241,6 +243,13 @@ public: TEST_CLASS_INITIALIZE(SetupClass) {
241243 std::string copyCommand = "rd /s /q ";
242244 copyCommand += std::string (INPUT_FOLDER_PATH.begin (), INPUT_FOLDER_PATH.end ());
243245 system (copyCommand.c_str ());
246+ try
247+ {
248+ std::filesystem::remove_all (std::string (TENSOR_DATA_PATH.begin (), TENSOR_DATA_PATH.end ()).c_str ());
249+ }
250+ catch (const std::filesystem::filesystem_error &)
251+ {
252+ }
244253 }
245254
246255 TEST_METHOD_CLEANUP (CleanupMethod)
@@ -521,6 +530,7 @@ public: TEST_CLASS_INITIALIZE(SetupClass) {
521530 }
522531 catch (const std::filesystem::filesystem_error &) {}
523532 }
533+
524534 TEST_METHOD (ProvidedImageInputCpuAndGpu)
525535 {
526536 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -552,78 +562,84 @@ public: TEST_CLASS_INITIALIZE(SetupClass) {
552562 BuildCommand ({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, L"-autoScale", L"Cubic" });
553563 Assert::AreEqual (S_OK, RunProc ((wchar_t *)command.c_str ()));
554564 }
555- TEST_METHOD (ProvidedImageInputOnlyGpuSaveTensor)
556- {
565+
566+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyGpuSaveTensor)
557567 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
558568 const std::wstring inputPath = CURRENT_PATH + L"fish.png";
569+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
559570 const std::wstring command = BuildCommand ({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
560- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
571+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
561572 Assert::AreEqual (S_OK, RunProc ((wchar_t *)command.c_str ()));
562573 Assert::AreEqual (true, CompareTensors (L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
563- TENSOR_DATA_PATH + L"\\softmaxout_1GpuIteration1.csv"));
574+ tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
564575 }
565- TEST_METHOD (ProvidedImageInputOnlyCpuSaveTensor)
566- {
576+
577+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyCpuSaveTensor)
567578 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
568579 const std::wstring inputPath = CURRENT_PATH + L"fish.png";
580+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
569581 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
570- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
582+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
571583 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
572584 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_CPU.csv",
573- TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
585+ tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
574586 }
575587
576- TEST_METHOD (ProvidedImageInputOnlyGpuSaveTensorImageDenotation)
577- {
588+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyGpuSaveTensorImageDenotation)
578589 const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
579590 const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
591+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
580592 const std::wstring command = BuildCommand ({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
581- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
593+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
582594 Assert::AreEqual (S_OK, RunProc ((wchar_t *)command.c_str ()));
583595 Assert::AreEqual (true, CompareTensors (L"OutputTensorData\\Mnist_8_input_GPU.csv",
584- TENSOR_DATA_PATH + L"\\Plus214_Output_0GpuIteration1.csv"));
596+ tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv"));
585597 }
586- TEST_METHOD (ProvidedImageInputOnlyCpuSaveTensorImageDenotation)
587- {
598+
599+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyCpuSaveTensorImageDenotation)
588600 const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
589601 const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
602+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
590603 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
591- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
604+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
592605 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
593606 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Mnist_8_input_CPU.csv",
594- TENSOR_DATA_PATH + L"\\Plus214_Output_0CpuIteration1.csv"));
607+ tensorDataPath + L"\\Plus214_Output_0CpuIteration1.csv"));
595608 }
596- TEST_METHOD (ProvidedImageInputOnlyGpuSaveTensorFp16)
597- {
609+
610+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyGpuSaveTensorFp16)
598611 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
599612 const std::wstring inputPath = CURRENT_PATH + L"fish.png";
613+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
600614 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
601- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
615+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
602616 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
603617 Assert::AreEqual (true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv",
604- TENSOR_DATA_PATH + L"\\softmaxout_1GpuIteration1.csv"));
618+ tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
605619 }
606- TEST_METHOD (ProvidedImageInputOnlyCpuSaveTensorFp16)
607- {
620+
621+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyCpuSaveTensorFp16)
608622 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
609623 const std::wstring inputPath = CURRENT_PATH + L"fish.png";
624+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
610625 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
611- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
626+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
612627 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
613628 Assert::AreEqual (true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_CPU.csv",
614- TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
629+ tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
615630 }
616- TEST_METHOD (ProvidedImageInputOnlyCpuPerIterationPerformance)
617- {
631+
632+ TEST_METHOD_WITH_NAME (ProvidedImageInputOnlyCpuPerIterationPerformance)
618633 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
634+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
619635 const std::wstring command =
620636 BuildCommand ({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf",
621- L"-SavePerIterationPerf", L"-BaseOutputPath", TENSOR_DATA_PATH,
637+ L"-SavePerIterationPerf", L"-BaseOutputPath", tensorDataPath,
622638 L"-PerIterationPath PerIterationData", L"-CPU" });
623639 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
624640
625641 // We need to expect one more line because of the header
626- Assert::AreEqual (static_cast<size_t>(2), GetOutputCSVLineCount (TENSOR_DATA_PATH + L"\\PerIterationData\\Summary.csv"));
642+ Assert::AreEqual (static_cast<size_t>(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv"));
627643 }
628644 };
629645
@@ -641,90 +657,98 @@ public: TEST_CLASS_INITIALIZE(SetupClass) {
641657 }
642658 catch (const std::filesystem::filesystem_error &) {}
643659 }
660+
644661 TEST_METHOD (ProvidedCSVInput)
645662 {
646663 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
647664 const std::wstring inputPath = CURRENT_PATH + L"kitten_224.csv";
648665 const std::wstring command = BuildCommand ({ EXE_PATH, L"-model", modelPath, L"-input", inputPath });
649666 Assert::AreEqual (S_OK, RunProc ((wchar_t *)command.c_str ()));
650667 }
668+
651669 TEST_METHOD (ProvidedCSVBadBinding)
652670 {
653671 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
654672 const std::wstring inputPath = CURRENT_PATH + L"horizontal-crop.csv";
655673 const std::wstring command = BuildCommand ({ EXE_PATH, L"-model", modelPath, L"-input", inputPath });
656674 Assert::AreEqual (HRESULT_FROM_WIN32 (ERROR_INVALID_PARAMETER), RunProc ((wchar_t *)command.c_str ()));
657675 }
658- TEST_METHOD (ProvidedCSVInputGPUSaveCpuBoundTensor)
659- {
676+
677+ TEST_METHOD_WITH_NAME (ProvidedCSVInputGPUSaveCpuBoundTensor)
660678 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
661679 const std::wstring inputPath = CURRENT_PATH + L"fish.csv";
680+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
662681 const std::wstring command = BuildCommand ({ EXE_PATH, L"-model", modelPath, L"-input", inputPath,
663- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
682+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
664683 Assert::AreEqual (S_OK, RunProc ((wchar_t *)command.c_str ()));
665684 Assert::AreEqual (true, CompareTensors (L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
666- TENSOR_DATA_PATH + L"\\softmaxout_1GpuIteration1.csv"));
685+ tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
667686 }
668- TEST_METHOD (ProvidedCSVInputGPUSaveGpuBoundTensor)
669- {
687+
688+ TEST_METHOD_WITH_NAME (ProvidedCSVInputGPUSaveGpuBoundTensor)
670689 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
671690 const std::wstring inputPath = CURRENT_PATH + L"fish.csv";
691+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
672692 const std::wstring command = BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath,
673- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU", L"-GPUBoundInput" });
693+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU", L"-GPUBoundInput" });
674694 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
675695 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
676- TENSOR_DATA_PATH + L"\\softmaxout_1GpuIteration1.csv"));
696+ tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
677697 }
678- TEST_METHOD (ProvidedCSVInputCPUSaveCpuBoundTensor)
679- {
698+
699+ TEST_METHOD_WITH_NAME (ProvidedCSVInputCPUSaveCpuBoundTensor)
680700 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
681701 const std::wstring inputPath = CURRENT_PATH + L"fish.csv";
702+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
682703 const std::wstring command = BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath,
683- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
704+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
684705 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
685706 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_CPU.csv",
686- TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
707+ tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
687708 }
688- TEST_METHOD (ProvidedCSVInputGPUSaveCpuBoundTensorFp16)
689- {
709+
710+ TEST_METHOD_WITH_NAME (ProvidedCSVInputGPUSaveCpuBoundTensorFp16)
690711 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
691712 const std::wstring inputPath = CURRENT_PATH + L"fish.csv";
713+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
692714 const std::wstring command = BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath,
693- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
715+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
694716 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
695717 Assert::AreEqual (true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv",
696- TENSOR_DATA_PATH + L"\\softmaxout_1GpuIteration1.csv"));
718+ tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
697719 }
698- TEST_METHOD (ProvidedCSVInputCPUSaveCpuBoundTensorFp16)
699- {
720+
721+ TEST_METHOD_WITH_NAME (ProvidedCSVInputCPUSaveCpuBoundTensorFp16)
700722 const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
701723 const std::wstring inputPath = CURRENT_PATH + L"fish.csv";
724+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
702725 const std::wstring command = BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath,
703- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
726+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
704727 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
705728 Assert::AreEqual (true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_CPU.csv",
706- TENSOR_DATA_PATH + L"\\softmaxout_1CpuIteration1.csv"));
729+ tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
707730 }
708731
709- TEST_METHOD (ProvidedCSVInputOnlyGpuSaveCpuBoundTensorImageDenotation)
710- {
732+ TEST_METHOD_WITH_NAME (ProvidedCSVInputOnlyGpuSaveCpuBoundTensorImageDenotation)
711733 const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
712734 const std::wstring inputPath = CURRENT_PATH + L"mnist_28.csv";
735+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
713736 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
714- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-GPU" });
737+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
715738 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
716739 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv",
717- TENSOR_DATA_PATH + L"\\Plus214_Output_0GpuIteration1.csv"));
740+ tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv"));
718741 }
719- TEST_METHOD (ProvidedCSVInputOnlyCpuSaveCpuBoundTensorImageDenotation)
720- {
742+
743+ TEST_METHOD_WITH_NAME (ProvidedCSVInputOnlyCpuSaveCpuBoundTensorImageDenotation)
721744 const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
722745 const std::wstring inputPath = CURRENT_PATH + L"mnist_28.csv";
746+ const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
723747 const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
724- L"-SaveTensorData", L"First", L"-PerIterationPath", TENSOR_DATA_PATH, L"-CPU" });
748+ L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU" });
725749 Assert::AreEqual (S_OK, RunProc((wchar_t *)command.c_str()));
726750 Assert::AreEqual (true, CompareTensors(L"OutputTensorData\\Mnist_8_input_CPU.csv",
727- TENSOR_DATA_PATH + L"\\Plus214_Output_0CpuIteration1.csv"));
751+ tensorDataPath + L"\\Plus214_Output_0CpuIteration1.csv"));
728752 }
729753 };
730754