Commit 81ddd9c

chemwolf6922 authored and Aditya Rastogi committed
update winml python example (#516)
1 parent 1e211a3 · commit 81ddd9c

6 files changed (+115, -81 lines)

Samples/WindowsML/python/Download-Model.ps1

Lines changed: 6 additions & 6 deletions

@@ -11,16 +11,16 @@ function Get-FileFromUri {
     param (
         [Parameter(Mandatory=$true)][string]$Uri,
         [Parameter(Mandatory=$true)][string]$OutFile,
-        [Parameter(Mandatory=$true)][string]$ExpectedMD5
+        [Parameter(Mandatory=$true)][string]$ExpectedHash
     )
 
     $needsDownload = $true
     Write-Host "Downloading $OutFile from $Uri" -ForegroundColor Cyan
 
     if (Test-Path $OutFile -PathType Leaf) {
-        Write-Host "File exists. Verifying MD5 checksum..." -ForegroundColor Yellow
-        $md5 = Get-FileHash -Path $OutFile -Algorithm MD5
-        if ($md5.Hash -eq $ExpectedMD5) {
+        Write-Host "File exists. Verifying checksum..." -ForegroundColor Yellow
+        $hash = Get-FileHash -Path $OutFile -Algorithm SHA256
+        if ($hash.Hash -eq $ExpectedHash) {
            Write-Host "File checksum matches. Using existing file." -ForegroundColor Green
            $needsDownload = $false
        } else {
@@ -44,9 +44,9 @@ function Get-FileFromUri {
 # Download the model file
 $modelPath = Join-Path $targetPath 'SqueezeNet.onnx'
 $modelUri = 'https://github.com/microsoft/Windows-Machine-Learning/blob/02b586811c8beb1ae2208c8605393267051257ae/SharedContent/models/SqueezeNet.onnx?raw=true'
-Get-FileFromUri -Uri $modelUri -OutFile $modelPath -ExpectedMD5 "3EA0D4AECA899A4F99216FC4BEBD9D0D" | Out-Null
+Get-FileFromUri -Uri $modelUri -OutFile $modelPath -ExpectedHash "D95E2191E056F1912A9B8F6000DA3B9C7818441B9EB48137033C2ADBF6398BC8" | Out-Null
 
 # Download the labels file
 $labelsPath = Join-Path $targetPath 'SqueezeNet.Labels.txt'
 $labelsUri = 'https://github.com/microsoft/Windows-Machine-Learning/blob/02b586811c8beb1ae2208c8605393267051257ae/Samples/SqueezeNetObjectDetection/Desktop/cpp/Labels.txt?raw=true'
-Get-FileFromUri -Uri $labelsUri -OutFile $labelsPath -ExpectedMD5 "7317EA720B83CB3CADB75AD91419F6A8" | Out-Null
+Get-FileFromUri -Uri $labelsUri -OutFile $labelsPath -ExpectedHash "DC1FD0D4747096D3AA690BD65EC2F51FDB3E5117535BFBCE46FA91088A8F93A9" | Out-Null
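The change above replaces the MD5 check with a SHA-256 check via PowerShell's `Get-FileHash -Algorithm SHA256`. For readers following along in Python, here is a minimal sketch of the same verification logic; the function name `file_matches_sha256` and the commented usage are illustrative, not part of the sample (the expected hash is the one from the diff).

```Python
# Minimal sketch of the SHA-256 verification the script performs with Get-FileHash.
# `file_matches_sha256` is a hypothetical helper, not part of the sample.
import hashlib
from pathlib import Path

def file_matches_sha256(path: Path, expected_hash: str) -> bool:
    """Return True if the file's SHA-256 hex digest equals the expected hash."""
    digest = hashlib.sha256(path.read_bytes()).hexdigest()
    return digest.lower() == expected_hash.lower()

# Hypothetical usage: only re-download when the checksum does not match.
# model_path = Path("SqueezeNet.onnx")
# needs_download = not (model_path.exists() and file_matches_sha256(
#     model_path, "D95E2191E056F1912A9B8F6000DA3B9C7818441B9EB48137033C2ADBF6398BC8"))
```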

Samples/WindowsML/python/SqueezeNetPython/main.py

Lines changed: 24 additions & 35 deletions

@@ -5,21 +5,9 @@
 from PIL import Image
 import numpy as np
 import onnxruntime as ort
-import subprocess
-import sys
-import json
+from winml import WinML
 
-def register_execution_providers():
-    # Run this in another process to avoid a issue where
-    # the pywinrt module stops the tensorRT execution provider from loading.
-    worker_script = str(Path(__file__).parent / 'winml_worker.py')
-    result = subprocess.check_output([sys.executable, worker_script], text=True)
-    paths = json.loads(result)
-    for item in paths.items():
-        ort.register_execution_provider_library(item[0], item[1])
-        print(f"Registered execution provider: {item[0]} with library path: {item[1]}")
-
-def load_and_preprocess_image(image_path):
+def load_and_preprocess_image(image_path: Path) -> np.ndarray:
     img = Image.open(image_path)
     if img.mode != 'RGB':
         img = img.convert('RGB')
@@ -32,12 +20,12 @@ def load_and_preprocess_image(image_path):
     img_array = np.expand_dims(img_array, axis=0)
     return img_array.astype(np.float32)
 
-def load_labels(label_file):
+def load_labels(label_file: Path) -> list[str]:
     with open(label_file, 'r') as f:
         labels = [line.strip().split(',')[1] for line in f.readlines()]
     return labels
 
-def print_results(labels, results, is_logit=False):
+def print_results(labels : list[str], results: np.ndarray, is_logit=False) -> None:
     def softmax(x):
         exp_x = np.exp(x - np.max(x))
         return exp_x / exp_x.sum()
@@ -58,36 +46,35 @@ def softmax(x):
 
 if __name__ == "__main__":
     print("Registering execution providers ...")
-    register_execution_providers()
-
-    print("Creating session ...")
+    registered_eps = WinML().register_execution_providers_to_ort()
+    print(f"Registered execution providers: {registered_eps}")
 
     resource_path = Path(__file__).parent.parent
-    model_path = resource_path / "Model" / "SqueezeNet.onnx"
-    compiled_model_path = resource_path / "Model" / "SqueezeNet_ctx.onnx"
+
+    print("Setting session options ...")
     session_options = ort.SessionOptions()
     # Change your policy here.
     session_options.set_provider_selection_policy(ort.OrtExecutionProviderDevicePolicy.PREFER_NPU)
-    assert session_options.has_providers()
 
+    print("Compiling model ...")
+    model_path = resource_path / "Model" / "SqueezeNet.onnx"
+    compiled_model_path = resource_path / "Model" / "SqueezeNet_ctx.onnx"
+    if not compiled_model_path.exists():
+        model_compiler = ort.ModelCompiler(session_options, model_path)
+        model_compiler.compile_to_file(str(compiled_model_path))
+    else:
+        print("Using existing compiled model.")
     if compiled_model_path.exists():
-        print("Using compiled model")
+        print("Using compiled model.")
+        model_path_to_use = compiled_model_path
     else:
-        print("No compiled model found, attempting to create compiled model at ", compiled_model_path)
-        model_compiler = ort.ModelCompiler(session_options, model_path)
-        print("Starting compile, this may take a few moments..." )
-        try:
-            model_compiler.compile_to_file(compiled_model_path)
-            print("Model compiled successfully")
-        except Exception as e:
-            print("Model compilation failed:", e)
-            print("Falling back to uncompiled model")
-
-    model_path_to_use = compiled_model_path if compiled_model_path.exists() else model_path
+        print("No compile output. Using original model.")
+        model_path_to_use = model_path
 
+    print("Creating session ...")
     session = ort.InferenceSession(
         model_path_to_use,
-        sess_options=session_options,
+        sess_options=session_options
     )
 
     labels = load_labels(resource_path / "Model" / "SqueezeNet.Labels.txt")
@@ -100,4 +87,6 @@ def softmax(x):
     print("Running inference ...")
     input_name = session.get_inputs()[0].name
     results = session.run(None, {input_name: img_array})[0]
+    if not isinstance(results, np.ndarray):
+        raise TypeError("Unexpected output type from model.")
     print_results(labels, results, is_logit=False)
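In the diff above, `print_results` turns the raw SqueezeNet output into probabilities with a softmax before ranking them. Below is a minimal illustrative sketch of that post-processing step, assuming a `(1, N)` output array and the label list produced by `load_labels`; the `top_k` helper is hypothetical and not part of main.py.

```Python
# Illustrative sketch of the softmax + ranking done inside print_results.
# `top_k` is a hypothetical helper, not part of the sample.
import numpy as np

def top_k(labels: list[str], logits: np.ndarray, k: int = 5) -> list[tuple[str, float]]:
    x = logits.reshape(-1)               # flatten the (1, N) model output
    probs = np.exp(x - np.max(x))        # numerically stable softmax
    probs /= probs.sum()
    best = np.argsort(probs)[::-1][:k]   # indices of the k largest probabilities
    return [(labels[i], float(probs[i])) for i in best]

# Usage (shapes assumed): top_k(labels, results) returns the k most likely labels
# together with their softmax probabilities.
```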

Samples/WindowsML/python/SqueezeNetPython/readme.md

Lines changed: 14 additions & 6 deletions

@@ -12,13 +12,21 @@ extendedZipContent:
   target: LICENSE
 ---
 
-## Prerequisites
-* Python 3.10 - 3.13 on Windows
-* Download model file
+## Steps
+### Download test model
 ```PowerShell
 ..\Download-Model.ps1
 ```
-* Install dependencies
+### Prepare the python environment
+* Create a python environment with Python 3.10 - 3.13
+* Install the latest WinML python packages
 ```PowerShell
-pip install -r requirements.txt
-```
+pip install --upgrade --pre -r requirements.txt
+```
+### Install WindowsAppRuntime
+Please install the WindowsAppRuntime that matches the version of the python package `wasdk-Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap`
+> For experimental or preview WASDK. The version tags `-xxxN` are changed to `.devN` to fit Python's version requirements.
+### Run the example
+```PowerShell
+python main.py
+```
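The new "Install WindowsAppRuntime" step asks for a runtime that matches the installed `wasdk-Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap` package. A quick, illustrative way to look up that installed version (this snippet is not part of the sample and assumes the package has already been installed from requirements.txt):

```Python
# Query the installed bootstrap package version so you know which
# WindowsAppRuntime build to install. Illustrative only, not part of the sample.
from importlib import metadata

pkg = "wasdk-Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap"
try:
    print(f"{pkg}: {metadata.version(pkg)}")
except metadata.PackageNotFoundError:
    print(f"{pkg} is not installed; run 'pip install --upgrade --pre -r requirements.txt' first.")
```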
Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,5 @@
1-
--index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/pypi/simple
2-
--extra-index-url https://pypi.org/simple
3-
41
pillow
52
numpy
6-
onnxruntime-winml==1.22.0.post2
7-
winrt-runtime~=3.2.1.0
8-
winrt-Windows.Foundation~=3.2.1.0
9-
winrt-Windows.Foundation.Collections~=3.2.1.0
10-
winui3-Microsoft.Windows.AI.MachineLearning==1!1.8.250702007.dev4
11-
winui3-Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap==1!1.8.250702007.dev4
3+
onnxruntime-winml
4+
wasdk-Microsoft.Windows.AI.MachineLearning[all]
5+
wasdk-Microsoft.Windows.ApplicationModel.DynamicDependency.Bootstrap
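After installing the updated requirements, one quick sanity check is that `onnxruntime-winml` exposes the regular `onnxruntime` module that main.py imports. A minimal illustrative check, not part of the sample:

```Python
# Sanity check: onnxruntime-winml should be importable as `onnxruntime`
# (main.py does `import onnxruntime as ort`). Illustrative only.
import onnxruntime as ort

print("onnxruntime version:", ort.__version__)
print("Execution providers available in this build:", ort.get_available_providers())
```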
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
# Copyright (c) Microsoft Corporation.
2+
# Licensed under the MIT License.
3+
4+
import sys
5+
from pathlib import Path
6+
import traceback
7+
8+
# This needs to be alive throughout the lifetime of the application
9+
_instance = None
10+
11+
class WinML:
12+
def __new__(cls, *args, **kwargs):
13+
global _instance
14+
if _instance is None:
15+
_instance = super(WinML, cls).__new__(cls, *args, **kwargs)
16+
_instance._initialized = False
17+
return _instance
18+
19+
def __init__(self):
20+
if self._initialized:
21+
return
22+
self._initialized = True
23+
24+
self._fix_winrt_runtime()
25+
from winui3.microsoft.windows.applicationmodel.dynamicdependency.bootstrap import (
26+
InitializeOptions,
27+
initialize
28+
)
29+
import winui3.microsoft.windows.ai.machinelearning as winml
30+
self._win_app_sdk_handle = initialize(options=InitializeOptions.ON_NO_MATCH_SHOW_UI)
31+
self._win_app_sdk_handle.__enter__()
32+
catalog = winml.ExecutionProviderCatalog.get_default()
33+
self._providers = catalog.find_all_providers()
34+
self._ep_paths : dict[str, str] = {}
35+
for provider in self._providers:
36+
provider.ensure_ready_async().get()
37+
if provider.library_path == '':
38+
continue
39+
self._ep_paths[provider.name] = provider.library_path
40+
self._registered_eps : list[str] = []
41+
42+
def __del__(self):
43+
self._providers = None
44+
self._win_app_sdk_handle.__exit__(None, None, None)
45+
46+
def _fix_winrt_runtime(self):
47+
"""
48+
This function removes the msvcp140.dll from the winrt-runtime package.
49+
So it does not cause issues with other libraries.
50+
"""
51+
from importlib import metadata
52+
site_packages_path = Path(str(metadata.distribution('winrt-runtime').locate_file('')))
53+
dll_path = site_packages_path / 'winrt' / 'msvcp140.dll'
54+
if dll_path.exists():
55+
dll_path.unlink()
56+
57+
def register_execution_providers_to_ort(self) -> list[str]:
58+
import onnxruntime as ort
59+
for name, path in self._ep_paths.items():
60+
if name not in self._registered_eps:
61+
try:
62+
ort.register_execution_provider_library(name, path)
63+
self._registered_eps.append(name)
64+
except Exception as e:
65+
print(f"Failed to register execution provider {name}: {e}", file=sys.stderr)
66+
traceback.print_exc()
67+
return self._registered_eps
68+
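main.py needs only a couple of lines to use this helper; the sketch below mirrors that call and also shows the singleton behavior implemented in `__new__` (repeated constructions return the same object). Illustrative usage only, assuming winml.py sits next to the calling script.

```Python
# Illustrative usage of the WinML helper defined above (mirrors main.py).
from winml import WinML

helper = WinML()
assert helper is WinML()  # __new__ hands back the single shared instance

registered_eps = helper.register_execution_providers_to_ort()
print(f"Registered execution providers: {registered_eps}")
```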

Samples/WindowsML/python/SqueezeNetPython/winml_worker.py

Lines changed: 0 additions & 25 deletions
This file was deleted.
