Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion samples/cs/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
</PropertyGroup>
<ItemGroup>
<PackageVersion Include="Microsoft.AI.Foundry.Local" Version="1.0.0-rc4" />
<PackageVersion Include="Microsoft.AI.Foundry.Local.WinML" Version="1.0.0-rc4" />
<PackageVersion Include="Microsoft.AI.Foundry.Local.WinML" Version="1.0.0" />
<PackageVersion Include="Microsoft.AI.Foundry.Local.Core.WinML" Version="1.0.0-dev-20260414T221200-0eef67c" />
<PackageVersion Include="Betalgo.Ranul.OpenAI" Version="9.1.1" />
<PackageVersion Include="Microsoft.Extensions.Logging" Version="9.0.10" />
<PackageVersion Include="Microsoft.Extensions.Logging.Console" Version="9.0.10" />
Expand Down
5 changes: 2 additions & 3 deletions samples/cs/nuget.config
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,11 @@
<package pattern="*" />
</packageSource>
<packageSource key="ORT-Nightly">
<package pattern="Microsoft.AI.Foundry.Local*" />
<package pattern="Microsoft.ML.OnnxRuntime*" />
<package pattern="Microsoft.AI.Foundry.Local.Core*" />
</packageSource>
<packageSource key="local-sdk">
<package pattern="Microsoft.AI.Foundry.Local" />
<package pattern="Microsoft.AI.Foundry.Local.WinML" />
</packageSource>
</packageSourceMapping>
</configuration>
</configuration>
257 changes: 257 additions & 0 deletions samples/cs/verify-winml/Program.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,257 @@
/// <summary>
/// Foundry Local SDK - WinML 2.0 EP Verification (C#)
///
/// Verifies:
/// 1. Execution providers are discovered and registered
/// 2. Accelerated models appear in catalog after EP registration
/// 3. Streaming chat completions work on an accelerated model
/// </summary>

using Microsoft.AI.Foundry.Local;
using Microsoft.Extensions.Logging;
using FoundryChatMessage = Microsoft.AI.Foundry.Local.OpenAI.ChatMessage;
using FoundryChatMessageRole = Microsoft.AI.Foundry.Local.OpenAI.ChatMessageRole;

// ANSI color-coded status tags for console output
// (SGR codes: 92 = bright green, 91 = bright red, 94 = bright blue, 93 = bright yellow).
const string PASS = "\x1b[92m[PASS]\x1b[0m";
const string FAIL = "\x1b[91m[FAIL]\x1b[0m";
const string INFO = "\x1b[94m[INFO]\x1b[0m";
const string WARN = "\x1b[93m[WARN]\x1b[0m";

// Accumulates (test name, pass/fail) pairs for the final PrintSummary tally.
var results = new List<(string Name, bool Passed)>();

// Prints one colorized PASS/FAIL line for a check (with optional detail text)
// and records the outcome in the shared results list for the final summary.
void LogResult(string testName, bool passed, string detail = "")
{
    var line = $"{(passed ? PASS : FAIL)} {testName}";
    if (!string.IsNullOrEmpty(detail))
    {
        line += $" - {detail}";
    }

    Console.WriteLine(line);
    results.Add((testName, passed));
}

// Writes a titled section banner: blank line, 60-char '=' rule, the title,
// then another rule followed by a blank line.
void PrintSeparator(string title)
{
    var rule = new string('=', 60);
    Console.WriteLine($"\n{rule}");
    Console.WriteLine($" {title}");
    Console.WriteLine($"{rule}\n");
}

// Prints the final summary banner: one check-mark/cross line per recorded
// result, followed by a "passed/total" tally.
void PrintSummary()
{
    PrintSeparator("Summary");
    var passedCount = 0;
    foreach (var (name, ok) in results)
    {
        var mark = ok ? "✓" : "✗";
        Console.WriteLine($" {mark} {name}");
        if (ok)
        {
            passedCount++;
        }
    }

    Console.WriteLine($"\n {passedCount}/{results.Count} tests passed");
}

// A variant counts as "accelerated" when its runtime info targets a GPU or
// NPU device; a missing Info/Runtime yields false.
bool IsAcceleratedVariant(IModel model) =>
    model.Info?.Runtime?.DeviceType is DeviceType.GPU or DeviceType.NPU;

// Single inert token threaded through all SDK calls; this sample does not
// wire up cancellation.
CancellationToken ct = CancellationToken.None;

// ── 0. Initialize FoundryLocalManager ──────────────────────
PrintSeparator("Initialization");
var config = new Configuration
{
    AppName = "verify_winml",
    // Fully qualified: the SDK's LogLevel would otherwise be ambiguous with
    // Microsoft.Extensions.Logging.LogLevel, which is also imported here.
    LogLevel = Microsoft.AI.Foundry.Local.LogLevel.Information
};

using var loggerFactory = LoggerFactory.Create(builder =>
    builder.SetMinimumLevel(Microsoft.Extensions.Logging.LogLevel.Information));
var logger = loggerFactory.CreateLogger<Program>();

// NOTE(review): CreateAsync appears to initialize a process-wide singleton
// exposed via FoundryLocalManager.Instance — confirm against the SDK docs.
await FoundryLocalManager.CreateAsync(config, logger);
var mgr = FoundryLocalManager.Instance;
Console.WriteLine($"{INFO} FoundryLocalManager initialized.");

// ── 1. Discover & Register EPs ────────────────────────────
PrintSeparator("Step 1: Discover & Register Execution Providers");
EpInfo[] eps = [];
try
{
    // NOTE(review): DiscoverEps presumably enumerates EPs available on this
    // machine, whether or not they are registered yet — verify with SDK docs.
    eps = mgr.DiscoverEps();
    Console.WriteLine($"{INFO} Discovered {eps.Length} execution providers:");
    foreach (var ep in eps)
    {
        Console.WriteLine($" - {ep.Name,-40} Registered: {ep.IsRegistered}");
    }

    LogResult("EP Discovery", true, $"{eps.Length} EP(s) found");
}
catch (Exception e)
{
    LogResult("EP Discovery", false, e.Message);
}

// With zero EPs there is nothing further to verify on this machine: record
// the registration check as failed and exit after printing the summary.
if (eps.Length == 0)
{
    var detail = "No execution providers discovered on this machine";
    LogResult("EP Download & Registration", false, detail);
    Console.WriteLine($"\n{FAIL} {detail}.");
    PrintSummary();
    return;
}

try
{
    // State for the single-line "\r"-overwriting progress display: which EP
    // is currently downloading and its last reported percentage.
    string? currentProgressEp = null;
    var currentProgressPercent = -1d;

    var epResult = await mgr.DownloadAndRegisterEpsAsync(
        new Action<string, double>((epName, percent) =>
        {
            // A different EP, or a percentage that went backwards (i.e. a new
            // download phase), means the previous progress line is finished:
            // emit a newline before overwriting in place again.
            if (currentProgressEp != null &&
                (!epName.Equals(currentProgressEp, StringComparison.OrdinalIgnoreCase) || percent < currentProgressPercent))
            {
                Console.WriteLine();
            }

            currentProgressEp = epName;
            currentProgressPercent = percent;
            Console.Write($"\r Downloading {epName}: {percent:F1}%");
        }),
        ct);

    // Terminate the last in-place progress line, if any progress was reported.
    if (currentProgressEp != null)
    {
        Console.WriteLine();
    }

    Console.WriteLine($"{INFO} EP registration: success={epResult.Success}, status={epResult.Status}");
    if (epResult.RegisteredEps?.Any() == true)
    {
        Console.WriteLine($" Registered: {string.Join(", ", epResult.RegisteredEps)}");
    }

    if (epResult.FailedEps?.Any() == true)
    {
        Console.WriteLine($" Failed: {string.Join(", ", epResult.FailedEps)}");
    }

    var downloadOk = epResult.Success;
    var detail = downloadOk && epResult.RegisteredEps?.Any() == true
        ? $"{epResult.RegisteredEps.Length} EP(s) registered"
        : epResult.Status;
    LogResult("EP Download & Registration", downloadOk, detail);
    if (!downloadOk)
    {
        // Without registered EPs the accelerated-model checks below cannot pass.
        PrintSummary();
        return;
    }
}
catch (Exception e)
{
    Console.WriteLine(); // ensure any half-written progress line is terminated
    LogResult("EP Download & Registration", false, e.Message);
    PrintSummary();
    return;
}

// ── 2. List Models & Find Accelerated Variants ────────────
PrintSeparator("Step 2: Model Catalog - Accelerated Models");
var catalog = await mgr.GetCatalogAsync();
var models = await catalog.ListModelsAsync();
Console.WriteLine($"{INFO} Total models in catalog: {models.Count}");

// Collect every GPU/NPU-backed variant; the first one found drives the
// download/load/chat checks below.
var acceleratedVariants = new List<IModel>();
foreach (var model in models)
{
    foreach (var variant in model.Variants)
    {
        if (IsAcceleratedVariant(variant))
        {
            acceleratedVariants.Add(variant);
            var runtime = variant.Info?.Runtime;
            Console.WriteLine($" - {variant.Id,-50} Device: {runtime?.DeviceType,-3} EP: {runtime?.ExecutionProvider ?? "?"}");
        }
    }
}

var chosen = acceleratedVariants.FirstOrDefault();
LogResult("Catalog - Accelerated models found", chosen != null,
    chosen != null ? $"{acceleratedVariants.Count} accelerated variant(s)" : "No accelerated model variants");

// No accelerated variant means the remaining steps cannot run; summarize and exit.
if (chosen == null)
{
    Console.WriteLine($"\n{FAIL} No accelerated model variants are available.");
    Console.WriteLine($"{WARN} Ensure the system has a compatible accelerator and matching model variants installed.");
    PrintSummary();
    return;
}

Console.WriteLine($"\n{INFO} Selected model: {chosen.Id} (EP: {chosen.Info?.Runtime?.ExecutionProvider ?? "unknown"})");

// ── 3. Download & Load Model ──────────────────────────────
PrintSeparator("Step 3: Download & Load Model");
try
{
    // NOTE(review): progress looks like a 0-100 percentage given the F1
    // formatting — confirm against the SDK's DownloadAsync contract.
    await chosen.DownloadAsync(progress =>
        Console.Write($"\r Downloading model: {progress:F1}%"));
    Console.WriteLine(); // terminate the in-place progress line
    LogResult("Model Download", true);
}
catch (Exception e)
{
    Console.WriteLine();
    LogResult("Model Download", false, e.Message);
    PrintSummary();
    return;
}

try
{
    await chosen.LoadAsync();
    LogResult("Model Load", true, $"Loaded {chosen.Id}");
}
catch (Exception e)
{
    LogResult("Model Load", false, e.Message);
    PrintSummary();
    return;
}

// ── 4. Streaming Chat Completions (Native SDK) ────────────
// Streams a short chat completion through the SDK's native chat client and
// treats any non-empty streamed response as a pass.
PrintSeparator("Step 4: Streaming Chat Completions (Native)");
try
{
    var chatClient = await chosen.GetChatClientAsync();
    var messages = new List<FoundryChatMessage>
    {
        new() { Role = FoundryChatMessageRole.System, Content = "You are a helpful assistant." },
        new() { Role = FoundryChatMessageRole.User, Content = "What is 2 + 2? Reply with just the number." },
    };

    // StringBuilder avoids O(n^2) string concatenation while accumulating
    // chunks; Stopwatch gives monotonic timing immune to wall-clock changes.
    var fullResponse = new System.Text.StringBuilder();
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    await foreach (var chunk in chatClient.CompleteChatStreamingAsync(messages, ct))
    {
        // NOTE(review): assumes one choice per chunk with delta text in
        // Message.Content — confirm against the SDK's streaming contract.
        var content = chunk.Choices?.FirstOrDefault()?.Message?.Content;
        if (!string.IsNullOrEmpty(content))
        {
            Console.Write(content);
            Console.Out.Flush(); // surface tokens as they arrive
            fullResponse.Append(content);
        }
    }

    stopwatch.Stop();
    Console.WriteLine();
    LogResult("Streaming Chat (Native)", fullResponse.Length > 0,
        $"{fullResponse.Length} chars in {stopwatch.Elapsed.TotalSeconds:F2}s");
}
catch (Exception e)
{
    LogResult("Streaming Chat (Native)", false, e.Message);
}

// ── Summary ──────────────────────────────────────────────
PrintSummary();

// Reached only when LoadAsync succeeded above (every earlier failure path
// returns after printing its own summary), so the model is loaded here.
await chosen.UnloadAsync();
Console.WriteLine("Model unloaded. Done!");
21 changes: 21 additions & 0 deletions samples/cs/verify-winml/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Verify WinML 2.0 Execution Providers (C#)

This sample verifies that WinML 2.0 execution providers are correctly discovered,
downloaded, and registered using the Foundry Local C# SDK. It then selects a model
variant backed by a registered WinML EP and finishes with a native streaming chat check.

## Prerequisites

- Windows with a compatible GPU
- Windows App SDK 2.0 runtime installed (preview1 or experimental)
- .NET 9.0 SDK

## Build & Run

This sample uses the public `Microsoft.AI.Foundry.Local.WinML` SDK package and
overrides its native `Microsoft.AI.Foundry.Local.Core.WinML` dependency with the
preview package from ORT-Nightly via the shared `..\nuget.config`.

```bash
dotnet run
```
25 changes: 25 additions & 0 deletions samples/cs/verify-winml/VerifyWinML.csproj
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0-windows10.0.26100</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<WindowsAppSDKSelfContained>true</WindowsAppSDKSelfContained>
<Platforms>x64;ARM64</Platforms>
<WindowsPackageType>None</WindowsPackageType>
<EnableCoreMrtTooling>false</EnableCoreMrtTooling>
</PropertyGroup>

<PropertyGroup Condition="'$(RuntimeIdentifier)'==''">
<RuntimeIdentifier>$(NETCoreSdkRuntimeIdentifier)</RuntimeIdentifier>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.AI.Foundry.Local.WinML" />
<PackageReference Include="Microsoft.AI.Foundry.Local.Core.WinML" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" />
</ItemGroup>

</Project>
27 changes: 27 additions & 0 deletions samples/js/verify-winml/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Verify WinML 2.0 Execution Providers (JavaScript)

This sample verifies that WinML 2.0 execution providers are correctly discovered,
downloaded, and registered using the Foundry Local JavaScript SDK. It then selects a
model variant backed by a registered WinML EP and finishes with a native streaming chat check.

## Prerequisites

- Windows with a compatible GPU
- Windows App SDK 2.0 runtime installed (preview1 or experimental)
- Node.js 18+

## Setup

`package.json` installs the repo-local `foundry-local-sdk` package and then
runs its WinML installer script, so the sample always uses the current
branch's WinML artifact pins:

```bash
npm install
```

## Run

```bash
node app.js
```
Loading
Loading