diff --git a/Samples/WindowsML/Shared/cpp/ArgumentParser.cpp b/Samples/WindowsML/Shared/cpp/ArgumentParser.cpp index bd33e9fe0..b82966ae7 100644 --- a/Samples/WindowsML/Shared/cpp/ArgumentParser.cpp +++ b/Samples/WindowsML/Shared/cpp/ArgumentParser.cpp @@ -191,7 +191,7 @@ namespace Shared << L" --download Download required packages\n" << L" --use_model_catalog Use the model catalog for model selection\n" << L" --model Path to the input ONNX model (default: SqueezeNet.onnx in executable directory)\n" - << L" --compiled_output Path for compiled output model (default: SqueezeNet_ctx.onnx)\n" + << L" --compiled_output Path for compiled output model (default: auto-generated with device info)\n" << L" --image_path Path to the input image (default: sample kitten image)\n" << L"\n" << L"Exactly one of --ep_policy or --ep_name must be specified.\n" diff --git a/Samples/WindowsML/Shared/cpp/ModelManager.cpp b/Samples/WindowsML/Shared/cpp/ModelManager.cpp index db745c3eb..87cf79029 100644 --- a/Samples/WindowsML/Shared/cpp/ModelManager.cpp +++ b/Samples/WindowsML/Shared/cpp/ModelManager.cpp @@ -1,4 +1,4 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. +// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See LICENSE.md in the repo root for license information. 
#include "ModelManager.h" #include @@ -253,5 +253,56 @@ namespace Shared return ModelVariant::Default; } + std::filesystem::path ModelManager::GenerateCompiledModelPath( + const std::filesystem::path& modelPath, + const std::filesystem::path& executableFolder, + const CommandLineOptions& options) + { + // If user explicitly specified --compiled_output, use it as-is + if (!options.output_path.empty()) + { + return std::filesystem::path(options.output_path); + } + + std::wstring baseName = modelPath.stem().wstring(); + std::wstring suffix; + + if (options.ep_policy.has_value()) + { + std::string policyStr = ArgumentParser::ToString(options.ep_policy.value()); + suffix = L"_" + std::wstring(policyStr.begin(), policyStr.end()); + } + else if (!options.ep_name.empty()) + { + suffix = L"_" + options.ep_name; + + // Try to determine device type + std::wstring deviceType; + if (options.device_type.has_value()) + { + deviceType = options.device_type.value(); + } + + if (!deviceType.empty()) + { + suffix += L"_" + deviceType; + } + } + + if (options.perf_mode == PerformanceMode::MaxPerformance) + { + suffix += L"_MaxPerformance"; + } + else if (options.perf_mode == PerformanceMode::MaxEfficiency) + { + suffix += L"_MaxEfficiency"; + } + + std::wstring fileName = baseName + L"_ctx" + suffix + L".onnx"; + auto result = executableFolder / fileName; + std::wcout << L"Compiled model path: " << result.wstring() << std::endl; + return result; + } + } // namespace Shared } // namespace WindowsML diff --git a/Samples/WindowsML/Shared/cpp/ModelManager.h b/Samples/WindowsML/Shared/cpp/ModelManager.h index 26212349f..59f34d128 100644 --- a/Samples/WindowsML/Shared/cpp/ModelManager.h +++ b/Samples/WindowsML/Shared/cpp/ModelManager.h @@ -54,6 +54,16 @@ namespace Shared std::wstring& outputModelPath, std::wstring& outputImagePath); + /// + /// Generate a device-specific compiled model path. 
+ /// Encodes EP policy/name, device type, and performance mode so compiled models + /// for different device configurations don't collide. + /// + static std::filesystem::path GenerateCompiledModelPath( + const std::filesystem::path& modelPath, + const std::filesystem::path& executableFolder, + const CommandLineOptions& options); + /// /// Get model path for specified variant /// diff --git a/Samples/WindowsML/Shared/cs/ArgumentParser.cs b/Samples/WindowsML/Shared/cs/ArgumentParser.cs index 3f7e79716..3ec871ea9 100644 --- a/Samples/WindowsML/Shared/cs/ArgumentParser.cs +++ b/Samples/WindowsML/Shared/cs/ArgumentParser.cs @@ -38,7 +38,7 @@ public class Options public bool Download { get; set; } = false; public bool UseModelCatalog { get; set; } = false; public string ModelPath { get; set; } = string.Empty; - public string OutputPath { get; set; } = "SqueezeNet_ctx.onnx"; + public string OutputPath { get; set; } = string.Empty; public string ImagePath { get; set; } = string.Empty; public ModelVariant Variant { get; set; } = ModelVariant.Default; public PerformanceMode PerfMode { get; set; } = PerformanceMode.Default; @@ -221,7 +221,7 @@ public static void PrintHelp() Console.WriteLine(" --download Download required packages"); Console.WriteLine(" --use_model_catalog Use the model catalog for model discovery"); Console.WriteLine(" --model Path to input ONNX model (default: SqueezeNet.onnx)"); - Console.WriteLine(" --compiled_output Path for compiled output model (default: SqueezeNet_ctx.onnx)"); + Console.WriteLine(" --compiled_output Path for compiled output model (default: auto-generated with device info)"); Console.WriteLine(" --image_path Path to the input image (default: sample kitten image)"); Console.WriteLine(" --help, -h Display this help message"); Console.WriteLine(); diff --git a/Samples/WindowsML/Shared/cs/ExecutionProviderManager.cs b/Samples/WindowsML/Shared/cs/ExecutionProviderManager.cs index 16c319e85..5e1c6ffe2 100644 --- 
a/Samples/WindowsML/Shared/cs/ExecutionProviderManager.cs +++ b/Samples/WindowsML/Shared/cs/ExecutionProviderManager.cs @@ -42,6 +42,7 @@ public static async Task InitializeProvidersAsync(bool allowDownload = false) if (allowDownload || readyState != ExecutionProviderReadyState.NotPresent) { await provider.EnsureReadyAsync(); + Console.WriteLine($" Updated Ready state: {provider.ReadyState}"); } provider.TryRegister(); diff --git a/Samples/WindowsML/Shared/cs/ModelManager.cs b/Samples/WindowsML/Shared/cs/ModelManager.cs index e63602be0..1d901f60f 100644 --- a/Samples/WindowsML/Shared/cs/ModelManager.cs +++ b/Samples/WindowsML/Shared/cs/ModelManager.cs @@ -110,6 +110,60 @@ public static string GetModelVariantPath(string executableFolder, ModelVariant v return modelPath; } + /// <summary> + /// Generate a device-specific compiled model path. + /// Encodes EP policy/name, device type, and performance mode into the filename + /// so that compiled models for different device configurations don't collide. + /// </summary> + public static string GenerateCompiledModelPath(string modelPath, string executableFolder, Options options) + { + // If user explicitly specified --compiled_output, use it as-is + if (!string.IsNullOrEmpty(options.OutputPath)) + { + return options.OutputPath.Contains(Path.DirectorySeparatorChar) ? + options.OutputPath : Path.Combine(executableFolder, options.OutputPath); + } + + string baseName = Path.GetFileNameWithoutExtension(modelPath); + string suffix = BuildDeviceSuffix(options); + + string fileName = $"{baseName}_ctx{suffix}.onnx"; + Console.WriteLine($"Compiled model path: {Path.Combine(executableFolder, fileName)}"); + return Path.Combine(executableFolder, fileName); + } + + /// <summary> + /// Build a device-identifying suffix for the compiled model filename. 
+ /// </summary> + private static string BuildDeviceSuffix(Options options) + { + var parts = new List<string>(); + + if (options.EpPolicy.HasValue) + { + parts.Add(options.EpPolicy.Value.ToString()); + } + else if (!string.IsNullOrEmpty(options.EpName)) + { + parts.Add(options.EpName); + + // Try to determine device type + string? deviceType = options.DeviceType; + + if (!string.IsNullOrEmpty(deviceType)) + { + parts.Add(deviceType); + } + } + + if (options.PerfMode != PerformanceMode.Default) + { + parts.Add(options.PerfMode.ToString()); + } + + return parts.Count > 0 ? "_" + string.Join("_", parts) : ""; + } + /// /// Resolve model paths with intelligent variant selection /// @@ -192,8 +246,7 @@ public static string GetModelVariantPath(string executableFolder, ModelVariant v modelPath = GetModelVariantPath(executableFolder, variant); } - string compiledModelPath = options.OutputPath.Contains(Path.DirectorySeparatorChar) ? - options.OutputPath : Path.Combine(executableFolder, options.OutputPath); + string compiledModelPath = GenerateCompiledModelPath(modelPath, executableFolder, options); if (!File.Exists(labelsPath)) { @@ -225,8 +278,7 @@ public static (string modelPath, string compiledModelPath, string labelsPath) Re modelPath = GetModelVariantPath(executableFolder, options.Variant); } - string compiledModelPath = options.OutputPath.Contains(Path.DirectorySeparatorChar) ? 
- options.OutputPath : Path.Combine(executableFolder, options.OutputPath); + string compiledModelPath = GenerateCompiledModelPath(modelPath, executableFolder, options); string labelsPath = Path.Combine(executableFolder, "SqueezeNet.Labels.txt"); diff --git a/Samples/WindowsML/cpp/CppConsoleDesktop/CppConsoleDesktop.cpp b/Samples/WindowsML/cpp/CppConsoleDesktop/CppConsoleDesktop.cpp index 8eeec0c17..236a9a0d1 100644 --- a/Samples/WindowsML/cpp/CppConsoleDesktop/CppConsoleDesktop.cpp +++ b/Samples/WindowsML/cpp/CppConsoleDesktop/CppConsoleDesktop.cpp @@ -110,7 +110,7 @@ IAsyncAction RunInferenceAsync(const CommandLineOptions& options) std::vector labels = ModelManager::LoadLabels(labelsPath); std::filesystem::path outputPath = - options.output_path.empty() ? executableFolder / L"SqueezeNet_ctx.onnx" : std::filesystem::path(options.output_path); + ModelManager::GenerateCompiledModelPath(modelPath, executableFolder, options); std::filesystem::path imagePath = options.image_path.empty() ? executableFolder / L"image.png" : std::filesystem::path(options.image_path); diff --git a/Samples/WindowsML/cpp/CppConsoleDesktop/README.md b/Samples/WindowsML/cpp/CppConsoleDesktop/README.md index 6db37a46c..e1cf79761 100644 --- a/Samples/WindowsML/cpp/CppConsoleDesktop/README.md +++ b/Samples/WindowsML/cpp/CppConsoleDesktop/README.md @@ -15,7 +15,7 @@ Options: --compile Compile the model --download Download required packages --model Path to input ONNX model (default: SqueezeNet.onnx in executable directory) - --compiled_output Path for compiled output model (default: SqueezeNet_ctx.onnx) + --compiled_output Path for compiled output model (default: auto-generated with device info) --image_path Path to the input image (default: sample kitten image) ``` @@ -68,7 +68,14 @@ for (const auto& [ep_name, devices] : ep_device_map) ### 2. 
Model Compilation -The sample shows how to compile an ONNX model for optimized execution: +The sample shows how to compile an ONNX model for optimized execution. Compiled model filenames +are automatically generated with device-specific identifiers to prevent collisions: + +- Policy mode: `SqueezeNet_ctx_PREFER_GPU.onnx` +- Explicit EP: `SqueezeNet_ctx_DML_GPU.onnx` +- With perf mode: `SqueezeNet_ctx_PREFER_NPU_MaxPerformance.onnx` + +Use `--compiled_output` to override with a custom path. ```cpp #include diff --git a/Samples/WindowsML/cs/CSharpConsoleDesktop/README.md b/Samples/WindowsML/cs/CSharpConsoleDesktop/README.md index 0308eb869..7d8db601a 100644 --- a/Samples/WindowsML/cs/CSharpConsoleDesktop/README.md +++ b/Samples/WindowsML/cs/CSharpConsoleDesktop/README.md @@ -15,7 +15,7 @@ Options: --compile Compile the model --download Download required packages --model Path to input ONNX model (default: SqueezeNet.onnx in executable directory) - --compiled_output Path for compiled output model (default: SqueezeNet_ctx.onnx) + --compiled_output Path for compiled output model (default: auto-generated with device info) --image_path Path to the input image (default: sample kitten image) ``` @@ -80,7 +80,14 @@ foreach (KeyValuePair> epGroup in epDeviceMap) ### 2. Model Compilation -The sample shows how to compile an ONNX model for optimized execution: +The sample shows how to compile an ONNX model for optimized execution. Compiled model filenames +are automatically generated with device-specific identifiers to prevent collisions: + +- Policy mode: `SqueezeNet_ctx_PREFER_GPU.onnx` +- Explicit EP: `SqueezeNet_ctx_DML_GPU.onnx` +- With perf mode: `SqueezeNet_ctx_PREFER_NPU_MaxPerformance.onnx` + +Use `--compiled_output` to override with a custom path. ```csharp // Create compilation options from session options