diff --git a/.pipelines/foundry-local-packaging.yml b/.pipelines/foundry-local-packaging.yml index bf05607f..1e0bda32 100644 --- a/.pipelines/foundry-local-packaging.yml +++ b/.pipelines/foundry-local-packaging.yml @@ -5,7 +5,7 @@ # # Produces artifacts: flc-nuget, flc-nuget-winml, flc-wheels, flc-wheels-winml, # cs-sdk, cs-sdk-winml, js-sdk, js-sdk-winml, python-sdk, python-sdk-winml, -# rust-sdk, rust-sdk-winml +# rust-sdk, rust-sdk-winml, cpp-sdk, cpp-sdk-winml pr: - main @@ -643,6 +643,36 @@ extends: # depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-winml' # outputDir: '$(Build.ArtifactStagingDirectory)/rust-sdk-winml' + # ── Build C++ SDK ── + - stage: build_cpp + displayName: 'Build C++ SDK' + dependsOn: + - build_core + jobs: + - job: cpp_sdk + displayName: 'Build' + pool: + name: onnxruntime-Win-CPU-2022 + os: windows + templateContext: + inputs: + - input: pipelineArtifact + artifactName: 'version-info' + targetPath: '$(Pipeline.Workspace)/version-info' + - input: pipelineArtifact + artifactName: 'deps-versions-standard' + targetPath: '$(Pipeline.Workspace)/deps-versions-standard' + outputs: + - output: pipelineArtifact + artifactName: 'cpp-sdk' + targetPath: '$(Build.ArtifactStagingDirectory)/cpp-sdk' + steps: + - checkout: self + - template: .pipelines/templates/build-cpp-steps.yml@self + parameters: + isWinML: false + depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-standard' + # ── Test C# SDK ── - stage: test_cs displayName: 'Test C#' @@ -1003,6 +1033,36 @@ extends: flcNugetDir: '$(Pipeline.Workspace)/flc-nuget' depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-standard' + # ── Test C++ SDK ── + - stage: test_cpp + displayName: 'Test C++' + dependsOn: build_cpp + jobs: + - job: test_cpp_win_x64 + displayName: 'win-x64' + pool: + name: onnxruntime-Win-CPU-2022 + os: windows + templateContext: + inputs: + - input: pipelineArtifact + artifactName: 'flc-nuget' + targetPath: '$(Pipeline.Workspace)/flc-nuget' + - input: pipelineArtifact + 
artifactName: 'deps-versions-standard' + targetPath: '$(Pipeline.Workspace)/deps-versions-standard' + steps: + - checkout: self + clean: true + - template: .pipelines/templates/checkout-steps.yml@self + parameters: + repoName: test-data-shared + - template: .pipelines/templates/test-cpp-steps.yml@self + parameters: + isWinML: false + flcNugetDir: '$(Pipeline.Workspace)/flc-nuget' + depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-standard' + # ── Build FLC (WinML) ── - stage: build_core_winml displayName: 'Build Core (WinML)' @@ -1283,6 +1343,37 @@ extends: depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-winml' outputDir: '$(Build.ArtifactStagingDirectory)/python-sdk-winml' + # ── Build C++ SDK (WinML) ── + - stage: build_cpp_winml + displayName: 'Build C++ SDK (WinML)' + dependsOn: + - build_core_winml + jobs: + - job: cpp_sdk_winml + displayName: 'Build' + pool: + name: onnxruntime-Win-CPU-2022 + os: windows + templateContext: + inputs: + - input: pipelineArtifact + artifactName: 'version-info' + targetPath: '$(Pipeline.Workspace)/version-info' + - input: pipelineArtifact + artifactName: 'deps-versions-winml' + targetPath: '$(Pipeline.Workspace)/deps-versions-winml' + outputs: + - output: pipelineArtifact + artifactName: 'cpp-sdk-winml' + targetPath: '$(Build.ArtifactStagingDirectory)/cpp-sdk-winml' + steps: + - checkout: self + - template: .pipelines/templates/build-cpp-steps.yml@self + parameters: + isWinML: true + depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-winml' + outputDir: '$(Build.ArtifactStagingDirectory)/cpp-sdk-winml' + # ── Test C# SDK (WinML) ── - stage: test_cs_winml displayName: 'Test C# (WinML)' @@ -1377,3 +1468,33 @@ extends: flcWheelsDir: '$(Pipeline.Workspace)/flc-wheels-winml' sdkWheelsDir: '$(Pipeline.Workspace)/python-sdk-winml' depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-winml' + + # ── Test C++ SDK (WinML) ── + - stage: test_cpp_winml + displayName: 'Test C++ (WinML)' + dependsOn: build_cpp_winml + jobs: + - 
job: test_cpp_winml_win_x64 + displayName: 'win-x64' + pool: + name: onnxruntime-Win-CPU-2022 + os: windows + templateContext: + inputs: + - input: pipelineArtifact + artifactName: 'flc-nuget-winml' + targetPath: '$(Pipeline.Workspace)/flc-nuget-winml' + - input: pipelineArtifact + artifactName: 'deps-versions-winml' + targetPath: '$(Pipeline.Workspace)/deps-versions-winml' + steps: + - checkout: self + clean: true + - template: .pipelines/templates/checkout-steps.yml@self + parameters: + repoName: test-data-shared + - template: .pipelines/templates/test-cpp-steps.yml@self + parameters: + isWinML: true + flcNugetDir: '$(Pipeline.Workspace)/flc-nuget-winml' + depsVersionsDir: '$(Pipeline.Workspace)/deps-versions-winml' diff --git a/.pipelines/templates/build-cpp-steps.yml b/.pipelines/templates/build-cpp-steps.yml new file mode 100644 index 00000000..d692ca57 --- /dev/null +++ b/.pipelines/templates/build-cpp-steps.yml @@ -0,0 +1,73 @@ +# Steps to package the C++ SDK source into a release archive. +# The archive contains headers, source, CMake config, and deps_versions.json +# so consumers can build from source with their own toolchain. +parameters: +- name: isWinML + type: boolean + default: false +- name: outputDir + type: string + default: '$(Build.ArtifactStagingDirectory)/cpp-sdk' + displayName: 'Path to directory for the packaged SDK' +- name: depsVersionsDir + type: string + default: '' + displayName: 'Path to deps-versions artifact directory' +steps: +# Set paths for multi-repo checkout +- task: PowerShell@2 + displayName: 'Set source paths' + inputs: + targetType: inline + script: | + $multiCheckout = "$(Build.SourcesDirectory)/Foundry-Local" + if (Test-Path $multiCheckout) { + $repoRoot = $multiCheckout + } else { + $repoRoot = "$(Build.SourcesDirectory)" + } + Write-Host "##vso[task.setvariable variable=repoRoot]$repoRoot" + +# Read version from the version-info artifact produced by compute_version stage. 
+- task: PowerShell@2 + displayName: 'Set SDK version' + inputs: + targetType: inline + script: | + $v = (Get-Content "$(Pipeline.Workspace)/version-info/sdkVersion.txt" -Raw).Trim() + Write-Host "C++ SDK version: $v" + + # Patch vcpkg.json version-string + $vcpkgPath = "$(repoRoot)/sdk/cpp/vcpkg.json" + $content = Get-Content $vcpkgPath -Raw + $content = $content -replace '"version-string"\s*:\s*"[^"]+"', "`"version-string`": `"$v`"" + Set-Content -Path $vcpkgPath -Value $content + + # Patch CMakeLists.txt project version (CMake only accepts MAJOR.MINOR.PATCH[.TWEAK]) + $cmakeVer = ($v -split '-')[0] + $cmakePath = "$(repoRoot)/sdk/cpp/CMakeLists.txt" + $content = Get-Content $cmakePath -Raw + $content = $content -replace 'project\(CppSdk\s+VERSION\s+[^\s]+', "project(CppSdk VERSION $cmakeVer" + Set-Content -Path $cmakePath -Value $content + + Write-Host "##vso[task.setvariable variable=cppSdkVersion]$v" + +# Load dependency versions from deps_versions.json so the archive has correct versions +- template: update-deps-versions-steps.yml + parameters: + repoRoot: $(repoRoot) + artifactDir: ${{ parameters.depsVersionsDir }} + isWinML: ${{ parameters.isWinML }} + +# Archive the SDK source tree for GitHub Releases +- task: PowerShell@2 + displayName: 'Package SDK source' + inputs: + targetType: inline + script: | + $destDir = "${{ parameters.outputDir }}" + New-Item -ItemType Directory -Path $destDir -Force | Out-Null + + $zipPath = "$destDir/foundry-local-cpp-sdk-$(cppSdkVersion).zip" + Compress-Archive -Path "$(repoRoot)/sdk/cpp/*" -DestinationPath $zipPath -Force + Write-Host "Packaged SDK source: $zipPath" diff --git a/.pipelines/templates/test-cpp-steps.yml b/.pipelines/templates/test-cpp-steps.yml new file mode 100644 index 00000000..2fca2025 --- /dev/null +++ b/.pipelines/templates/test-cpp-steps.yml @@ -0,0 +1,151 @@ +# Steps to test the C++ SDK. +# Runs unit tests (mocked, no Core DLL needed) and E2E tests (requires Core DLL). 
+parameters: +- name: isWinML + type: boolean + default: false +- name: flcNugetDir + type: string + displayName: 'Path to directory containing the FLC .nupkg' +- name: depsVersionsDir + type: string + default: '' + displayName: 'Path to deps-versions artifact directory' + +steps: +- task: PowerShell@2 + displayName: 'Set source paths' + inputs: + targetType: inline + script: | + $multiCheckout = "$(Build.SourcesDirectory)/Foundry-Local" + if (Test-Path $multiCheckout) { + $repoRoot = $multiCheckout + } else { + $repoRoot = "$(Build.SourcesDirectory)" + } + $testDataDir = "$(Build.SourcesDirectory)/test-data-shared" + Write-Host "##vso[task.setvariable variable=repoRoot]$repoRoot" + Write-Host "##vso[task.setvariable variable=testDataDir]$testDataDir" + +# Load dependency versions from deps_versions.json +- template: update-deps-versions-steps.yml + parameters: + repoRoot: $(repoRoot) + artifactDir: ${{ parameters.depsVersionsDir }} + isWinML: ${{ parameters.isWinML }} + +# Extract FLC native binaries from the pipeline-built .nupkg +- task: PowerShell@2 + displayName: 'Extract FLC native binaries' + inputs: + targetType: inline + script: | + $nupkg = Get-ChildItem "${{ parameters.flcNugetDir }}" -Recurse -Filter "Microsoft.AI.Foundry.Local.Core*.nupkg" -Exclude "*.snupkg" | Select-Object -First 1 + if (-not $nupkg) { throw "No FLC .nupkg found in ${{ parameters.flcNugetDir }}" } + + $extractDir = "$(Build.ArtifactStagingDirectory)/flc-extract-cpp" + $zip = [System.IO.Path]::ChangeExtension($nupkg.FullName, ".zip") + Copy-Item $nupkg.FullName $zip -Force + Expand-Archive -Path $zip -DestinationPath $extractDir -Force + + $arch = if ([System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq 'Arm64') { 'arm64' } else { 'x64' } + $rid = "win-$arch" + + $nativeDir = "$extractDir/runtimes/$rid/native" + if (-not (Test-Path $nativeDir)) { throw "No native binaries found at $nativeDir for RID $rid" } + + $flcNativeDir = 
"$(Build.ArtifactStagingDirectory)/flc-native-cpp" + New-Item -ItemType Directory -Path $flcNativeDir -Force | Out-Null + Get-ChildItem $nativeDir -File | Copy-Item -Destination $flcNativeDir -Force + Write-Host "##vso[task.setvariable variable=FOUNDRY_NATIVE_OVERRIDE_DIR]$flcNativeDir" + Write-Host "Extracted FLC native binaries to $flcNativeDir" + +# Ensure vcpkg is available +- task: PowerShell@2 + displayName: 'Bootstrap vcpkg' + inputs: + targetType: inline + script: | + if ($env:VCPKG_ROOT -and (Test-Path "$env:VCPKG_ROOT/vcpkg.exe")) { + Write-Host "vcpkg already available at $env:VCPKG_ROOT" + } else { + $vcpkgDir = "$(Build.ArtifactStagingDirectory)/vcpkg" + git clone https://github.com/microsoft/vcpkg.git $vcpkgDir + & "$vcpkgDir/bootstrap-vcpkg.bat" -disableMetrics + Write-Host "##vso[task.setvariable variable=VCPKG_ROOT]$vcpkgDir" + Write-Host "##vso[task.prependpath]$vcpkgDir" + Write-Host "Bootstrapped vcpkg at $vcpkgDir" + } + +# Configure and build with tests enabled +- task: PowerShell@2 + displayName: 'CMake configure' + inputs: + targetType: inline + script: | + Set-Location "$(repoRoot)/sdk/cpp" + + $buildDir = "$(Build.ArtifactStagingDirectory)/cpp-test-build" + + cmake -G "Visual Studio 17 2022" -A x64 ` + -DCMAKE_TOOLCHAIN_FILE="$env:VCPKG_ROOT/scripts/buildsystems/vcpkg.cmake" ` + -DVCPKG_OVERLAY_TRIPLETS="$(repoRoot)/sdk/cpp/triplets" ` + -DVCPKG_OVERLAY_PORTS="$(repoRoot)/sdk/cpp/vcpkg-overlay-ports" ` + -DVCPKG_TARGET_TRIPLET=x64-windows-static-md ` + -DBUILD_TESTING=ON ` + -B "$buildDir" ` + -S "$(repoRoot)/sdk/cpp" + if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } + env: + PKG_CONFIG: 'echo' + +- task: PowerShell@2 + displayName: 'CMake build' + inputs: + targetType: inline + script: | + $buildDir = "$(Build.ArtifactStagingDirectory)/cpp-test-build" + cmake --build $buildDir --config Release + if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } + +# Run unit tests (mocked — no Core DLL needed) +- task: PowerShell@2 + displayName: 'Run unit 
tests' + inputs: + targetType: inline + script: | + $buildDir = "$(Build.ArtifactStagingDirectory)/cpp-test-build" + $testResultsDir = "$(Build.ArtifactStagingDirectory)/test-results" + New-Item -ItemType Directory -Path $testResultsDir -Force | Out-Null + Set-Location $buildDir + + # Run only the unit test executable with JUnit XML output + & ./CppSdkTests --gtest_output=xml:$testResultsDir/unit-tests.xml + if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } + +# Run E2E tests (requires Core DLL) +- task: PowerShell@2 + displayName: 'Run E2E tests' + inputs: + targetType: inline + script: | + $buildDir = "$(Build.ArtifactStagingDirectory)/cpp-test-build" + $testResultsDir = "$(Build.ArtifactStagingDirectory)/test-results" + Set-Location $buildDir + + & ./CppSdkE2ETests --gtest_output=xml:$testResultsDir/e2e-tests.xml + if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE } + env: + TF_BUILD: 'true' + FOUNDRY_TEST_DATA_DIR: $(testDataDir) + +# Publish test results +- task: PublishTestResults@2 + displayName: 'Publish test results' + inputs: + testResultsFormat: 'JUnit' + testResultsFiles: '$(Build.ArtifactStagingDirectory)/test-results/*.xml' + testRunTitle: 'C++ SDK Tests' + failTaskOnFailedTests: true + condition: always() diff --git a/sdk/cpp/CMakeLists.txt b/sdk/cpp/CMakeLists.txt index 7e32b7fb..a8135090 100644 --- a/sdk/cpp/CMakeLists.txt +++ b/sdk/cpp/CMakeLists.txt @@ -9,7 +9,11 @@ if (POLICY CMP0141) endif() endif() -project(CppSdk LANGUAGES CXX) +project(CppSdk VERSION 1.1.0 LANGUAGES CXX) + +# Full semver string (e.g. "1.1.0-dev.202604241855"). Defaults to PROJECT_VERSION. +# CI passes the full version; local builds just get MAJOR.MINOR.PATCH. 
+set(CPPSDK_VERSION_STRING "${PROJECT_VERSION}" CACHE STRING "Full semantic version string") # ----------------------------- # Windows-only + compiler guard @@ -42,7 +46,15 @@ find_package(wil CONFIG REQUIRED) find_package(Microsoft.GSL CONFIG REQUIRED) option(BUILD_TESTING "Build unit and end-to-end tests" ON) if (BUILD_TESTING) - find_package(GTest CONFIG REQUIRED) + include(FetchContent) + FetchContent_Declare( + googletest + GIT_REPOSITORY https://github.com/google/googletest.git + GIT_TAG v1.17.0 + ) + # Prevent GoogleTest from overriding our compiler/linker options on Windows + set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) + FetchContent_MakeAvailable(googletest) endif() # ----------------------------- @@ -71,6 +83,21 @@ target_link_libraries(CppSdk WIL::WIL ) +target_compile_definitions(CppSdk PUBLIC + CPPSDK_VERSION="${CPPSDK_VERSION_STRING}" +) + +# ----------------------------- +# Native dependencies (Core DLL + ONNX Runtime) +# Downloads from NuGet at configure time, copies next to executables at build time. +# Set FOUNDRY_NATIVE_OVERRIDE_DIR to skip download and use local binaries. +# ----------------------------- +option(FOUNDRY_DOWNLOAD_NATIVE_DEPS "Download native Foundry Local Core libraries from NuGet" ON) +if (FOUNDRY_DOWNLOAD_NATIVE_DEPS) + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/FoundryLocalNativeDeps.cmake) + foundry_local_download_native_deps() +endif() + # ----------------------------- # Sample executable # ----------------------------- @@ -80,6 +107,10 @@ add_executable(CppSdkSample target_link_libraries(CppSdkSample PRIVATE CppSdk) +if (FOUNDRY_DOWNLOAD_NATIVE_DEPS AND DEFINED FOUNDRY_NATIVE_DIR) + foundry_local_copy_native_deps(CppSdkSample) +endif() + # ----------------------------- # Unit tests # ----------------------------- @@ -104,7 +135,7 @@ if (BUILD_TESTING) target_link_libraries(CppSdkTests PRIVATE CppSdk - GTest::gtest_main + gtest_main ) # Copy testdata files next to the test executable so file-based tests can find them. 
@@ -138,14 +169,62 @@ if (BUILD_TESTING) target_link_libraries(CppSdkE2ETests PRIVATE CppSdk - GTest::gtest_main + gtest_main ) gtest_discover_tests(CppSdkE2ETests WORKING_DIRECTORY $<TARGET_FILE_DIR:CppSdkE2ETests> ) + + # Copy native libs next to E2E test binary so Core DLL is found at runtime + if (FOUNDRY_DOWNLOAD_NATIVE_DEPS AND DEFINED FOUNDRY_NATIVE_DIR) + foundry_local_copy_native_deps(CppSdkE2ETests) + endif() endif() +# ----------------------------- +# Install rules +# Produces a redistributable tree: +# include/ – public headers +# lib/ – static library +# lib/cmake/ – CMake package config for find_package(CppSdk) +# ----------------------------- +include(GNUInstallDirs) +include(CMakePackageConfigHelpers) + +install(TARGETS CppSdk + EXPORT CppSdkTargets + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} +) + +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} +) + +install(EXPORT CppSdkTargets + FILE CppSdkTargets.cmake + NAMESPACE foundry_local:: + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/CppSdk +) + +configure_package_config_file( + ${CMAKE_CURRENT_SOURCE_DIR}/cmake/CppSdkConfig.cmake.in + ${CMAKE_CURRENT_BINARY_DIR}/CppSdkConfig.cmake + INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/CppSdk +) + +write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/CppSdkConfigVersion.cmake + VERSION ${PROJECT_VERSION} + COMPATIBILITY SameMajorVersion +) + +install(FILES + ${CMAKE_CURRENT_BINARY_DIR}/CppSdkConfig.cmake + ${CMAKE_CURRENT_BINARY_DIR}/CppSdkConfigVersion.cmake + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/CppSdk +) + # Make Visual Studio start/debug this target by default set_property(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY VS_STARTUP_PROJECT CppSdkSample) diff --git a/sdk/cpp/README.md b/sdk/cpp/README.md new file mode 100644 index 00000000..abf08a72 --- /dev/null +++ b/sdk/cpp/README.md @@ -0,0 +1,503 @@ +# Foundry Local C++ SDK + +The Foundry Local C++ SDK provides a C++17 interface for running AI models locally on your
machine. Discover, download, load, and run inference — all without cloud dependencies. + +## Features + +- **Local-first AI** — Run models entirely on your machine with no cloud calls +- **Model catalog** — Browse and discover available models; check what's cached or loaded +- **Automatic model management** — Download, load, unload, and remove models from cache +- **Chat completions** — OpenAI-compatible chat API with both non-streaming and streaming responses +- **Audio transcription** — Transcribe audio files locally with streaming support +- **Tool calling** — Function/tool calling with multi-turn conversation support +- **Multi-variant models** — Models can have multiple variants (e.g., different quantizations) with automatic selection of the best cached variant +- **Embedded web service** — Start a local HTTP server for OpenAI-compatible API access +- **Configurable inference** — Control temperature, max tokens, top-k, top-p, frequency penalty, random seed, and more +- **Custom logging** — Implement the `ILogger` interface to route SDK log output to your application's logging system + +## Prerequisites + +- **C++17** compiler (MSVC or clang-cl on Windows) +- **CMake** 3.20+ +- **vcpkg** (used for dependency management) +- **Windows 10+** (the SDK currently requires Windows APIs) + +## Installation + +Download the latest source archive from [GitHub Releases](https://github.com/microsoft/Foundry-Local/releases), extract it, and build from source: + +```bash +# Extract the archive +unzip foundry-local-cpp-sdk-<version>.zip +cd foundry-local-cpp-sdk + +# Ensure VCPKG_ROOT is set to your vcpkg installation (needed for build dependencies) +cmake --preset x64-release +cmake --build --preset x64-release +``` + +To use the built SDK in another project, install it and point CMake at the install prefix: + +```bash +cmake --install out/build/x64-release --prefix /path/to/install +# Then in your consuming project: +# cmake -DCMAKE_PREFIX_PATH=/path/to/install ..
+``` + +### Build dependencies + +The following are resolved automatically by vcpkg during the build: + +| Package | Purpose | +|---------|---------| +| `nlohmann-json` | JSON parsing and serialization | +| `wil` | Windows Implementation Libraries (RAII handles, error helpers) | +| `ms-gsl` | Microsoft GSL (`gsl::not_null`, `gsl::span`) | +| `gtest` | Google Test (unit and E2E tests only) | + +### Runtime dependencies (automatic) + +The SDK dynamically loads `Microsoft.AI.Foundry.Local.Core.dll` and ONNX Runtime libraries at runtime. These are **downloaded automatically from NuGet** during CMake configuration — no manual setup needed. + +When you build the sample or E2E tests, the native libraries are copied next to the executable automatically. For your own project, use the provided convenience function: + +```cmake +# After find_package(CppSdk) or FetchContent: +include(cmake/FoundryLocalNativeDeps.cmake) # or found via CMAKE_MODULE_PATH +foundry_local_download_native_deps() # downloads at configure time +foundry_local_copy_native_deps(your_app) # copies next to your binary at build time +``` + +The download is skipped if the binaries are already present. In CI, set `FOUNDRY_NATIVE_OVERRIDE_DIR` to use pipeline-built binaries instead of downloading from NuGet. + +To disable automatic download (e.g. if you manage the Core DLL yourself): + +```bash +cmake -DFOUNDRY_DOWNLOAD_NATIVE_DEPS=OFF ... +``` + +### Updating the vcpkg baseline + +`vcpkg-configuration.json` contains a `baseline` field — this is a commit hash from the [microsoft/vcpkg](https://github.com/microsoft/vcpkg) repo that pins the exact versions of all build dependencies (nlohmann-json, wil, ms-gsl, gtest). This ensures reproducible builds across machines and CI. 
+ +To update the baseline to the latest vcpkg release: + +```bash +# Get the latest commit hash +git ls-remote https://github.com/microsoft/vcpkg.git HEAD + +# Update vcpkg-configuration.json with the new hash +# Then rebuild to verify nothing breaks +``` + +## Quick Start + +```cpp +#include "foundry_local.h" +#include <iostream> + +using namespace foundry_local; + +int main() { + // 1. Initialize the manager + Manager::Create({"MyApp"}); + auto& manager = Manager::Instance(); + + // 2. Get a model from the catalog + auto& catalog = manager.GetCatalog(); + auto* model = catalog.GetModel("phi-3.5-mini"); + + // 3. Download (if needed) and load the model + model->Download([](float pct) { + std::cout << "\rDownloading: " << pct << "%" << std::flush; + }); + model->Load(); + + // 4. Create a chat client and run inference + OpenAIChatClient chat(*model); + + ChatSettings settings; + settings.temperature = 0.7f; + settings.max_tokens = 256; + + auto response = chat.CompleteChat( + {{"user", "Why is the sky blue?"}}, + settings); + + std::cout << response.choices[0].message->content << std::endl; + + // 5. Clean up + model->Unload(); + Manager::Destroy(); + return 0; +} +``` + +## Usage + +### Initialization + +Create the singleton `Manager` with a `Configuration`. Call `Destroy()` when done: + +```cpp +#include "foundry_local.h" +using namespace foundry_local; + +// Minimal — just an app name +Manager::Create({"MyApp"}); + +// With custom logger +class MyLogger : public ILogger { +public: + void Log(LogLevel level, std::string_view message) noexcept override { + std::cerr << "[" << LogLevelToString(level) << "] " << message << "\n"; + } +}; + +MyLogger logger; +Manager::Create({"MyApp"}, &logger); + +// Access the singleton anywhere +auto& manager = Manager::Instance(); + +// Check if initialized +if (Manager::IsInitialized()) { /* ...
*/ } + +// Deterministic cleanup +Manager::Destroy(); +``` + +### Configuration + +The SDK is configured via `Configuration` when creating the manager: + +```cpp +Configuration config("MyApp"); +config.log_level = LogLevel::Information; +config.model_cache_dir = "/path/to/cache"; +config.web = WebServiceConfig{.urls = "http://127.0.0.1:5000"}; +config.additional_settings = {{"Bootstrap", "false"}}; + +Manager::Create(std::move(config)); +``` + +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `app_name` | `std::string` | **(required)** | Your application name | +| `app_data_dir` | `optional` | `~/.{app_name}` | Application data directory | +| `model_cache_dir` | `optional` | `{app_data}/cache/models` | Where models are stored locally | +| `logs_dir` | `optional` | `{app_data}/logs` | Log output directory | +| `log_level` | `LogLevel` | `Warning` | `Verbose`, `Debug`, `Information`, `Warning`, `Error`, `Fatal` | +| `web` | `optional` | `nullopt` | Web service configuration | +| `additional_settings` | `optional` | `nullopt` | Extra key-value settings passed to Core | + +### Browsing the Model Catalog + +The `Catalog` lets you discover what models are available, which are already cached locally, and which are currently loaded in memory. + +```cpp +auto& catalog = manager.GetCatalog(); + +// List all available models +auto models = catalog.ListModels(); +for (const auto* model : models) { + std::cout << model->GetAlias() << " (id: " << model->GetId() << ")" + << " cached=" << (model->IsCached() ? "yes" : "no") + << " loaded=" << (model->IsLoaded() ? 
"yes" : "no") << "\n"; +} + +// Look up a specific model by alias +auto* model = catalog.GetModel("phi-3.5-mini"); + +// Look up a specific variant by its unique model ID +auto* variant = catalog.GetModelVariant("phi-3.5-mini-generic-gpu-4"); + +// See what's already downloaded +auto cached = catalog.GetCachedModels(); + +// See what's currently loaded in memory +auto loaded = catalog.GetLoadedModels(); +``` + +### Model Lifecycle + +Each model may have multiple variants (different quantizations, hardware targets). The SDK auto-selects the best available variant, preferring cached versions. + +```cpp +auto* model = catalog.GetModel("phi-3.5-mini"); + +// Inspect available variants +auto* concreteModel = dynamic_cast<Model*>(model); +for (const auto& variant : concreteModel->GetAllModelVariants()) { + const auto& info = variant.GetInfo(); + std::cout << " " << info.name << " v" << info.version + << " cached=" << (variant.IsCached() ? "yes" : "no") << "\n"; +} + +// Select a specific variant +concreteModel->SelectVariant(concreteModel->GetAllModelVariants()[0]); +``` + +Download, load, and unload: + +```cpp +// Download with progress reporting +model->Download([](float pct) { + std::cout << "\r" << pct << "%" << std::flush; +}); + +// Load into memory +model->Load(); + +// Unload when done +model->Unload(); + +// Remove from local cache entirely +model->RemoveFromCache(); +``` + +### Chat Completions + +The `OpenAIChatClient` follows the OpenAI Chat Completion API structure.
 +```cpp +OpenAIChatClient chat(*model); + +ChatSettings settings; +settings.temperature = 0.7f; +settings.max_tokens = 256; + +auto response = chat.CompleteChat( + {{"system", "You are a helpful assistant."}, + {"user", "Explain quantum computing in simple terms."}}, + settings); + +std::cout << response.choices[0].message->content << "\n"; +``` + +### Streaming Responses + +For real-time token-by-token output, use streaming: + +```cpp +std::cout << "Assistant: "; +chat.CompleteChatStreaming( + {{"user", "Write a short poem about programming."}}, + settings, + [](const ChatCompletionCreateResponse& chunk) { + if (!chunk.choices.empty() && chunk.choices[0].delta && + !chunk.choices[0].delta->content.empty()) { + std::cout << chunk.choices[0].delta->content << std::flush; + } + }); +std::cout << "\n"; +``` + +### Tool Calling + +Define functions the model can call and handle the multi-turn conversation: + +```cpp +// 1. Define tools +std::vector<ToolDefinition> tools = { + {"function", + FunctionDefinition{ + "multiply_numbers", + "Multiply two integers and return the result.", + PropertyDefinition{ + "object", std::nullopt, + std::unordered_map<std::string, PropertyDefinition>{ + {"first", PropertyDefinition{"integer", "The first number"}}, + {"second", PropertyDefinition{"integer", "The second number"}}}, + std::vector<std::string>{"first", "second"}}}}}; + +// 2. Send request with tools +std::vector<ChatMessage> messages = { + {"system", "You are a helpful AI assistant. Use the provided tools when appropriate."}, + {"user", "What is 7 multiplied by 6?"}}; + +ChatSettings settings; +settings.tool_choice = ToolChoiceKind::Required; + +auto response = chat.CompleteChat(messages, tools, settings); + +// 3. Check if the model wants to call a tool +const auto& choice = response.choices[0]; +if (choice.finish_reason == FinishReason::ToolCalls && + choice.message && !choice.message->tool_calls.empty()) { + const auto& tc = choice.message->tool_calls[0]; + + // 4.
Execute the tool locally (your application logic) + std::string result = "42"; + + // 5. Feed the tool result back + messages.push_back({"assistant", "", std::nullopt, choice.message->tool_calls}); + messages.push_back({"tool", result, tc.id}); + + settings.tool_choice = ToolChoiceKind::Auto; + auto followUp = chat.CompleteChat(messages, tools, settings); + std::cout << followUp.choices[0].message->content << "\n"; +} +``` + +### Audio Transcription + +Transcribe audio files locally using the `OpenAIAudioClient`: + +```cpp +auto* model = catalog.GetModel("whisper-small"); +model->Load(); + +OpenAIAudioClient audio(*model); + +// Non-streaming transcription +auto result = audio.TranscribeAudio("recording.wav"); +std::cout << "Transcription: " << result.text << "\n"; + +// Streaming transcription +audio.TranscribeAudioStreaming("recording.wav", + [](const AudioCreateTranscriptionResponse& chunk) { + std::cout << chunk.text << std::flush; + }); +``` + +### Embedded Web Service + +Start a local HTTP server that exposes an OpenAI-compatible REST API: + +```cpp +// Configure the web service URL in Configuration +Configuration config("MyApp"); +config.web = WebServiceConfig{.urls = "http://127.0.0.1:5000"}; +Manager::Create(std::move(config)); + +auto& manager = Manager::Instance(); +manager.StartWebService(); + +auto urls = manager.GetUrls(); +std::cout << "Service running at: " << urls[0] << "\n"; + +// Any OpenAI-compatible client can now connect to the endpoint. +// ... 
+ +manager.StopWebService(); +``` + +### Chat Settings Reference + +| Field | Type | Description | +|-------|------|-------------| +| `frequency_penalty` | `optional` | Frequency penalty | +| `max_tokens` | `optional` | Maximum number of tokens to generate | +| `n` | `optional` | Number of completions to generate | +| `temperature` | `optional` | Sampling temperature (0.0–2.0; higher = more random) | +| `presence_penalty` | `optional` | Presence penalty | +| `random_seed` | `optional` | Random seed for reproducible results | +| `top_k` | `optional` | Top-k sampling parameter | +| `top_p` | `optional` | Nucleus sampling probability (0.0–1.0) | +| `tool_choice` | `optional` | Tool selection strategy (`Auto`, `None`, `Required`) | + +## API Reference + +### Core Classes + +| Class | Header | Description | +|-------|--------|-------------| +| `Manager` | `foundry_local_manager.h` | Singleton entry point — initialization, catalog access, web service | +| `Configuration` | `configuration.h` | Initialization settings (app name, cache dir, log level, web service) | +| `Catalog` | `catalog.h` | Model discovery — listing, lookup by alias/ID, cached/loaded queries | +| `IModel` | `model.h` | Abstract interface for models — identity, metadata, lifecycle | +| `Model` | `model.h` | Alias-level model with variant selection (implements `IModel`) | +| `ModelVariant` | `model.h` | Specific model variant with full metadata (implements `IModel`) | +| `ILogger` | `logger.h` | Logging interface — implement to receive SDK log output | + +### OpenAI Clients + +| Class | Header | Description | +|-------|--------|-------------| +| `OpenAIChatClient` | `openai/openai_chat_client.h` | Chat completions (non-streaming and streaming) with tool calling | +| `OpenAIAudioClient` | `openai/openai_audio_client.h` | Audio transcription (non-streaming and streaming) | + +### Types + +| Type | Header | Description | +|------|--------|-------------| +| `ChatMessage` | `openai/openai_chat_client.h` | A 
message in a chat conversation (role + content + optional tool calls) | +| `ChatSettings` | `openai/openai_chat_client.h` | Generation settings (temperature, max_tokens, etc.) | +| `ChatCompletionCreateResponse` | `openai/openai_chat_client.h` | Response from a chat completion request | +| `ToolDefinition` | `openai/openai_tool_types.h` | Describes a tool the model can call | +| `ToolCall` | `openai/openai_tool_types.h` | A tool call returned by the model | +| `AudioCreateTranscriptionResponse` | `openai/openai_audio_client.h` | Response from an audio transcription request | +| `ModelInfo` | `model.h` | Full metadata for a model variant | + +## Project Structure + +The SDK uses the standard C++ `include/` vs `src/` separation: + +``` +sdk/cpp/ +├── include/ # Public headers (shipped to consumers) +│ ├── foundry_local.h # Umbrella header +│ ├── configuration.h +│ ├── catalog.h +│ ├── model.h +│ ├── foundry_local_manager.h +│ ├── foundry_local_exception.h +│ ├── logger.h +│ ├── log_level.h +│ └── openai/ +│ ├── openai_chat_client.h +│ ├── openai_audio_client.h +│ └── openai_tool_types.h +├── src/ # Internal implementation (not shipped) +│ ├── core.h # Native DLL loader (LoadLibraryW) +│ ├── flcore_native.h # C ABI struct definitions +│ ├── core_helpers.h # Call wrappers +│ ├── core_interop_request.h # JSON request builder +│ ├── parser.h # JSON parsing utilities +│ └── *.cpp # Implementation files +├── sample/ # Sample application +│ └── main.cpp +├── test/ # Unit and E2E tests (GoogleTest) +│ ├── *_test.cpp +│ ├── e2e_test.cpp +│ ├── mock_core.h +│ └── testdata/ +├── cmake/ # CMake package config template +├── CMakeLists.txt +├── CMakePresets.json +├── vcpkg.json +└── vcpkg-configuration.json +``` + +## Platform Support + +| Platform | Status | Notes | +|----------|--------|-------| +| Windows x64 | ✅ | Full support (MSVC, clang-cl) | +| Windows ARM64 | 🔜 | Planned | +| Linux x64 | 🔜 | Planned (requires cross-platform refactor) | +| macOS ARM64 | 🔜 | Planned 
(requires cross-platform refactor) | + +## Building and Running Tests + +```bash +# Configure with tests enabled (default) +cmake --preset x64-debug + +# Build everything (library, sample, tests) +cmake --build --preset x64-debug + +# Run unit tests (no Core DLL needed) +ctest --preset x64-debug + +# Run E2E tests (requires Core DLL next to test binary) +cd out/build/x64-debug +ctest --output-on-failure +``` + +E2E tests require the Foundry Local Core DLL to be placed alongside the test binary. Tests that require model downloads are `DISABLED_` by default; run them locally with `--gtest_also_run_disabled_tests`. + +## License + +Microsoft Software License Terms — see [LICENSE](../../LICENSE) for details. diff --git a/sdk/cpp/cmake/CppSdkConfig.cmake.in b/sdk/cpp/cmake/CppSdkConfig.cmake.in new file mode 100644 index 00000000..2438575d --- /dev/null +++ b/sdk/cpp/cmake/CppSdkConfig.cmake.in @@ -0,0 +1,10 @@ +@PACKAGE_INIT@ + +include(CMakeFindDependencyMacro) +find_dependency(nlohmann_json CONFIG) +find_dependency(Microsoft.GSL CONFIG) +find_dependency(wil CONFIG) + +include("${CMAKE_CURRENT_LIST_DIR}/CppSdkTargets.cmake") + +check_required_components(CppSdk) diff --git a/sdk/cpp/cmake/FoundryLocalNativeDeps.cmake b/sdk/cpp/cmake/FoundryLocalNativeDeps.cmake new file mode 100644 index 00000000..23494742 --- /dev/null +++ b/sdk/cpp/cmake/FoundryLocalNativeDeps.cmake @@ -0,0 +1,172 @@ +# FoundryLocalNativeDeps.cmake +# +# Downloads Foundry Local Core + ONNX Runtime native libraries from NuGet +# at configure time. Mirrors the Rust SDK's build.rs approach. +# +# Outputs: +# FOUNDRY_NATIVE_DIR - directory containing the downloaded native libraries +# +# The caller should copy ${FOUNDRY_NATIVE_DIR}/*.dll next to their executable. +# A convenience function foundry_local_copy_native_deps() is provided. 
+ +include(FetchContent) + +# --------------------------------------------------------------------------- +# Read deps_versions.json to get pinned package versions +# --------------------------------------------------------------------------- +function(_foundry_read_deps_versions out_core out_ort out_genai) + # Look for deps_versions.json: first next to CMakeLists.txt, then parent sdk/ dir + set(_candidates + "${CMAKE_CURRENT_SOURCE_DIR}/deps_versions.json" + "${CMAKE_CURRENT_SOURCE_DIR}/../deps_versions.json" + ) + + set(_found "") + foreach(_path ${_candidates}) + if(EXISTS "${_path}") + set(_found "${_path}") + break() + endif() + endforeach() + + if(NOT _found) + message(FATAL_ERROR "deps_versions.json not found. Searched: ${_candidates}") + endif() + + file(READ "${_found}" _json) + + # Parse versions using CMake's string(JSON) (CMake 3.19+) + string(JSON _core_version GET "${_json}" "foundry-local-core" "nuget") + string(JSON _ort_version GET "${_json}" "onnxruntime" "version") + string(JSON _genai_version GET "${_json}" "onnxruntime-genai" "version") + + set(${out_core} "${_core_version}" PARENT_SCOPE) + set(${out_ort} "${_ort_version}" PARENT_SCOPE) + set(${out_genai} "${_genai_version}" PARENT_SCOPE) +endfunction() + +# --------------------------------------------------------------------------- +# Download a .nupkg from NuGet and extract native libs for the current RID +# --------------------------------------------------------------------------- +function(_foundry_download_nuget_native pkg_name pkg_version rid out_dir) + string(TOLOWER "${pkg_name}" _lower_name) + string(TOLOWER "${pkg_version}" _lower_version) + + set(_nupkg_url "https://api.nuget.org/v3-flatcontainer/${_lower_name}/${_lower_version}/${_lower_name}.${_lower_version}.nupkg") + set(_download_path "${out_dir}/${_lower_name}.${_lower_version}.nupkg") + + # Skip if already downloaded + if(NOT EXISTS "${_download_path}") + message(STATUS "Downloading ${pkg_name} ${pkg_version} from NuGet...") 
+ file(DOWNLOAD "${_nupkg_url}" "${_download_path}" + STATUS _dl_status + TLS_VERIFY ON + ) + list(GET _dl_status 0 _dl_code) + if(NOT _dl_code EQUAL 0) + list(GET _dl_status 1 _dl_msg) + message(FATAL_ERROR "Failed to download ${pkg_name}: ${_dl_msg}") + endif() + endif() + + # Extract native binaries for this RID + set(_extract_dir "${out_dir}/${_lower_name}-extract") + if(NOT EXISTS "${_extract_dir}") + file(ARCHIVE_EXTRACT INPUT "${_download_path}" DESTINATION "${_extract_dir}") + endif() + + set(_native_dir "${_extract_dir}/runtimes/${rid}/native") + if(EXISTS "${_native_dir}") + file(GLOB _native_files "${_native_dir}/*${CMAKE_SHARED_LIBRARY_SUFFIX}") + foreach(_f ${_native_files}) + get_filename_component(_fname "${_f}" NAME) + file(COPY_FILE "${_f}" "${out_dir}/${_fname}" ONLY_IF_DIFFERENT) + message(STATUS " Extracted ${_fname}") + endforeach() + else() + message(WARNING "No native binaries found for RID '${rid}' in ${pkg_name} ${pkg_version}") + endif() +endfunction() + +# --------------------------------------------------------------------------- +# Main: download all native deps +# --------------------------------------------------------------------------- +function(foundry_local_download_native_deps) + # Determine RID + if(WIN32) + if(CMAKE_SYSTEM_PROCESSOR STREQUAL "ARM64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "AARCH64") + set(_rid "win-arm64") + else() + set(_rid "win-x64") + endif() + elseif(APPLE) + set(_rid "osx-arm64") + elseif(UNIX) + set(_rid "linux-x64") + else() + message(WARNING "Unsupported platform — native libraries will not be downloaded.") + return() + endif() + + # Allow override (CI sets this to use pipeline-built binaries) + if(DEFINED ENV{FOUNDRY_NATIVE_OVERRIDE_DIR} AND IS_DIRECTORY "$ENV{FOUNDRY_NATIVE_OVERRIDE_DIR}") + set(_native_dir "$ENV{FOUNDRY_NATIVE_OVERRIDE_DIR}") + message(STATUS "Using native libraries from FOUNDRY_NATIVE_OVERRIDE_DIR: ${_native_dir}") + set(FOUNDRY_NATIVE_DIR "${_native_dir}" CACHE PATH "Directory 
containing Foundry Local native libraries" FORCE) + return() + endif() + + set(_native_dir "${CMAKE_BINARY_DIR}/_foundry_native") + file(MAKE_DIRECTORY "${_native_dir}") + + _foundry_read_deps_versions(_core_ver _ort_ver _genai_ver) + message(STATUS "Foundry Local native deps: Core=${_core_ver} ORT=${_ort_ver} GenAI=${_genai_ver}") + + # Check if all required libs are already present + set(_core_lib "${_native_dir}/Microsoft.AI.Foundry.Local.Core${CMAKE_SHARED_LIBRARY_SUFFIX}") + if(WIN32) + set(_ort_lib "${_native_dir}/onnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}") + set(_genai_lib "${_native_dir}/onnxruntime-genai${CMAKE_SHARED_LIBRARY_SUFFIX}") + else() + set(_ort_lib "${_native_dir}/libonnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}") + set(_genai_lib "${_native_dir}/libonnxruntime-genai${CMAKE_SHARED_LIBRARY_SUFFIX}") + endif() + + if(EXISTS "${_core_lib}" AND EXISTS "${_ort_lib}" AND EXISTS "${_genai_lib}") + message(STATUS "Native libraries already present, skipping download.") + set(FOUNDRY_NATIVE_DIR "${_native_dir}" CACHE PATH "Directory containing Foundry Local native libraries" FORCE) + return() + endif() + + # Download each package + _foundry_download_nuget_native("Microsoft.AI.Foundry.Local.Core" "${_core_ver}" "${_rid}" "${_native_dir}") + + if(_rid STREQUAL "linux-x64") + _foundry_download_nuget_native("Microsoft.ML.OnnxRuntime.Gpu.Linux" "${_ort_ver}" "${_rid}" "${_native_dir}") + else() + _foundry_download_nuget_native("Microsoft.ML.OnnxRuntime.Foundry" "${_ort_ver}" "${_rid}" "${_native_dir}") + endif() + + _foundry_download_nuget_native("Microsoft.ML.OnnxRuntimeGenAI.Foundry" "${_genai_ver}" "${_rid}" "${_native_dir}") + + set(FOUNDRY_NATIVE_DIR "${_native_dir}" CACHE PATH "Directory containing Foundry Local native libraries" FORCE) +endfunction() + +# --------------------------------------------------------------------------- +# Convenience: copy native libs next to a target's output binary +# 
--------------------------------------------------------------------------- +function(foundry_local_copy_native_deps target) + if(NOT DEFINED FOUNDRY_NATIVE_DIR OR NOT IS_DIRECTORY "${FOUNDRY_NATIVE_DIR}") + message(WARNING "FOUNDRY_NATIVE_DIR not set — call foundry_local_download_native_deps() first") + return() + endif() + + file(GLOB _native_libs "${FOUNDRY_NATIVE_DIR}/*${CMAKE_SHARED_LIBRARY_SUFFIX}") + foreach(_lib ${_native_libs}) + get_filename_component(_fname "${_lib}" NAME) + add_custom_command(TARGET ${target} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different "${_lib}" "$/${_fname}" + COMMENT "Copying ${_fname} to output directory" + ) + endforeach() +endfunction() diff --git a/sdk/cpp/src/core.h b/sdk/cpp/src/core.h index 10feee5b..36ed0d48 100644 --- a/sdk/cpp/src/core.h +++ b/sdk/cpp/src/core.h @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // -// Core DLL interop – loads Microsoft.AI.Foundry.Local.Core.dll at runtime. +// Core DLL interop � loads Microsoft.AI.Foundry.Local.Core.dll at runtime. // Internal header, not part of the public API. #pragma once diff --git a/sdk/cpp/vcpkg-configuration.json b/sdk/cpp/vcpkg-configuration.json index a5253fb7..0a7835b9 100644 --- a/sdk/cpp/vcpkg-configuration.json +++ b/sdk/cpp/vcpkg-configuration.json @@ -1,6 +1,6 @@ { "default-registry": { "kind": "builtin", - "baseline": "a9f0cd0345fb29cd227d802f1fd1917c28f8e5a3" + "baseline": "73248d88e09d43dfd7836902462a64fd2a43387b" } } diff --git a/sdk/cpp/vcpkg-overlay-ports/README.md b/sdk/cpp/vcpkg-overlay-ports/README.md new file mode 100644 index 00000000..cce36ddb --- /dev/null +++ b/sdk/cpp/vcpkg-overlay-ports/README.md @@ -0,0 +1,44 @@ +# vcpkg Overlay Ports + +This directory contains custom vcpkg overlay ports used **only in CI environments** to work around network restrictions on build agents. + +## Why this exists + +The CI build agents have restricted outbound network access. 
The upstream `nlohmann-json` vcpkg port calls `vcpkg_fixup_pkgconfig()`, which attempts to download the `pkgconf` tool from msys2 mirrors at build time. Since CI agents cannot reach these mirrors, the build fails with: + +``` +error: curl operation failed with error code 7 (Could not connect to server). +error: Not a transient network error, won't retry download from https://mirror.msys2.org/... +``` + +## What the overlay does + +The `nlohmann-json` overlay port is identical to the upstream port (v3.12.0, port-version 2) with one change: the `vcpkg_fixup_pkgconfig()` call is removed. This is safe because: + +- nlohmann-json is a **header-only** library +- All consumers use **CMake** (`find_package`), not pkg-config +- The `.pc` file is still installed; it simply isn't rewritten with vcpkg-specific paths + +## How to use (CI only) + +Set the `VCPKG_OVERLAY_PORTS` environment variable before running CMake configure: + +```powershell +# In your CI pipeline script +$env:VCPKG_OVERLAY_PORTS = "$repoRoot/sdk/cpp/vcpkg-overlay-ports" +cmake --preset x64-release +``` + +Or pass it as a CMake cache variable: + +```powershell +cmake --preset x64-release -DVCPKG_OVERLAY_PORTS="$repoRoot/sdk/cpp/vcpkg-overlay-ports" +``` + +## Local development + +Local developers with unrestricted internet access do **not** need this overlay. Without `VCPKG_OVERLAY_PORTS` set, vcpkg uses the standard upstream port as normal. + +## Maintenance + +If you update the vcpkg baseline and the upstream nlohmann-json port changes, you may need to update this overlay to match. Check if the upstream port still calls `vcpkg_fixup_pkgconfig()` — if it no longer does, this overlay can be removed. 
diff --git a/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4736_char8_t.patch b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4736_char8_t.patch new file mode 100644 index 00000000..8ebf696e --- /dev/null +++ b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4736_char8_t.patch @@ -0,0 +1,129 @@ +diff --git a/include/nlohmann/detail/conversions/from_json.hpp b/include/nlohmann/detail/conversions/from_json.hpp +index d647d74239..e161a4282f 100644 +--- a/include/nlohmann/detail/conversions/from_json.hpp ++++ b/include/nlohmann/detail/conversions/from_json.hpp +@@ -540,7 +540,10 @@ inline void from_json(const BasicJsonType& j, std_fs::path& p) + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + const auto& s = *j.template get_ptr(); +-#ifdef JSON_HAS_CPP_20 ++ // Checking for C++20 standard or later can be insufficient in case the ++ // library support for char8_t is either incomplete or was disabled ++ // altogether. Use the __cpp_lib_char8_t feature test instead. 
++#if defined(__cpp_lib_char8_t) && (__cpp_lib_char8_t >= 201907L) + p = std_fs::path(std::u8string_view(reinterpret_cast(s.data()), s.size())); + #else + p = std_fs::u8path(s); // accepts UTF-8 encoded std::string in C++17, deprecated in C++20 +diff --git a/include/nlohmann/detail/conversions/to_json.hpp b/include/nlohmann/detail/conversions/to_json.hpp +index ead45665f1..b17e8af437 100644 +--- a/include/nlohmann/detail/conversions/to_json.hpp ++++ b/include/nlohmann/detail/conversions/to_json.hpp +@@ -15,7 +15,8 @@ + + #include // copy + #include // begin, end +-#include // string ++#include // allocator_traits ++#include // basic_string, char_traits + #include // tuple, get + #include // is_same, is_constructible, is_floating_point, is_enum, underlying_type + #include // move, forward, declval, pair +@@ -440,15 +441,21 @@ inline void to_json(BasicJsonType& j, const T& t) + } + + #if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM ++#if defined(__cpp_lib_char8_t) ++template ++inline void to_json(BasicJsonType& j, const std::basic_string& s) ++{ ++ using OtherAllocator = typename std::allocator_traits::template rebind_alloc; ++ j = std::basic_string, OtherAllocator>(s.begin(), s.end(), s.get_allocator()); ++} ++#endif ++ + template + inline void to_json(BasicJsonType& j, const std_fs::path& p) + { +-#ifdef JSON_HAS_CPP_20 +- const std::u8string s = p.u8string(); +- j = std::string(s.begin(), s.end()); +-#else +- j = p.u8string(); // returns std::string in C++17 +-#endif ++ // Returns either a std::string or a std::u8string depending whether library ++ // support for char8_t is enabled. 
++ j = p.u8string(); + } + #endif + +diff --git a/single_include/nlohmann/json.hpp b/single_include/nlohmann/json.hpp +index 82d69f7c5d..be3493efa8 100644 +--- a/single_include/nlohmann/json.hpp ++++ b/single_include/nlohmann/json.hpp +@@ -5325,7 +5325,10 @@ inline void from_json(const BasicJsonType& j, std_fs::path& p) + JSON_THROW(type_error::create(302, concat("type must be string, but is ", j.type_name()), &j)); + } + const auto& s = *j.template get_ptr(); +-#ifdef JSON_HAS_CPP_20 ++ // Checking for C++20 standard or later can be insufficient in case the ++ // library support for char8_t is either incomplete or was disabled ++ // altogether. Use the __cpp_lib_char8_t feature test instead. ++#if defined(__cpp_lib_char8_t) && (__cpp_lib_char8_t >= 201907L) + p = std_fs::path(std::u8string_view(reinterpret_cast(s.data()), s.size())); + #else + p = std_fs::u8path(s); // accepts UTF-8 encoded std::string in C++17, deprecated in C++20 +@@ -5380,7 +5383,8 @@ NLOHMANN_JSON_NAMESPACE_END + + #include // copy + #include // begin, end +-#include // string ++#include // allocator_traits ++#include // basic_string, char_traits + #include // tuple, get + #include // is_same, is_constructible, is_floating_point, is_enum, underlying_type + #include // move, forward, declval, pair +@@ -6087,15 +6091,21 @@ inline void to_json(BasicJsonType& j, const T& t) + } + + #if JSON_HAS_FILESYSTEM || JSON_HAS_EXPERIMENTAL_FILESYSTEM ++#if defined(__cpp_lib_char8_t) ++template ++inline void to_json(BasicJsonType& j, const std::basic_string& s) ++{ ++ using OtherAllocator = typename std::allocator_traits::template rebind_alloc; ++ j = std::basic_string, OtherAllocator>(s.begin(), s.end(), s.get_allocator()); ++} ++#endif ++ + template + inline void to_json(BasicJsonType& j, const std_fs::path& p) + { +-#ifdef JSON_HAS_CPP_20 +- const std::u8string s = p.u8string(); +- j = std::string(s.begin(), s.end()); +-#else +- j = p.u8string(); // returns std::string in C++17 +-#endif ++ // Returns 
either a std::string or a std::u8string depending whether library ++ // support for char8_t is enabled. ++ j = p.u8string(); + } + #endif + +diff --git a/tests/src/unit-deserialization.cpp b/tests/src/unit-deserialization.cpp +index 84a970a183..5c450c23d3 100644 +--- a/tests/src/unit-deserialization.cpp ++++ b/tests/src/unit-deserialization.cpp +@@ -1134,9 +1134,10 @@ TEST_CASE("deserialization") + } + } + +-// select the types to test - char8_t is only available in C++20 ++// select the types to test - char8_t is only available since C++20 if and only ++// if __cpp_char8_t is defined. + #define TYPE_LIST(...) __VA_ARGS__ +-#ifdef JSON_HAS_CPP_20 ++#if defined(__cpp_char8_t) && (__cpp_char8_t >= 201811L) + #define ASCII_TYPES TYPE_LIST(char, wchar_t, char16_t, char32_t, char8_t) + #else + #define ASCII_TYPES TYPE_LIST(char, wchar_t, char16_t, char32_t) \ No newline at end of file diff --git a/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4742_std_optional.patch b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4742_std_optional.patch new file mode 100644 index 00000000..b8de9dca --- /dev/null +++ b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/fix-4742_std_optional.patch @@ -0,0 +1,108 @@ +diff --git a/include/nlohmann/detail/conversions/from_json.hpp b/include/nlohmann/detail/conversions/from_json.hpp +index d647d742..797f714d 100644 +--- a/include/nlohmann/detail/conversions/from_json.hpp ++++ b/include/nlohmann/detail/conversions/from_json.hpp +@@ -13,9 +13,6 @@ + #include // forward_list + #include // inserter, front_inserter, end + #include // map +-#ifdef JSON_HAS_CPP_17 +- #include // optional +-#endif + #include // string + #include // tuple, make_tuple + #include // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +@@ -32,6 +29,11 @@ + #include + #include + ++// include after macro_scope.hpp ++#ifdef JSON_HAS_CPP_17 ++ #include // optional ++#endif ++ + NLOHMANN_JSON_NAMESPACE_BEGIN + namespace detail + { +@@ -47,7 +49,6 @@ inline void 
from_json(const BasicJsonType& j, typename std::nullptr_t& n) + } + + #ifdef JSON_HAS_CPP_17 +-#ifndef JSON_USE_IMPLICIT_CONVERSIONS + template + void from_json(const BasicJsonType& j, std::optional& opt) + { +@@ -60,8 +61,6 @@ void from_json(const BasicJsonType& j, std::optional& opt) + opt.emplace(j.template get()); + } + } +- +-#endif // JSON_USE_IMPLICIT_CONVERSIONS + #endif // JSON_HAS_CPP_17 + + // overloads for basic_json template parameters +diff --git a/include/nlohmann/detail/conversions/to_json.hpp b/include/nlohmann/detail/conversions/to_json.hpp +index ead45665..f8413850 100644 +--- a/include/nlohmann/detail/conversions/to_json.hpp ++++ b/include/nlohmann/detail/conversions/to_json.hpp +@@ -267,7 +267,7 @@ struct external_constructor + #ifdef JSON_HAS_CPP_17 + template::value, int> = 0> +-void to_json(BasicJsonType& j, const std::optional& opt) ++void to_json(BasicJsonType& j, const std::optional& opt) noexcept + { + if (opt.has_value()) + { +diff --git a/single_include/nlohmann/json.hpp b/single_include/nlohmann/json.hpp +index 82d69f7c..53a9ea70 100644 +--- a/single_include/nlohmann/json.hpp ++++ b/single_include/nlohmann/json.hpp +@@ -173,9 +173,6 @@ + #include // forward_list + #include // inserter, front_inserter, end + #include // map +-#ifdef JSON_HAS_CPP_17 +- #include // optional +-#endif + #include // string + #include // tuple, make_tuple + #include // is_arithmetic, is_same, is_enum, underlying_type, is_convertible +@@ -4817,6 +4814,11 @@ NLOHMANN_JSON_NAMESPACE_END + // #include + + ++// include after macro_scope.hpp ++#ifdef JSON_HAS_CPP_17 ++ #include // optional ++#endif ++ + NLOHMANN_JSON_NAMESPACE_BEGIN + namespace detail + { +@@ -4832,7 +4834,6 @@ inline void from_json(const BasicJsonType& j, typename std::nullptr_t& n) + } + + #ifdef JSON_HAS_CPP_17 +-#ifndef JSON_USE_IMPLICIT_CONVERSIONS + template + void from_json(const BasicJsonType& j, std::optional& opt) + { +@@ -4845,8 +4846,6 @@ void from_json(const BasicJsonType& j, 
std::optional<T>& opt) + opt.emplace(j.template get<T>());
+ +vcpkg_replace_string( + "${CURRENT_PACKAGES_DIR}/share/nlohmann_json/nlohmann_jsonTargets.cmake" + "{_IMPORT_PREFIX}/nlohmann_json.natvis" + "{_IMPORT_PREFIX}/share/nlohmann_json/nlohmann_json.natvis" + IGNORE_UNCHANGED +) +if(EXISTS "${CURRENT_PACKAGES_DIR}/nlohmann_json.natvis") + file(RENAME + "${CURRENT_PACKAGES_DIR}/nlohmann_json.natvis" + "${CURRENT_PACKAGES_DIR}/share/nlohmann_json/nlohmann_json.natvis" + ) +endif() + +file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug") + +# Handle copyright +file(INSTALL "${SOURCE_PATH}/LICENSE.MIT" DESTINATION "${CURRENT_PACKAGES_DIR}/share/${PORT}" RENAME copyright) + +# Handle usage +file(COPY "${CMAKE_CURRENT_LIST_DIR}/usage" DESTINATION "${CURRENT_PACKAGES_DIR}/share/${PORT}") diff --git a/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/usage b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/usage new file mode 100644 index 00000000..1f01657c --- /dev/null +++ b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/usage @@ -0,0 +1,12 @@ +The package nlohmann-json provides CMake targets: + + find_package(nlohmann_json CONFIG REQUIRED) + target_link_libraries(main PRIVATE nlohmann_json::nlohmann_json) + +The package nlohmann-json can be configured to not provide implicit conversions via a custom triplet file: + + set(nlohmann-json_IMPLICIT_CONVERSIONS OFF) + +For more information, see the docs here: + + https://json.nlohmann.me/api/macros/json_use_implicit_conversions/ diff --git a/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/vcpkg.json b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/vcpkg.json new file mode 100644 index 00000000..c1a396f1 --- /dev/null +++ b/sdk/cpp/vcpkg-overlay-ports/nlohmann-json/vcpkg.json @@ -0,0 +1,23 @@ +{ + "name": "nlohmann-json", + "version-semver": "3.12.0", + "port-version": 2, + "description": "JSON for Modern C++", + "homepage": "https://github.com/nlohmann/json", + "license": "MIT", + "dependencies": [ + { + "name": "vcpkg-cmake", + "host": true + }, + { + "name": "vcpkg-cmake-config", + "host": true + } + ], + 
"features": { + "diagnostics": { + "description": "Build json_diagnostics" + } + } +} diff --git a/sdk/cpp/vcpkg.json b/sdk/cpp/vcpkg.json index ec08c349..d31d2735 100644 --- a/sdk/cpp/vcpkg.json +++ b/sdk/cpp/vcpkg.json @@ -4,7 +4,6 @@ "dependencies": [ "nlohmann-json", "wil", - "ms-gsl", - "gtest" + "ms-gsl" ] }