Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable coremltools for Linux build #23481

Merged
merged 5 commits into from
Jan 25, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
85 changes: 39 additions & 46 deletions cmake/onnxruntime_providers_coreml.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -8,25 +8,18 @@ endif()
add_compile_definitions(USE_COREML=1)

# Check if we can build the coremltools code for creating an mlpackage with an mlprogram.
# The coremltools source requires std::filesystem::path which is only available from iOS 13 on.
set(_enable_ML_PROGRAM ON)
if (IOS AND CMAKE_OSX_DEPLOYMENT_TARGET VERSION_LESS 13.0)
message(WARNING "CoreML ML Program is not supported on iOS < 13.0. Excluding ML Program support from build.")
set(_enable_ML_PROGRAM OFF)
elseif(LINUX)
# uuid-dev is required. we don't bother installing on CIs as it's really for manual developer testing.
if(LINUX)
find_library(LibUUID_LIBRARY NAMES uuid)
find_path(LibUUID_INCLUDE_DIR NAMES uuid/uuid.h)
if (NOT LibUUID_INCLUDE_DIR)
message(STATUS "uuid/uuid.h was not found as is required for ML Program support. "
message(FATAL_ERROR "uuid/uuid.h was not found as is required for ML Program support. "
"Run `sudo apt install uuid-dev` if you need to test ML Program related CoreML EP code. ")
set(_enable_ML_PROGRAM OFF)
endif()
endif()

if (_enable_ML_PROGRAM)
add_compile_definitions(COREML_ENABLE_MLPROGRAM=1)
endif()

add_compile_definitions(COREML_ENABLE_MLPROGRAM=1)


# Compile CoreML proto definition to ${CMAKE_CURRENT_BINARY_DIR}/coreml_proto
set(COREML_PROTO_ROOT ${coremltools_SOURCE_DIR}/mlmodel/format)
Expand Down Expand Up @@ -93,10 +86,10 @@ file(GLOB_RECURSE
"${ONNXRUNTIME_ROOT}/core/providers/coreml/builders/*.cc"
)

if(_enable_ML_PROGRAM)

# Add helpers to create mlpackage weights. limit to just the files we need to minimize the changes to make them
# build on Windows and Linux.
file(GLOB
file(GLOB
onnxruntime_providers_coreml_milblob_cc_srcs CONFIGURE_DEPENDS
"${coremltools_SOURCE_DIR}/mlmodel/src/MILBlob/*.hpp"
"${coremltools_SOURCE_DIR}/mlmodel/src/MILBlob/*.cpp"
Expand All @@ -105,22 +98,22 @@ if(_enable_ML_PROGRAM)
"${coremltools_SOURCE_DIR}/mlmodel/src/MILBlob/Blob/StorageFormat.hpp"
"${coremltools_SOURCE_DIR}/mlmodel/src/MILBlob/Blob/FileWriter.?pp"
"${coremltools_SOURCE_DIR}/mlmodel/src/MILBlob/Blob/StorageWriter.?pp"
)
)

# Add helpers to create mlpackage
file(GLOB
# Add helpers to create mlpackage
file(GLOB
onnxruntime_providers_coreml_modelpackage_cc_srcs CONFIGURE_DEPENDS
"${coremltools_SOURCE_DIR}/modelpackage/src/ModelPackage.?pp"
"${coremltools_SOURCE_DIR}/modelpackage/src/utils/JsonMap.?pp"
)
)

set(coremltools_srcs
set(coremltools_srcs
${onnxruntime_providers_coreml_milblob_cc_srcs}
${onnxruntime_providers_coreml_modelpackage_cc_srcs}
)
)

source_group(TREE ${coremltools_SOURCE_DIR} PREFIX coremltools FILES ${coremltools_srcs})

source_group(TREE ${coremltools_SOURCE_DIR} PREFIX coremltools FILES ${coremltools_srcs})
endif()

# Add CoreML objective c++ source code
if (APPLE)
Expand Down Expand Up @@ -174,34 +167,34 @@ if (APPLE)
target_compile_definitions(onnxruntime_providers_coreml PRIVATE __APPLE__)
endif()

if (_enable_ML_PROGRAM)
# Setup coremltools fp16 and json dependencies for creating an mlpackage.
#
# fp16 depends on psimd
FetchContent_Declare(psimd URL ${DEP_URL_psimd} URL_HASH SHA1=${DEP_SHA1_psimd})
onnxruntime_fetchcontent_makeavailable(psimd)
set(PSIMD_SOURCE_DIR ${psimd_SOURCE_DIR})
FetchContent_Declare(fp16 URL ${DEP_URL_fp16} URL_HASH SHA1=${DEP_SHA1_fp16})
set(FP16_BUILD_TESTS OFF CACHE INTERNAL "")
set(FP16_BUILD_BENCHMARKS OFF CACHE INTERNAL "")
onnxruntime_fetchcontent_makeavailable(fp16)

# need to tweak the include paths to match what the coreml source code expects
target_include_directories(onnxruntime_providers_coreml PRIVATE
${fp16_SOURCE_DIR}/include
${nlohmann_json_SOURCE_DIR}/single_include/nlohmann
${coremltools_SOURCE_DIR}
${coremltools_SOURCE_DIR}/mlmodel/src/
${coremltools_SOURCE_DIR}/modelpackage/src/
)

add_dependencies(onnxruntime_providers_coreml nlohmann_json::nlohmann_json fp16)
# Setup coremltools fp16 and json dependencies for creating an mlpackage.
#
# fp16 depends on psimd
FetchContent_Declare(psimd URL ${DEP_URL_psimd} URL_HASH SHA1=${DEP_SHA1_psimd})
onnxruntime_fetchcontent_makeavailable(psimd)
set(PSIMD_SOURCE_DIR ${psimd_SOURCE_DIR})
FetchContent_Declare(fp16 URL ${DEP_URL_fp16} URL_HASH SHA1=${DEP_SHA1_fp16})
set(FP16_BUILD_TESTS OFF CACHE INTERNAL "")
set(FP16_BUILD_BENCHMARKS OFF CACHE INTERNAL "")
onnxruntime_fetchcontent_makeavailable(fp16)

# need to tweak the include paths to match what the coreml source code expects
target_include_directories(onnxruntime_providers_coreml PRIVATE
${fp16_SOURCE_DIR}/include
${nlohmann_json_SOURCE_DIR}/single_include/nlohmann
${coremltools_SOURCE_DIR}
${coremltools_SOURCE_DIR}/mlmodel/src/
${coremltools_SOURCE_DIR}/modelpackage/src/
)

if (LINUX)
target_link_libraries(onnxruntime_providers_coreml PRIVATE uuid)
endif()
add_dependencies(onnxruntime_providers_coreml nlohmann_json::nlohmann_json fp16)

if (LINUX)
target_link_libraries(onnxruntime_providers_coreml PRIVATE uuid)
endif()


if (APPLE)
target_link_libraries(onnxruntime_providers_coreml PRIVATE "-framework Foundation" "-framework CoreML")
endif()
Expand Down
81 changes: 55 additions & 26 deletions cmake/patches/coremltools/crossplatformbuild.patch
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ index adc7bfcf..7b2bf9cc 100644
--- a/mlmodel/src/MILBlob/Blob/FileWriter.cpp
+++ b/mlmodel/src/MILBlob/Blob/FileWriter.cpp
@@ -8,8 +8,12 @@

#include <cstdio>
#include <stdexcept>
+
Expand All @@ -12,17 +12,31 @@ index adc7bfcf..7b2bf9cc 100644
#include <sys/mman.h>
#include <sys/stat.h>
+#endif

using namespace MILBlob;
using namespace MILBlob::Blob;
diff --git a/mlmodel/src/MILBlob/Blob/FileWriter.hpp b/mlmodel/src/MILBlob/Blob/FileWriter.hpp
index 2bc99403..49239513 100644
--- a/mlmodel/src/MILBlob/Blob/FileWriter.hpp
+++ b/mlmodel/src/MILBlob/Blob/FileWriter.hpp
@@ -6,7 +6,8 @@
#pragma once

#include "MILBlob/Util/Span.hpp"
-
+// ORT_EDIT: add missing header
+#include <cstdint>
edgchen1 marked this conversation as resolved.
Show resolved Hide resolved
#include <fstream>
#include <string>
#include <type_traits>
diff --git a/mlmodel/src/MILBlob/Fp16.cpp b/mlmodel/src/MILBlob/Fp16.cpp
index ae1e71a1..77a7161f 100644
--- a/mlmodel/src/MILBlob/Fp16.cpp
+++ b/mlmodel/src/MILBlob/Fp16.cpp
@@ -5,6 +5,8 @@

#include "MILBlob/Fp16.hpp"

+// ORT_EDIT: Exclude clang specific pragmas from other builds
+#if defined(__clang__)
// fp16 lib code has some conversion warnings we don't want to globally ignore
Expand All @@ -35,11 +49,11 @@ index ae1e71a1..77a7161f 100644
+#else
+#include "fp16/fp16.h"
+#endif

using namespace MILBlob;

diff --git a/modelpackage/src/ModelPackage.cpp b/modelpackage/src/ModelPackage.cpp
index 8fee56b9..99e0d8d6 100644
index 8fee56b9..5508e316 100644
--- a/modelpackage/src/ModelPackage.cpp
+++ b/modelpackage/src/ModelPackage.cpp
@@ -26,7 +26,14 @@ namespace std {
Expand All @@ -55,22 +69,22 @@ index 8fee56b9..99e0d8d6 100644
#include <uuid/uuid.h>
+#endif
#include <vector>

#if defined(__cplusplus)
@@ -187,7 +194,10 @@ public:
ModelPackageItemInfo createFile(const std::string& name, const std::string& author, const std::string& description);
};

+// ORT_EDIT: pragma only available on APPLE platforms
+#if defined(__APPLE__)
#pragma mark ModelPackageImpl
+#endif

ModelPackageImpl::ModelPackageImpl(const std::filesystem::path& path, bool createIfNecessary, bool readOnly)
: m_packagePath(path),
@@ -372,6 +382,20 @@ std::filesystem::path ModelPackageImpl::getItemPath(const std::string& name, con
}

std::string ModelPackageImpl::generateIdentifier() const {
+// ORT_EDIT: Use built-in UUID generation on Windows
+#if defined(_WIN32)
Expand All @@ -87,42 +101,43 @@ index 8fee56b9..99e0d8d6 100644
+ return uuidStrCpp;
+#else
uuid_t uuid;

// uuid_unparse generates a 36-character null-terminated string (37 bytes).
@@ -383,6 +407,7 @@ std::string ModelPackageImpl::generateIdentifier() const {
uuid_unparse(uuid, buf);

return std::string(buf);
+#endif
}

ModelPackageItemInfo ModelPackageImpl::createFile(const std::string& name, const std::string& author, const std::string& description) {
@@ -468,7 +493,13 @@ std::shared_ptr<ModelPackageItemInfo> ModelPackageImpl::findItem(const std::stri
@@ -468,7 +493,14 @@ std::shared_ptr<ModelPackageItemInfo> ModelPackageImpl::findItem(const std::stri
auto author = itemInfoEntry->getString(kModelPackageItemInfoAuthorKey);
auto description = itemInfoEntry->getString(kModelPackageItemInfoDescriptionKey);

+// ORT_EDIT: need to use path.string() on Windows
+#if defined(_WIN32)
+ return std::make_shared<ModelPackageItemInfo>(std::make_shared<ModelPackageItemInfoImpl>(identifier, path.string(), name, author, description));
+
+#else
return std::make_shared<ModelPackageItemInfo>(std::make_shared<ModelPackageItemInfoImpl>(identifier, path, name, author, description));
+#endif
+
}

std::shared_ptr<ModelPackageItemInfo> ModelPackageImpl::findItem(const std::string& name, const std::string& author) const
@@ -514,7 +545,9 @@ void ModelPackageImpl::removeItem(const std::string& identifier)
@@ -514,7 +546,9 @@ void ModelPackageImpl::removeItem(const std::string& identifier)
}

auto path = m_packageDataDirPath / itemInfoEntry->getString(kModelPackageItemInfoPathKey);
- if (0 != std::remove(path.c_str())) {
+ // ORT_EDIT: std::remove doesn't work on Windows. Use std::filesystem::remove instead.
+ // if (0 != std::remove(path.c_str())) {
+ if (!std::filesystem::remove(path)) {
throw std::runtime_error("Failed to remove file at path: " + path.string());
}

@@ -525,13 +558,16 @@ bool ModelPackageImpl::isValid(const std::filesystem::path& path)
@@ -525,13 +559,16 @@ bool ModelPackageImpl::isValid(const std::filesystem::path& path)
{
try {
ModelPackageImpl(path, false, true);
Expand All @@ -132,16 +147,16 @@ index 8fee56b9..99e0d8d6 100644
}
return true;
}

+// ORT_EDIT: pragma only available on APPLE platforms
+#if defined(__APPLE__)
#pragma mark ModelPackage
+#endif

ModelPackage::ModelPackage(const std::string& packagePath, bool createIfNecessary, bool readOnly)
: m_modelPackageImpl(std::make_shared<ModelPackageImpl>(packagePath, createIfNecessary, readOnly))
@@ -544,7 +580,12 @@ ModelPackage::~ModelPackage()

@@ -544,7 +581,12 @@ ModelPackage::~ModelPackage()
std::string ModelPackage::path() const
{
+// ORT_EDIT: Windows doesn't automatically convert to std::string as the native format could be char or wchar.
Expand All @@ -151,5 +166,19 @@ index 8fee56b9..99e0d8d6 100644
return m_modelPackageImpl->path();
+#endif
}

std::string ModelPackage::setRootModel(const std::string& path, const std::string& name, const std::string& author, const std::string& description)
diff --git a/modelpackage/src/utils/JsonMap.hpp b/modelpackage/src/utils/JsonMap.hpp
index 0d7dc3f4..b700cfd5 100644
--- a/modelpackage/src/utils/JsonMap.hpp
+++ b/modelpackage/src/utils/JsonMap.hpp
@@ -10,7 +10,8 @@
#include <iostream>
#include <vector>
#include <string>
-
+// ORT_EDIT: add missing header
+#include <memory>
class JsonMapImpl;

class JsonMap {
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ parameters:

variables:
- name: docker_base_image
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
- name: linux_trt_version
value: 10.3.0.26-1.cuda11.8
- name: Repository
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ parameters:
variables:
- name: docker_base_image
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
${{ if eq(parameters.CudaVersion, '12.2') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1

- name: Repository
${{ if eq(parameters.CudaVersion, '11.8') }}:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ variables:
- template: templates/common-variables.yml
- name: docker_base_image
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
${{ if eq(parameters.CudaVersion, '12.2') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1
- name: linux_trt_version
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: ${{ variables.linux_trt_version_cuda11 }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,9 @@ variables:
- template: templates/common-variables.yml
- name: docker_base_image
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
${{ if eq(parameters.CudaVersion, '12.2') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1
- name: linux_trt_version
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: ${{ variables.linux_trt_version_cuda11 }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ stages:
machine_pool: 'Onnxruntime-Linux-GPU'
python_wheel_suffix: '_gpu'
timeout: 480
docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
cuda_version: '11.8'

- stage: Republish_Wheels
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ stages:
machine_pool: 'Onnxruntime-Linux-GPU'
python_wheel_suffix: '_gpu'
timeout: 480
docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250109.1
docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1
cuda_version: '12.2'

- stage: Republish_Wheels
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -142,9 +142,9 @@ stages:
value: false
- name: docker_base_image
${{ if eq(parameters.CudaVersion, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20250124.1
${{ if eq(parameters.CudaVersion, '12.2') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250109.1
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1
timeoutInMinutes: 60

steps:
Expand Down
Loading
Loading