# AI4C.spec — RPM spec for the AI4C ML-driven compiler optimization framework.
# Upstream tarball / top-level directory name, e.g. AI4C-v0.1.0-alpha.
# NOTE: name/version are not yet defined at this point; rpm leaves unknown
# macros unexpanded here and resolves them when pkg_version is used below.
%global pkg_version %{name}-v%{version}-alpha
# Root of the unpacked source tree inside the rpm build directory.
%global build_dir %{_builddir}/%{pkg_version}
# Bundled third-party sources (cmake, onnxruntime).
%global build_dir_dep %{build_dir}/third_party
# AI4C framework sources (produces libONNXRunner.so).
%global build_dir_frm %{build_dir}/aiframe
# Pre-trained model files shipped with the package.
%global build_dir_model %{build_dir}/models
# Staging areas inside the framework tree for built libs and headers.
%global build_libdir %{build_dir_frm}/lib64
%global build_includedir %{build_dir_frm}/include
# Buildroot destinations used by the install section.
%global install_libdir %{buildroot}%{_libdir}
%global install_includedir %{buildroot}%{_includedir}
# Models are installed under libdir/AI4C.
%global install_dir_model %{install_libdir}/%{name}
# Parallelism cap for every make invocation in this spec.
%global max_jobs 8
# Name/Version/Release first, so that name/version referenced by the
# globals above and by Summary expand correctly.
Name: AI4C
Version: 0.1.0
Release: 2
Summary: %{name} is a framework which enables compilers to integrate ML-driven compiler optimization.
# Package onnxruntime and SafeInt have MIT License.
# Package onnx has Apache License 2.0.
License: MIT and ASL 2.0 and Boost and BSD
URL: https://gitee.com/openeuler/AI4C
Source0: %{pkg_version}.tar.gz
Patch1: 0001-Add-batch-inference-feature-and-optimizer-model.patch
BuildRequires: cmake >= 3.13
BuildRequires: make
BuildRequires: gcc
BuildRequires: gcc-c++
BuildRequires: abseil-cpp-devel
BuildRequires: boost-devel >= 1.66
BuildRequires: bzip2
BuildRequires: python3-devel
BuildRequires: python3-numpy
BuildRequires: python3-setuptools
BuildRequires: python3-pip
BuildRequires: openssl
BuildRequires: openssl-devel
%description
%{name} is a framework which enables compilers to integrate ML-driven compiler optimization.
%prep
# autosetup unpacks Source0 into the pkg_version directory and applies the
# patches via git; the previous explicit `tar -xzf` extracted the very same
# tarball a second time and has been dropped.
%autosetup -S git -n %{pkg_version}
%build
# Bootstrap a local CMake 3.28.5 from the bundled sources (the system cmake
# only needs to be >= 3.13 and may be too old for onnxruntime).  It is
# installed under the build tree, never onto the system.
cd %{build_dir_dep}/cmake-3.28.5
# -p: tolerate re-runs of the build section.
mkdir -p build
cd build
# Use an absolute prefix; a relative ./install would silently depend on the
# working directory at `make install` time.
cmake .. -DCMAKE_INSTALL_PREFIX="$(pwd)/install"
make install -j %{max_jobs}
# Broken test in aarch64 architecture.
cd %{build_dir_dep}/onnxruntime-1.16.3
%ifarch aarch64
rm -v onnxruntime/test/optimizer/nhwc_transformer_test.cc
%endif
# Configure onnxruntime 1.16.3 with the freshly bootstrapped CMake:
# shared library + Python bindings on, unit tests built but not installed,
# benchmarks off, full protobuf, abseil disabled.
# NOTE(review): no -B is passed, so the build files are generated in the
# current (source) directory — the `make` below relies on that.
%{build_dir_dep}/cmake-3.28.5/build/install/bin/cmake \
-DCMAKE_INSTALL_LIBDIR=%{_lib} \
-DCMAKE_INSTALL_INCLUDEDIR=include \
-Donnxruntime_BUILD_SHARED_LIB=ON \
-Donnxruntime_BUILD_UNIT_TESTS=ON \
-Donnxruntime_INSTALL_UNIT_TESTS=OFF \
-Donnxruntime_BUILD_BENCHMARKS=OFF \
-Donnxruntime_USE_FULL_PROTOBUF=ON \
-DPYTHON_VERSION=%{python3_version} \
-Donnxruntime_ENABLE_CPUINFO=ON \
-Donnxruntime_DISABLE_ABSEIL=ON \
-Donnxruntime_USE_NEURAL_SPEED=OFF \
-Donnxruntime_ENABLE_PYTHON=ON \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-S cmake
make -j %{max_jobs}
# Stage the onnxruntime build artifacts into the AI4C framework tree so the
# framework build below can link against them.  Both staging directories
# must exist up front: without the include dir, the first header cp would
# create a regular *file* named `include` instead.
mkdir -p %{build_libdir}
mkdir -p %{build_includedir}
cd %{build_dir_dep}/onnxruntime-1.16.3
# -P: keep libonnxruntime.so as a symlink to the versioned library instead
# of dereferencing it into a duplicate copy.
cp -P libonnxruntime.so.1.16.3 libonnxruntime.so %{build_libdir}
# Public ONNX Runtime headers required to compile libONNXRunner.so.
for hdr in \
    include/onnxruntime/core/session/onnxruntime_c_api.h \
    include/onnxruntime/core/session/onnxruntime_cxx_api.h \
    include/onnxruntime/core/session/onnxruntime_cxx_inline.h \
    include/onnxruntime/core/session/onnxruntime_float16.h \
    include/onnxruntime/core/session/onnxruntime_run_options_config_keys.h \
    include/onnxruntime/core/session/onnxruntime_session_options_config_keys.h \
    include/onnxruntime/core/providers/cpu/cpu_provider_factory.h \
    include/onnxruntime/core/session/onnxruntime_lite_custom_op.h \
    include/onnxruntime/core/framework/provider_options.h \
    orttraining/orttraining/training_api/include/onnxruntime_training_cxx_api.h \
    orttraining/orttraining/training_api/include/onnxruntime_training_cxx_inline.h \
    orttraining/orttraining/training_api/include/onnxruntime_training_c_api.h
do
    cp "$hdr" %{build_includedir}
done
# Build the AI4C inference wrapper library `libONNXRunner.so` against the
# staged onnxruntime artifacts and move it next to them.
cd %{build_dir_frm}
cmake -S . -B .
make -j %{max_jobs}
mv libONNXRunner.so %{build_libdir}
%install
# Trained model files go under libdir/AI4C as non-executable data.
install -d %{install_dir_model}
install -m 0644 %{build_dir_model}/* %{install_dir_model}
# Shared libraries: create the destination explicitly, and use cp -a so the
# libonnxruntime.so symlink stays a symlink (plain `install` dereferences
# it into a duplicate copy of the versioned library).
install -d %{install_libdir}
cp -a %{build_libdir}/* %{install_libdir}
# Public headers, non-executable.
install -d %{install_includedir}
install -m 0644 %{build_includedir}/* %{install_includedir}
%files
# NOTE(review): these globs claim *everything* under the package's libdir
# and includedir in the buildroot — i.e. the bundled onnxruntime library,
# its headers, libONNXRunner.so and the model directory.  Confirm that
# owning the bundled onnxruntime bits (rather than splitting them out or
# using the system package) is intended.
%{_libdir}/*
%{_includedir}/*
%changelog
* Fri Jun 21 2024 Feiyang Liu <liufeiyang6@huawei.com> - 0.1.0-2
- Add batch inference feature and optimizer model
* Wed Jun 5 2024 Zhenyu Zhao <zhaozhenyu17@huawei.com> - 0.1.0-1
- Release 0.1.0