# RPM spec file for Apache Hive
# Disable the default post-install brp scripts (jar repacking, stripping,
# bytecode munging): they would mangle the Maven artifacts installed by xmvn.
%define __os_install_post %{nil}
|
|
|
|
Name: hive
Version: 3.1.2
Release: 1
Summary: The Apache Hadoop data warehouse

# Aggregate license: the distribution bundles components under
# Python-2.0, MPL-2.0, BSD and ICU in addition to the Apache-2.0 core.
License: Apache-2.0 and Python-2.0 and MPL-2.0 and BSD and ICU
URL: http://hive.apache.org/

Source0: https://github.com/apache/%{name}/archive/refs/tags/rel/release-%{version}.tar.gz
# xmvn reactor descriptor; its 'absolute-prefix' placeholder is patched
# with the real build directory during the prep stage.
Source1: xmvn-reactor
# Prebuilt dependency that is not available from buildable sources.
Source2: pentaho-aggdesigner-algorithm-5.1.5-jhyde.jar
# Replacement for the bundled guava-19.0.jar (swapped in during install).
Source3: guava-27.0-jre.jar
# JDBC driver for the MySQL-backed metastore (hence Requires: mysql5-server).
Source4: mysql-connector-java.jar

BuildRequires: cmake java-1.8.0-openjdk-devel maven xmvn xmvn-install gradle-local maven-local protobuf2-devel protobuf2-compiler
Requires: java-1.8.0-openjdk
Requires: hadoop-3.1-client hadoop-3.1-common hadoop-3.1-common-native hadoop-3.1-devel hadoop-3.1-hdfs hadoop-3.1-httpfs
Requires: hadoop-3.1-mapreduce hadoop-3.1-maven-plugin hadoop-3.1-yarn hadoop-3.1-yarn-security
Requires: mysql5-server
# Pure-Java payload: jars and scripts only, no architecture-specific files.
BuildArch: noarch
|
|
|
|
%description
|
|
The Apache Hive data warehouse software facilitates querying and
|
|
managing large datasets residing in distributed storage. Apache Hive
|
|
provides a mechanism to project structure onto this data and query
|
|
the data using a SQL-like language called HiveQL.
|
|
|
|
%prep
%setup -q -n %{name}-rel-release-%{version}

# Register the system protoc binary and the prebuilt pentaho jar in the
# local Maven repository so the reactor build can resolve them offline.
mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=2.5.0 -Dclassifier=linux-aarch_64 -Dpackaging=exe -Dfile=/usr/bin/protoc
mvn install:install-file -DgroupId=org.pentaho -DartifactId=pentaho-aggdesigner-algorithm -Dversion=5.1.5-jhyde -Dpackaging=jar -Dfile=%{SOURCE2}

# Install the xmvn reactor descriptor and substitute the 'absolute-prefix'
# placeholder with the actual build directory.  Using '|' as the sed
# delimiter avoids escaping every '/' in the path, which the previous
# implementation did via a temporary log file and a backslash-escaping sed.
cp %{SOURCE1} ./.xmvn-reactor
sed -i "s|absolute-prefix|$(pwd)|g" .xmvn-reactor

# Drop every bundled prebuilt jar: all Java code must be built from source.
find -name "*.jar" -delete

# bin/hive-config.sh ships without a shebang; replace its first line
# (a license-header line) with one.
sed -i -e '1d;2i#!/usr/bin/env bash' bin/hive-config.sh
|
|
|
|
%build

# Force a UTF-8 locale so javadoc generation does not fail on
# non-ASCII characters in the sources.
export LC_ALL=en_US.UTF-8

# Tests are skipped here; see the note under the check stage.
mvn package -DskipTests -Pdist -Dtar
|
|
|
|
%install
# Install the Maven artifacts built by the reactor (with javadoc from
# build/dist/docs) into the buildroot; this also generates .mfiles,
# consumed by the files section below.
%mvn_install -J build/dist/docs

# Skeleton of the runtime layout under the shared data directory.
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/conf
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/lib
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/scripts

# storage-api is consumed by other packages, so place the real jar in the
# system java directory; lib/ gets a symlink to it further down.
cp -f packaging/target/apache-%{name}-%{version}-bin/apache-%{name}-%{version}-bin/lib/hive-storage-api-2.7.0.jar %{buildroot}%{_datadir}/java/%{name}/

# Unpack the binary distribution produced by -Pdist and copy its payload.
tar -C packaging/target -zxf packaging/target/apache-%{name}-%{version}-bin.tar.gz
pushd packaging/target/apache-%{name}-%{version}-bin/apache-%{name}-%{version}-bin
cp -arf bin/* %{buildroot}%{_datadir}/%{name}/bin
cp -arf conf/* %{buildroot}%{_datadir}/%{name}/conf
# Swap the bundled guava for the version shipped as Source3 and add the
# MySQL JDBC driver required by the metastore.
rm -rf lib/guava-19.0.jar
cp %{SOURCE3} lib/
cp %{SOURCE4} lib/
cp -arf lib/* %{buildroot}%{_datadir}/%{name}/lib
cp -arf scripts/* %{buildroot}%{_datadir}/%{name}/scripts
popd

# create the root from here
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin/ext
install -d -m 0755 %{buildroot}%{_datadir}/%{name}/bin/ext/util
install -d -m 0755 %{buildroot}%{_sysconfdir}/%{name}

# bin: install the launcher scripts (from the patched source tree, so the
# fixed hive-config.sh is used) and expose them on PATH via symlinks.
install -dm 0755 %{buildroot}%{_bindir}
for f in %{name} hive-config.sh init-hive-dfs.sh metatool schematool; do
    install -p bin/${f} %{buildroot}%{_datadir}/%{name}/bin
    ln -s %{_datadir}/%{name}/bin/${f} %{buildroot}%{_bindir}/${f}
done

# bin/ext/util
cp -pr bin/ext/* %{buildroot}%{_datadir}/%{name}/bin/ext

# don't have these just yet...
for f in beeline.sh hiveserver2.sh; do
    rm %{buildroot}%{_datadir}/%{name}/bin/ext/${f}
done

# conf: ship the upstream templates as the default configuration and link
# them from the system configuration directory.
# NOTE: path was previously hard-coded as apache-hive-3.1.2-bin; use the
# Name/Version macros like every other path in this section.
for f in hive-default.xml hive-env.sh hive-exec-log4j2.properties hive-log4j2.properties; do
    cp -p packaging/target/apache-%{name}-%{version}-bin/conf/${f}.template %{buildroot}%{_datadir}/%{name}/conf/${f}
    ln -s %{_datadir}/%{name}/conf/${f} %{buildroot}%{_sysconfdir}/%{name}/${f}
done

# manually replace the hive jars with their links: the real jars were
# installed by mvn_install, so lib/ only needs symlinks, not copies.
rm %{buildroot}%{_datadir}/%{name}/lib/hive-*.jar
for m in accumulo-handler beeline classification cli common contrib druid-handler exec hbase-handler hplsql jdbc jdbc-handler kryo-registrator llap-client llap-common llap-ext-client llap-server llap-tez metastore serde service service-rpc shims shims-0.23 shims-common shims-scheduler standalone-metastore streaming testutils upgrade-acid vector-code-gen; do
    ln -s %{_javadir}/%{name}/%{name}-${m}.jar %{buildroot}%{_datadir}/%{name}/lib/%{name}-${m}-%{version}.jar
done

# hcatalog and storage-api artifacts use a different naming scheme, so
# they are linked individually rather than in the loop above.
ln -s %{_javadir}/%{name}/hcatalog-core.jar %{buildroot}%{_datadir}/%{name}/lib/%{name}-hcatalog-core-%{version}.jar
ln -s %{_javadir}/%{name}/hcatalog-server-extensions.jar %{buildroot}%{_datadir}/%{name}/lib/%{name}-hcatalog-server-extensions-%{version}.jar
ln -s %{_javadir}/%{name}/hive-storage-api-2.7.0.jar %{buildroot}%{_datadir}/%{name}/lib/hive-storage-api-2.7.0.jar

# MR needs the shims and thrift jars in its classpath
mkdir -p -m0755 %{buildroot}/%{_datadir}/hadoop/mapreduce/lib
ln -s %{_javadir}/%{name}/%{name}-shims.jar %{buildroot}%{_datadir}/hadoop/mapreduce/lib/%{name}-shims.jar
|
|
|
|
%check
# Intentionally empty: tests are not run due to a dependency on hive hbase
# support, which is currently unavailable in this distribution.
|
|
|
|
# .mfiles (generated by mvn_install) contributes the Maven artifacts.
%files -f .mfiles
%doc LICENSE NOTICE
# Launcher symlinks created in the install stage.
%{_bindir}/*
# Runtime tree: bin, conf, lib (jar symlinks), scripts.
%{_datadir}/%{name}
# Symlinked configuration files.
%{_sysconfdir}/%{name}
%dir %{_javadir}/%{name}
# shims jar exposed on the MapReduce classpath.
%{_datadir}/hadoop/mapreduce/lib/%{name}-shims.jar
# Real storage-api jar (everything else in lib/ links into the java dir).
%{_datadir}/java/%{name}/hive-storage-api-2.7.0.jar
|
|
|
|
|
|
%changelog
|
|
* Tue Apr 13 2021 Ge Wang <wangge20@huawei.com> 3.1.2-1
|
|
- Init package
|