# Maintainer: Erich Eckner
# Contributor: Chris Severance aur.severach AatT spamgourmet.com
# Contributor: David Roheim
# Contributor: Manuel Hoffmann
# Contributor: Markus Holtermann
# Contributor: Mantas Vidutis
# Contributor: Tianjiao Yin

pkgname='hadoop'
pkgver=3.3.0
pkgrel=5
pkgdesc='MapReduce implementation and distributed filesystem'
arch=('i686' 'pentium4' 'x86_64')
url='http://hadoop.apache.org'
license=('apache')
# Exact versions of binary dependencies this build was linked against.
_pinned_dependencies=(
  'gcc-libs=10.2.0'
  'glibc>=2.31'
  'openssl=1.1.1.j'
  'snappy=1.1.8'
  'zlib=1:1.2.11'
)
depends=('java-environment>=7' 'openssh' 'apache-ant' 'polkit' "${_pinned_dependencies[@]}")
# Configuration files preserved across package upgrades (see note in package()).
backup=(
  "etc/conf.d/${pkgname}"
  "etc/profile.d/${pkgname}.sh"
  "etc/${pkgname}/capacity-scheduler.xml"
  "etc/${pkgname}/configuration.xsl"
  "etc/${pkgname}/core-site.xml"
  "etc/${pkgname}/fair-scheduler.xml"
  "etc/${pkgname}/hadoop-env.sh"
  "etc/${pkgname}/hadoop-metrics2.properties"
  "etc/${pkgname}/hadoop-policy.xml"
  "etc/${pkgname}/hdfs-site.xml"
  "etc/${pkgname}/log4j.properties"
  "etc/${pkgname}/mapred-queue-acls.xml"
  "etc/${pkgname}/mapred-site.xml"
  "etc/${pkgname}/masters"
  "etc/${pkgname}/slaves"
  "etc/${pkgname}/ssl-client.xml.example"
  "etc/${pkgname}/ssl-server.xml.example"
  "etc/${pkgname}/taskcontroller.cfg"
  "etc/${pkgname}/task-log4j.properties"
)
options=('!strip')
install="${pkgname}.install"
source=(
  "https://archive.apache.org/dist/hadoop/common/hadoop-${pkgver}/hadoop-${pkgver}.tar.gz"
  'hadoop.profile'
  'hadoop-conf'
  'hadoop.sh'
  'hadoop-namenode.service'
  'hadoop-datanode.service'
  'hadoop-secondarynamenode.service'
  'hadoop-jobtracker.service'
  'hadoop-tasktracker.service'
)
sha512sums=('9ac5a5a8d29de4d2edfb5e554c178b04863375c5644d6fea1f6464ab4a7e22a50a6c43253ea348edbd114fc534dcde5bdd2826007e24b2a6b0ce0d704c5b4f5b'
            'b9631a180becc565a6db9ddf60a0eedfc7af0cce1eaa5911521976bc295d5629e6f009bec72d77efe2b00865f9e13708399985835a2c6411c5e62ab1a86ee97f'
            '31725354a63e7b34516e1843d3d64488a35ab74bb913d8c9e773fbbb48ab4e949e938f5570fdde8ab9f66653e903a4e0261c41c99be1ebb192b143ace5d44b1f'
            '5799e1d50b90391e42122459e53496a7ff651eb96f82e3fb6d26cdcd46dc28d7f113e15773cc3c8b6d8075eef5b77e04e64808f1d2fb5d3b3d00f59cb8b7a697'
            'f65c3d899f521d58cc9ff4bf2c874a14e92ffe4cc5025ce5e450ce8b8a9a260c6f77ac6928e701b4807b7503503142297564b01a33dce2c1f64781982e597a95'
            'e20976eb0432d451907c2fec6134310f389c5cf5af63c818318149150bc816dc8684fa0ee3d76ca53fdcaab43b8a4f5b9158aea1ff9ee4172daeb5d70306ce07'
            '8908a94b8c6e8602636d365571d056846dd26a4d837bb6714b0d146b792fdca273ef3c9705c2f4cbaee95940e4180f4949e14e75dfd0b73f4ff72df65c2c2d81'
            '561d69f618bb03ad6e354658f44fcf460d2536ab338250a0693c5760d0891ef3693caa4222cfab7c05a7d5623560e28196aab91cfcc4249e60e6cb8a994c87a4'
            '6accbe0df9d1686e75384b188aacf34a3655c40c39a4ebd1356abddf199498e4a856ef9157f8fae757ed99a161ba3e88aee26006ee925b90a31e283b3192728b')

# Install the pre-built Hadoop tree under /usr/lib, wire up its config under
# /etc/hadoop, and install profile/systemd/wrapper glue.
package() {
  local _usr_lib="${pkgdir}/usr/lib"
  local _hadoop_real_home="${_usr_lib}/${pkgname}-${pkgver}"
  local _hadoop_link_home="${_usr_lib}/${pkgname}"

  install -d "${_usr_lib}" "${pkgdir}/usr/lib/systemd/system"
  cp -pr "${srcdir}/${pkgname}-${pkgver}" "${_usr_lib}"

  #
  install -Dpm755 "${srcdir}/hadoop-conf" "${pkgdir}/etc/conf.d/hadoop"
  install -Dpm755 "${srcdir}/hadoop.profile" "${pkgdir}/etc/profile.d/hadoop.sh"
  install -Dpm644 "${srcdir}/"hadoop-*.service -t "${pkgdir}/usr/lib/systemd/system/"

  # we do not use soft link because we need put configures in backup array,
  # in order to preserve the conf when upgrade package.
  cp -pr "${_hadoop_real_home}/etc/hadoop" "${pkgdir}/etc"
  mv "${_hadoop_real_home}/etc" "${_hadoop_real_home}/orig_etc"

  # todo: i need an own file :)
  # Generated wrapper: source all profile snippets (for JAVA_HOME etc.), then
  # exec the real hadoop launcher. \$ keeps expansion for runtime, not build time.
  install -Dm755 <(cat << EOF
#!/bin/sh
# Automatically generated by ${pkgname}-${pkgver} PKGBUILD from Arch Linux AUR
# https://aur.archlinux.org/
for f in /etc/profile.d/*.sh; do
  . "\${f}"
done
/usr/lib/hadoop/bin/hadoop "\$@"
EOF
  ) "${pkgdir}/usr/bin/hadoop"

  cd "${_usr_lib}"
  ln -s "${pkgname}-${pkgver}" "${pkgname}"

  ## Disable IPv6 (comment out to disable IPv6 support):
  # sed -i 's|_OPTS="-D|_OPTS="-Djava.net.preferIPv4Stack=true -D|' hadoop-env.sh

  # NOTE(review): the following legacy native-library / KFS fixups are
  # deliberately disabled ("if ! :" never runs) — kept for reference only.
  if ! :; then
    ## fix native
    if [ "${CARCH}" = 'i686' ] || [ "${CARCH}" = 'pentium4' ]; then
      rm -rf 'lib/native/Linux-amd64-64'
      cd 'lib/native/Linux-i386-32'
      sed -i -e "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/i386/server :" 'libhadoop.la'
    fi
    if [ "${CARCH}" = 'x86_64' ]; then
      rm -rf 'lib/native/Linux-i386-32'
      cd 'lib/native/Linux-amd64-64'
      sed -i "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/amd64/server :" 'libhadoop.la'
    fi

    ## Create some links, so Hadoop's KFS jar could access KFS libraries properly
    ## (it is still fine if KFS is not installed)
    echo 'Creating KFS links...'
    for _lib in 'libkfsClient' 'libkfsCommon' 'libkfsEmulator' 'libkfsIO' 'libkfsMeta'; do
      for _ext in 'a' 'so'; do
        ln -sf "/usr/lib/${_lib}.${_ext}"
      done
    done
    ln -sf '/usr/lib/libkfs_access.so'
  fi
}