summaryrefslogtreecommitdiff
path: root/hadoop/PKGBUILD
blob: c889f1034ea5bfafbebeacf66c14679564bba804 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
# Maintainer: Erich Eckner <arch at eckner dot net>
# Contributor: Chris Severance aur.severach AatT spamgourmet.com
# Contributor: David Roheim <david dot roheim at gmail dot com>
# Contributor: Manuel Hoffmann <manuel@manuel-hoffmann.info>
# Contributor: Markus Holtermann <aur@markusholtermann.eu>
# Contributor: Mantas Vidutis <mantas.a.vidutis-at-gmail.com>
# Contributor: Tianjiao Yin <ytj000@gmail.com>

pkgname='hadoop'
pkgver=2.8.2
pkgrel=1
pkgdesc='MapReduce implementation and distributed filesystem'
arch=('i686' 'x86_64')
url='https://hadoop.apache.org'
# Arch's common-license identifier is capitalized: /usr/share/licenses/common/Apache
license=('Apache')
depends=('java-environment>=7' 'openssh' 'apache-ant' 'polkit')
# Files listed here are preserved across upgrades; package() installs real
# copies (not symlinks) into /etc/hadoop precisely so this works.
backup=(
  "etc/conf.d/${pkgname}"
  "etc/profile.d/${pkgname}.sh"
  "etc/${pkgname}/capacity-scheduler.xml"
  "etc/${pkgname}/configuration.xsl"
  "etc/${pkgname}/core-site.xml"
  "etc/${pkgname}/fair-scheduler.xml"
  "etc/${pkgname}/hadoop-env.sh"
  "etc/${pkgname}/hadoop-metrics2.properties"
  "etc/${pkgname}/hadoop-policy.xml"
  "etc/${pkgname}/hdfs-site.xml"
  "etc/${pkgname}/log4j.properties"
  "etc/${pkgname}/mapred-queue-acls.xml"
  "etc/${pkgname}/mapred-site.xml"
  "etc/${pkgname}/masters"
  "etc/${pkgname}/slaves"
  "etc/${pkgname}/ssl-client.xml.example"
  "etc/${pkgname}/ssl-server.xml.example"
  "etc/${pkgname}/taskcontroller.cfg"
  "etc/${pkgname}/task-log4j.properties"
)
# Upstream ships prebuilt jars and native libs; stripping would break them.
options=('!strip')
install="${pkgname}.install"
source=(
  "https://archive.apache.org/dist/hadoop/common/hadoop-${pkgver}/hadoop-${pkgver}.tar.gz"
  'hadoop.profile'
  'hadoop-conf'
  'hadoop.sh'
  'hadoop-namenode.service'
  'hadoop-datanode.service'
  'hadoop-secondarynamenode.service'
  'hadoop-jobtracker.service'
  'hadoop-tasktracker.service'
)
sha512sums=('2463ddf5dedf2176a0eb8e1ea830d55819c7dbc13309e9e92aa9b287ac94eec4521573d8f1bd7fc017cad45220d005c626c3cee8e7ecaee418659b739b870423'
            'b9631a180becc565a6db9ddf60a0eedfc7af0cce1eaa5911521976bc295d5629e6f009bec72d77efe2b00865f9e13708399985835a2c6411c5e62ab1a86ee97f'
            '31725354a63e7b34516e1843d3d64488a35ab74bb913d8c9e773fbbb48ab4e949e938f5570fdde8ab9f66653e903a4e0261c41c99be1ebb192b143ace5d44b1f'
            '5799e1d50b90391e42122459e53496a7ff651eb96f82e3fb6d26cdcd46dc28d7f113e15773cc3c8b6d8075eef5b77e04e64808f1d2fb5d3b3d00f59cb8b7a697'
            'f65c3d899f521d58cc9ff4bf2c874a14e92ffe4cc5025ce5e450ce8b8a9a260c6f77ac6928e701b4807b7503503142297564b01a33dce2c1f64781982e597a95'
            'e20976eb0432d451907c2fec6134310f389c5cf5af63c818318149150bc816dc8684fa0ee3d76ca53fdcaab43b8a4f5b9158aea1ff9ee4172daeb5d70306ce07'
            '8908a94b8c6e8602636d365571d056846dd26a4d837bb6714b0d146b792fdca273ef3c9705c2f4cbaee95940e4180f4949e14e75dfd0b73f4ff72df65c2c2d81'
            '561d69f618bb03ad6e354658f44fcf460d2536ab338250a0693c5760d0891ef3693caa4222cfab7c05a7d5623560e28196aab91cfcc4249e60e6cb8a994c87a4'
            '6accbe0df9d1686e75384b188aacf34a3655c40c39a4ebd1356abddf199498e4a856ef9157f8fae757ed99a161ba3e88aee26006ee925b90a31e283b3192728b')

package() {
  # Staging paths inside the package root ($pkgdir, provided by makepkg).
  local _usr_lib="${pkgdir}/usr/lib"
  # Versioned install location: /usr/lib/hadoop-<pkgver>
  local _hadoop_real_home="${_usr_lib}/${pkgname}-${pkgver}"
  # NOTE(review): _hadoop_link_home is never used below — the `ln -s` near the
  # end builds the link from ${pkgname} directly. Candidate for removal.
  local _hadoop_link_home="${_usr_lib}/${pkgname}"

  install -d "${_usr_lib}" "${pkgdir}/usr/lib/systemd/system"
  # Copy the whole extracted release tree, preserving modes and timestamps.
  cp -pr "${srcdir}/${pkgname}-${pkgver}" "${_usr_lib}"

  # Environment file, login-shell profile snippet and systemd units.
  install -Dpm755 "${srcdir}/hadoop-conf" "${pkgdir}/etc/conf.d/hadoop"
  install -Dpm755 "${srcdir}/hadoop.profile" "${pkgdir}/etc/profile.d/hadoop.sh"
  install -Dpm644 "${srcdir}/"hadoop-*.service -t "${pkgdir}/usr/lib/systemd/system/"

  # Install real copies of the config files into /etc/hadoop (not symlinks),
  # so they can be listed in backup=() and survive package upgrades. The
  # pristine tree keeps its originals under orig_etc for reference.
  cp -pr "${_hadoop_real_home}/etc/hadoop" "${pkgdir}/etc"
  mv "${_hadoop_real_home}/etc" "${_hadoop_real_home}/orig_etc"

  # Generate the /usr/bin/hadoop wrapper inline from a here-doc (the process
  # substitution gives `install` a readable file).
  # TODO: ship this wrapper as its own source file instead.
  install -Dm755 <(cat << EOF
#!/bin/sh
# Automatically generated by ${pkgname}-${pkgver} PKGBUILD from Arch Linux AUR
# https://aur.archlinux.org/
for f in /etc/profile.d/*.sh; do
  . "\${f}"
done
/usr/lib/hadoop/bin/hadoop "\$@"
EOF
  ) "${pkgdir}/usr/bin/hadoop"

  # Unversioned convenience path: /usr/lib/hadoop -> hadoop-<pkgver>
  cd "${_usr_lib}"
  ln -s "${pkgname}-${pkgver}" "${pkgname}"

  ## Disable IPv6 (comment out to disable IPv6 support):
  # sed -i 's|_OPTS="-D|_OPTS="-Djava.net.preferIPv4Stack=true -D|' hadoop-env.sh

# Everything below is intentionally disabled: `! :` is always false, so the
# block never runs. Kept as reference from older (hadoop 1.x era) packaging
# that fixed native-library paths and created KFS symlinks.
if ! :; then
  ## fix native
  if [ "${CARCH}" = 'i686' ]; then
    rm -rf 'lib/native/Linux-amd64-64'
    cd 'lib/native/Linux-i386-32'
    sed -i -e "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/i386/server :" 'libhadoop.la'
  fi

  if [ "${CARCH}" = 'x86_64' ]; then
    rm -rf 'lib/native/Linux-i386-32'
    cd 'lib/native/Linux-amd64-64'
    sed -i "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/amd64/server :" 'libhadoop.la'
  fi

  ## Create some links, so Hadoop's KFS jar could access KFS libraries properly
  ## (it is still fine if KFS is not installed)

  msg 'Creating KFS links...'

  # Single-argument `ln -sf TARGET` links TARGET into the current directory
  # under its own basename.
  for _lib in 'libkfsClient' 'libkfsCommon' 'libkfsEmulator' 'libkfsIO' 'libkfsMeta'; do
    for _ext in 'a' 'so'; do
      ln -sf "/usr/lib/${_lib}.${_ext}"
    done
  done
  ln -sf '/usr/lib/libkfs_access.so'
fi

}