# Maintainer: Erich Eckner <arch at eckner dot net>
# Contributor: Chris Severance aur.severach AatT spamgourmet.com
# Contributor: David Roheim <david dot roheim at gmail dot com>
# Contributor: Manuel Hoffmann <manuel@manuel-hoffmann.info>
# Contributor: Markus Holtermann <aur@markusholtermann.eu>
# Contributor: Mantas Vidutis <mantas.a.vidutis-at-gmail.com>
# Contributor: Tianjiao Yin <ytj000@gmail.com>
pkgname='hadoop'
pkgver='2.8.1'
pkgrel='1'
pkgdesc='MapReduce implementation and distributed filesystem'
arch=('i686' 'x86_64')
url='http://hadoop.apache.org'
license=('apache')
depends=('java-environment>=7' 'openssh' 'apache-ant' 'polkit')
backup=(
  "etc/conf.d/${pkgname}"
  "etc/profile.d/${pkgname}.sh"
  "etc/${pkgname}/capacity-scheduler.xml"
  "etc/${pkgname}/configuration.xsl"
  "etc/${pkgname}/core-site.xml"
  "etc/${pkgname}/fair-scheduler.xml"
  "etc/${pkgname}/hadoop-env.sh"
  "etc/${pkgname}/hadoop-metrics2.properties"
  "etc/${pkgname}/hadoop-policy.xml"
  "etc/${pkgname}/hdfs-site.xml"
  "etc/${pkgname}/log4j.properties"
  "etc/${pkgname}/mapred-queue-acls.xml"
  "etc/${pkgname}/mapred-site.xml"
  "etc/${pkgname}/masters"
  "etc/${pkgname}/slaves"
  "etc/${pkgname}/ssl-client.xml.example"
  "etc/${pkgname}/ssl-server.xml.example"
  "etc/${pkgname}/taskcontroller.cfg"
  "etc/${pkgname}/task-log4j.properties"
)
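# pacman preserves the files listed in backup= across upgrades: locally
# modified copies are kept, and the packaged versions are installed as .pacnew.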
options=('!strip')
install="${pkgname}.install"
source=(
  "https://archive.apache.org/dist/hadoop/common/hadoop-${pkgver}/hadoop-${pkgver}.tar.gz"
  'hadoop.profile'
  'hadoop-conf'
  'hadoop.sh'
  'hadoop-namenode.service'
  'hadoop-datanode.service'
  'hadoop-secondarynamenode.service'
  'hadoop-jobtracker.service'
  'hadoop-tasktracker.service'
)
sha256sums=('b5be527578ef2c8565cd435ab4ae71a024cad704f7bf597bd792082703bd77d9'
            'b6607cb8531244d9be9241d8d4980d5695869f89fa598c8d24b35ec503df413b'
            'e584c32246fd23fe5f35b13399372419584c27a234364b12d1479f3c87e70748'
            '93cb40f76f6bb0c1924b7ef083b82d39bf32190f86c28fc6304839703cdda7b1'
            '3fd40045f7657881cde0abee4ac1735232ba3d79121d724f74707252e19088b3'
            '230a58ab4e3462eb63662aee057965c5130247f7d9c98df83495c8da2c409fe5'
            '047d3d6aea9eada82780eaa93a55c6259fb1b63c68bc50cc26323e066c1b7f75'
            '5e9bc41b0086dfa7b237d1a7248a7f113299687f79ba0c58ba01eaeea0e35e79'
            '37d7a252292b365782d9e7a64d6019a78d9c561acf9b5af3c246b602d3e0a8ec')
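# When bumping pkgver, the checksums can be regenerated with updpkgsums
# (from pacman-contrib).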
package() {
  local _usr_lib="${pkgdir}/usr/lib"
  local _hadoop_real_home="${_usr_lib}/${pkgname}-${pkgver}"
  local _hadoop_link_home="${_usr_lib}/${pkgname}"
  install -d "${_usr_lib}" "${pkgdir}/usr/lib/systemd/system"
  cp -pr "${srcdir}/${pkgname}-${pkgver}" "${_usr_lib}"
  #
  install -Dpm755 "${srcdir}/hadoop-conf" "${pkgdir}/etc/conf.d/hadoop"
  install -Dpm755 "${srcdir}/hadoop.profile" "${pkgdir}/etc/profile.d/hadoop.sh"
  install -Dpm644 "${srcdir}/"hadoop-*.service -t "${pkgdir}/usr/lib/systemd/system/"
  # We copy the config files instead of symlinking them, so that they can be
  # listed in the backup array and thus be preserved across package upgrades.
  cp -pr "${_hadoop_real_home}/etc/hadoop" "${pkgdir}/etc"
  mv "${_hadoop_real_home}/etc" "${_hadoop_real_home}/orig_etc"
  # TODO: this wrapper should live in its own source file :)
  install -Dm755 <(cat << EOF
#!/bin/sh
# Automatically generated by ${pkgname}-${pkgver} PKGBUILD from Arch Linux AUR
# https://aur.archlinux.org/
for f in /etc/profile.d/*.sh; do
. "\${f}"
done
/usr/lib/hadoop/bin/hadoop "\$@"
EOF
  ) "${pkgdir}/usr/bin/hadoop"
cd "${_usr_lib}"
ln -s "${pkgname}-${pkgver}" "${pkgname}"
## Disable IPv6 (comment out to disable IPv6 support):
# sed -i 's|_OPTS="-D|_OPTS="-Djava.net.preferIPv4Stack=true -D|' hadoop-env.sh
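  # NOTE: '! :' is always false, so the legacy block below never runs; it is
  # kept only for reference (old native-library and KFS fixups).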
  if ! :; then
    ## fix native
    if [ "${CARCH}" = 'i686' ]; then
      rm -rf 'lib/native/Linux-amd64-64'
      cd 'lib/native/Linux-i386-32'
      sed -i -e "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/i386/server :" 'libhadoop.la'
    fi
    if [ "${CARCH}" = 'x86_64' ]; then
      rm -rf 'lib/native/Linux-i386-32'
      cd 'lib/native/Linux-amd64-64'
      sed -i "s:dependency_libs=':dependency_libs='-L/opt/java/jre/lib/amd64/server :" 'libhadoop.la'
    fi
    ## Create some links so Hadoop's KFS jar can find the KFS libraries
    ## (it is still fine if KFS is not installed).
    msg 'Creating KFS links...'
    for _lib in 'libkfsClient' 'libkfsCommon' 'libkfsEmulator' 'libkfsIO' 'libkfsMeta'; do
      for _ext in 'a' 'so'; do
        ln -sf "/usr/lib/${_lib}.${_ext}"
      done
    done
    ln -sf '/usr/lib/libkfs_access.so'
  fi
}