summaryrefslogtreecommitdiff
path: root/apache-spark/PKGBUILD
diff options
context:
space:
mode:
authorErich Eckner <git@eckner.net>2017-09-20 12:20:41 +0200
committerErich Eckner <git@eckner.net>2017-09-20 12:20:41 +0200
commit046b5d8334ec0b16253079407c6b1c652eb8b925 (patch)
treecd0941f169b77bf10bb78affa6ca3188c02d22e3 /apache-spark/PKGBUILD
parentc0a1033ef093daa270ec480effee6b709b3e8ab9 (diff)
downloadarchlinuxewe-046b5d8334ec0b16253079407c6b1c652eb8b925.tar.xz
apache-spark neu
Diffstat (limited to 'apache-spark/PKGBUILD')
-rw-r--r--apache-spark/PKGBUILD76
1 file changed, 76 insertions, 0 deletions
diff --git a/apache-spark/PKGBUILD b/apache-spark/PKGBUILD
new file mode 100644
index 000000000..4999cd5a1
--- /dev/null
+++ b/apache-spark/PKGBUILD
@@ -0,0 +1,76 @@
+# Maintainer: Erich Eckner <arch at eckner dot net>
+# Contributor: François Garillot ("huitseeker") <francois [at] garillot.net>
+# Contributor: Christian Krause ("wookietreiber") <kizkizzbangbang@gmail.com>
+
+pkgname=apache-spark
+pkgver=2.2.0
+pkgrel=3
+pkgdesc="fast and general engine for large-scale data processing"
+arch=('any')
+# Use https for the project page and the source download: the tarball is
+# fetched over the network, and sha1 alone is a weak integrity guarantee.
+url="https://spark.apache.org"
+license=('APACHE')
+depends=('java-environment>=6')
+optdepends=('python2: python2 support for pyspark'
+            'ipython2: ipython2 support for pyspark'
+            'python: python3 support for pyspark'
+            'ipython: ipython3 support for pyspark'
+            'r: support for sparkR'
+            'rsync: support rsync hadoop binaries from master'
+            'hadoop: support for running on YARN')
+install=apache-spark.install
+source=("https://d3kbcqa49mib13.cloudfront.net/spark-${pkgver}-bin-without-hadoop.tgz"
+        'apache-spark-master.service'
+        'apache-spark-slave@.service'
+        'spark-env.sh'
+        'spark-daemon-run.sh'
+        'run-master.sh'
+        'run-slave.sh')
+# TODO(review): sha1 is deprecated for new PKGBUILDs — prefer sha256sums.
+sha1sums=('15b9577049638fc1afe8d2843ac1ae9dec470962'
+          'ac71d12070a9a10323e8ec5aed4346b1dd7f21c6'
+          'a191e4f8f7f8bbc596f4fadfb3c592c3efbc4fc0'
+          'e52d327571e84b9b350bc594131fcaf50a3dd0f4'
+          '08557d2d5328d5c99e533e16366fd893fffaad78'
+          '323445b8d64aea0534a2213d2600d438f406855b'
+          '65b1bc5fce63d1fa7a1b90f2d54a09acf62012a4')
+# spark-env.sh is user-editable configuration; pacman preserves it on upgrade.
+backup=('etc/apache-spark/spark-env.sh')
+
+# Fall back to xz compression when the build environment sets no PKGEXT.
+PKGEXT=${PKGEXT:-'.pkg.tar.xz'}
+
+prepare() {
+  # No patching is needed for this release; entering the unpacked tree
+  # here makes makepkg fail early if extraction did not produce it.
+  local _srcroot="${srcdir}/spark-${pkgver}-bin-without-hadoop"
+  cd "${_srcroot}"
+}
+
+package() {
+  cd "$srcdir/spark-${pkgver}-bin-without-hadoop"
+
+  # Runtime directories: logs and worker scratch space, group-writable
+  # with setgid so daemon-created files inherit the group.
+  install -d "$pkgdir/usr/bin" "$pkgdir/opt" "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
+  chmod 2775 "$pkgdir/var/log/apache-spark" "$pkgdir/var/lib/apache-spark/work"
+
+  cp -r "$srcdir/spark-${pkgver}-bin-without-hadoop" "$pkgdir/opt/apache-spark/"
+
+  # Symlink the user-facing launchers into /usr/bin and pin SPARK_HOME
+  # inside each installed script so they work from any cwd.
+  cd "$pkgdir/usr/bin"
+  for binary in beeline pyspark sparkR spark-class spark-shell spark-sql spark-submit load-spark-env.sh; do
+    binpath="/opt/apache-spark/bin/$binary"
+    ln -s "$binpath" "$binary"
+    sed -i 's|^export SPARK_HOME=.*$|export SPARK_HOME=/opt/apache-spark|' "$pkgdir/$binpath"
+  done
+
+  # Profile snippet so interactive login shells export SPARK_HOME.
+  mkdir -p "$pkgdir/etc/profile.d"
+  {
+    echo '#!/bin/sh'
+    echo 'SPARK_HOME=/opt/apache-spark'
+    echo 'export SPARK_HOME'
+  } > "$pkgdir/etc/profile.d/apache-spark.sh"
+  chmod 755 "$pkgdir/etc/profile.d/apache-spark.sh"
+
+  install -Dm644 "$srcdir/apache-spark-master.service" "$pkgdir/usr/lib/systemd/system/apache-spark-master.service"
+  install -Dm644 "$srcdir/apache-spark-slave@.service" "$pkgdir/usr/lib/systemd/system/apache-spark-slave@.service"
+  install -Dm644 "$srcdir/spark-env.sh" "$pkgdir/etc/apache-spark/spark-env.sh"
+  for script in run-master.sh run-slave.sh spark-daemon-run.sh; do
+    install -Dm755 "$srcdir/$script" "$pkgdir/opt/apache-spark/sbin/$script"
+  done
+  # Ship upstream config templates into /etc; use the documented -t form
+  # for installing multiple files into one target directory.
+  install -Dm644 -t "$pkgdir/etc/apache-spark" "$srcdir/spark-${pkgver}-bin-without-hadoop/conf"/*
+
+  # Replace the bundled conf dir with a symlink to /etc so edits survive
+  # upgrades; keep the originals next to it as conf-templates.
+  cd "$pkgdir/opt/apache-spark"
+  mv conf conf-templates
+  ln -sf "/etc/apache-spark" conf
+  ln -sf "/var/lib/apache-spark/work" .
+}