commit: d0c364b54e88328176248fbc2b4a56dfe0660873
Author: Patrice Clement <monsieurp <AT> gentoo <DOT> org>
AuthorDate: Thu Nov 26 15:17:21 2020 +0000
Commit: Patrice Clement <monsieurp <AT> gentoo <DOT> org>
CommitDate: Thu Nov 26 15:55:11 2020 +0000
URL: https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=d0c364b5
sys-cluster/spark-bin: conditionally install pre-compiled JARs.
Spark 2.4.x JARs come in two flavours:
* Pre-compiled with Scala 2.11.
* Pre-compiled with Scala 2.12.
Add USE conditionals to install either one of these flavours. Also, stop
installing the Spark + Hadoop bundle and install only the Spark JARs.
Package-Manager: Portage-3.0.9, Repoman-3.0.2
Signed-off-by: Patrice Clement <monsieurp <AT> gentoo.org>
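For context, the new ebuild picks the tarball via USE conditionals in
SRC_URI, and the "^^" operator in REQUIRED_USE enforces that exactly one
flavour flag is set. A minimal sketch of selecting the Scala 2.12 flavour
on a user system (the package.use file name is illustrative):

    # /etc/portage/package.use/spark-bin -- hypothetical file name
    sys-cluster/spark-bin -scala211 scala212

    # install the slotted package
    emerge --ask sys-cluster/spark-bin:2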
sys-cluster/spark-bin/Manifest | 2 +
sys-cluster/spark-bin/metadata.xml | 4 ++
sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild | 84 +++++++++++++++++++++++++
3 files changed, 90 insertions(+)
diff --git a/sys-cluster/spark-bin/Manifest b/sys-cluster/spark-bin/Manifest
index 399e58c5e48..869880ea696 100644
--- a/sys-cluster/spark-bin/Manifest
+++ b/sys-cluster/spark-bin/Manifest
@@ -1,2 +1,4 @@
+DIST spark-bin-2.4.7-scala211.tgz 168360525 BLAKE2B 9a8728a1c7450166d4d9635ff1be6a65d57df1832945672e92044e0fc88ef152e7e870d02ec590378ea97a47d4a68568da73e6a1e92f8eaaaa06f495ed9939ad SHA512 ff75e6db3c4ca5cece2a8a26ad7bd8a2c1c46b4f93e8f2922a31e6a8d98274bedc66bef3e469e862d249c5439355ccca83687a20d536a8400f2c058be553df57
+DIST spark-bin-2.4.7-scala212.tgz 146775712 BLAKE2B f0381e6aebeae90990d5ad19d8db923efd01e9b215ddfe34e34a8272942e57fe7ec6a53208146006ba5c4e76f8e44d202e0715f63b559cc8749b523710b92a5f SHA512 708a702af5c754dba0137c88e3c7055d7759593f013798755468638b6fdc583a707aae4c2a4e14648f91c16b963f9bba9f8c44d202e0715f63b559cc8749b523710b92a5f
 DIST spark-bin-2.4.7.tgz 233333392 BLAKE2B 5698cda600858ba36dbaa891949dd6e12f98877aa49b39f136733a0de9536975e373539080ed3992e4c3942c314c31919bf0bc27c6109e466cbc41b3574d0c63 SHA512 0f5455672045f6110b030ce343c049855b7ba86c0ecb5e39a075ff9d093c7f648da55ded12e72ffe65d84c32dcd5418a6d764f2d6295a3f894a4286cc80ef478
 DIST spark-bin-3.0.1.tgz 219929956 BLAKE2B dd53df1e0123feae5b69f97193d1edc647bd4802ab78c54e471ed54b3ae6eee32ace51e379569f4947599d24aba5af63b401c11ef2af6234ffc6ac1afbbe275f SHA512 f4a10baec5b8ff1841f10651cac2c4aa39c162d3029ca180a9749149e6060805b5b5ddf9287b4aa321434810172f8cc0534943ac005531bb48b6622fbe228ddc
diff --git a/sys-cluster/spark-bin/metadata.xml b/sys-cluster/spark-bin/metadata.xml
index 687380a6ed2..5408c183b06 100644
--- a/sys-cluster/spark-bin/metadata.xml
+++ b/sys-cluster/spark-bin/metadata.xml
@@ -13,4 +13,8 @@
<email>[email protected]</email>
<name>Proxy Maintainers</name>
</maintainer>
+ <use>
+ <flag name="scala211">Install Spark JARs pre-built with Scala 2.11.</flag>
+ <flag name="scala212">Install Spark JARs pre-built with Scala 2.12.</flag>
+ </use>
</pkgmetadata>
diff --git a/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
new file mode 100644
index 00000000000..3016c0cf7bd
--- /dev/null
+++ b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+inherit java-pkg-2
+
+DESCRIPTION="Lightning-fast unified analytics engine"
+HOMEPAGE="https://spark.apache.org"
+SRC_URI="
+ !scala212? ( scala211? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop.tgz -> ${P}-scala211.tgz ) )
+ !scala211? ( scala212? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop-scala-2.12.tgz -> ${P}-scala212.tgz ) )
+"
+
+REQUIRED_USE="^^ ( scala211 scala212 )"
+
+LICENSE="Apache-2.0"
+SLOT="2"
+KEYWORDS="~amd64"
+
+IUSE="+scala211 scala212"
+
+RDEPEND="
+ >=virtual/jre-1.8"
+
+DEPEND="
+ >=virtual/jdk-1.8"
+
+DOCS=( LICENSE NOTICE README.md RELEASE )
+
+src_unpack() {
+ unpack ${A}
+ use scala211 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop"
+ use scala212 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop-scala-2.12"
+}
+
+# Nothing to compile here.
+src_compile() { :; }
+
+src_install() {
+ dodir usr/lib/spark-${SLOT}
+ into usr/lib/spark-${SLOT}
+
+ local SPARK_SCRIPTS=(
+ bin/beeline
+ bin/load-spark-env.sh
+ bin/pyspark
+ bin/spark-class
+ bin/spark-shell
+ bin/spark-sql
+ bin/spark-submit
+ )
+
+ local s
+ for s in "${SPARK_SCRIPTS[@]}"; do
+ ebegin "Setting SPARK_HOME to /usr/lib/spark-${SLOT} in $(basename ${s}) script ..."
+ sed -i -e "2iSPARK_HOME=/usr/lib/spark-${SLOT}" "${s}"
+ eend $?
+ dobin "${s}"
+ done
+
+ insinto usr/lib/spark-${SLOT}
+
+ local SPARK_DIRS=( conf jars python sbin yarn )
+
+ local d
+ for d in "${SPARK_DIRS[@]}"; do
+ doins -r "${d}"
+ done
+
+ einstalldocs
+}
+
+pkg_postinst() {
+ einfo
+ einfo "Spark is now slotted. You have installed Spark ${SLOT}."
+ einfo
+ einfo "Make sure to add /usr/lib/spark-${SLOT}/{bin,sbin} directories"
+ einfo "to your PATH in order to run Spark shell scripts:"
+ einfo
+ einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/bin"
+ einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/sbin"
+ einfo
+}
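For reference, the sed expression "2iSPARK_HOME=/usr/lib/spark-${SLOT}"
inserts the assignment as the second line of each wrapper script, directly
below the shebang. A hedged sketch of the expected result after
installation (the shebang line comes from the upstream script and is an
assumption here):

    $ head -n 2 /usr/lib/spark-2/bin/spark-shell
    #!/usr/bin/env bash
    SPARK_HOME=/usr/lib/spark-2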