commit:     523ac4b5f845d2a98bffa2122adede3d34564acd
Author:     Volkmar W. Pogatzki <gentoo <AT> pogatzki <DOT> net>
AuthorDate: Thu Nov 14 06:19:48 2024 +0000
Commit:     Miroslav Šulc <fordfrog <AT> gentoo <DOT> org>
CommitDate: Thu Nov 14 09:21:23 2024 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=523ac4b5

sys-cluster/spark-bin: drop 2.4.7-r1

Signed-off-by: Volkmar W. Pogatzki <gentoo <AT> pogatzki.net>
Closes: https://github.com/gentoo/gentoo/pull/39313
Signed-off-by: Miroslav Šulc <fordfrog <AT> gentoo.org>

 sys-cluster/spark-bin/Manifest                  |  2 -
 sys-cluster/spark-bin/metadata.xml              |  4 --
 sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild | 82 -------------------------
 3 files changed, 88 deletions(-)

diff --git a/sys-cluster/spark-bin/Manifest b/sys-cluster/spark-bin/Manifest
index 99f45c5ffc9b..85973495a43b 100644
--- a/sys-cluster/spark-bin/Manifest
+++ b/sys-cluster/spark-bin/Manifest
@@ -1,3 +1 @@
-DIST spark-bin-2.4.7-nohadoop-scala211.tgz 168360525 BLAKE2B 9a8728a1c7450166d4d9635ff1be6a65d57df1832945672e92044e0fc88ef152e7e870d02ec590378ea97a47d4a68568da73e6a1e92f8eaaaa06f495ed9939ad SHA512 ff75e6db3c4ca5cece2a8a26ad7bd8a2c1c46b4f93e8f2922a31e6a8d98274bedc66bef3e469e862d249c5439355ccca83687a20d536a8400f2c058be553df57
-DIST spark-bin-2.4.7-nohadoop-scala212.tgz 146775712 BLAKE2B f0381e6aebeae90990d5ad19d8db923efd01e9b215ddfe34e34a8272942e57fe7ec6a53208146006ba5c4e76f8e44d202e0715f63b559cc8749b523710b92a5f SHA512 708a702af5c754dba0137c88e3c7055d7759593f013798755468638b6fdc583a707aae4c2a4e14648f91c16b963f9bba9f8c44d28341c08c4512590a425f70a8
 DIST spark-bin-3.1.2-hadoop.tgz 228834641 BLAKE2B 7b0191a15414146a96394abc489940a5e572cea66749b6201f234878935c6591ff809f551dae7d6c7c996baefd551cd27c5c967e2f06184ffe2bf972f901f1ba SHA512 2385cb772f21b014ce2abd6b8f5e815721580d6e8bc42a26d70bbcdda8d303d886a6f12b36d40f6971b5547b70fae62b5a96146f0421cb93d4e51491308ef5d5

diff --git a/sys-cluster/spark-bin/metadata.xml b/sys-cluster/spark-bin/metadata.xml
index 8d60017a41e7..95dc9a9082ca 100644
--- a/sys-cluster/spark-bin/metadata.xml
+++ b/sys-cluster/spark-bin/metadata.xml
@@ -13,8 +13,4 @@
     <email>[email protected]</email>
     <name>Proxy Maintainers</name>
   </maintainer>
-  <use>
-    <flag name="scala211">Install Spark JARs pre-built with Scala 2.11.</flag>
-    <flag name="scala212">Install Spark JARs pre-built with Scala 2.12.</flag>
-  </use>
 </pkgmetadata>

diff --git a/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
deleted file mode 100644
index 363bc8a43f06..000000000000
--- a/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-
-inherit java-pkg-2
-
-DESCRIPTION="Lightning-fast unified analytics engine"
-HOMEPAGE="https://spark.apache.org";
-SRC_URI="
-       !scala212? ( scala211? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop.tgz -> ${P}-nohadoop-scala211.tgz ) )
-       !scala211? ( scala212? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop-scala-2.12.tgz -> ${P}-nohadoop-scala212.tgz ) )
-"
-
-REQUIRED_USE="^^ ( scala211 scala212 )"
-
-LICENSE="Apache-2.0"
-SLOT="2"
-KEYWORDS="~amd64"
-
-IUSE="scala211 scala212"
-
-RDEPEND="
-       >=virtual/jre-1.8"
-
-DEPEND="
-       >=virtual/jdk-1.8"
-
-DOCS=( LICENSE NOTICE README.md RELEASE )
-
-src_unpack() {
-       unpack ${A}
-       use scala211 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop"
-       use scala212 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop-scala-2.12"
-}
-
-# Nothing to compile here.
-src_compile() { :; }
-
-src_install() {
-       dodir usr/lib/spark-${SLOT}
-       into usr/lib/spark-${SLOT}
-
-       local SPARK_SCRIPTS=(
-               bin/beeline
-               bin/find-spark-home
-               bin/load-spark-env.sh
-               bin/pyspark
-               bin/spark-class
-               bin/spark-shell
-               bin/spark-sql
-               bin/spark-submit
-       )
-
-       local s
-       for s in "${SPARK_SCRIPTS[@]}"; do
-               dobin "${s}"
-       done
-
-       insinto usr/lib/spark-${SLOT}
-
-       local SPARK_DIRS=( conf jars python sbin yarn )
-
-       local d
-       for d in "${SPARK_DIRS[@]}"; do
-               doins -r "${d}"
-       done
-
-       einstalldocs
-}
-
-pkg_postinst() {
-       einfo
-       einfo "Spark is now slotted. You have installed Spark ${SLOT}."
-       einfo
-       einfo "Make sure to add /usr/lib/spark-${SLOT}/{bin,sbin} directories"
-       einfo "to your PATH in order to run Spark shell scripts:"
-       einfo
-       einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/bin"
-       einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/sbin"
-       einfo
-}

Reply via email to