commit:     e01288b79918003e5e4a26936c4ce46aa746e5b3
Author:     Sergey Alirzaev <l29ah <AT> riseup <DOT> net>
AuthorDate: Thu Feb 20 14:57:54 2025 +0000
Commit:     Sergey Alirzaev <zl29ah <AT> gmail <DOT> com>
CommitDate: Thu Feb 20 14:57:54 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=e01288b7

sci-misc/llama-cpp: Add ROCm 6.3 HIP support

Closes: https://bugs.gentoo.org/949856
Signed-off-by: Sergey Alirzaev <l29ah <AT> riseup.net>

 sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild | 35 +++++++++++++++++++++++++--
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 35 +++++++++++++++++++++++++--
 sci-misc/llama-cpp/metadata.xml               |  1 +
 3 files changed, 67 insertions(+), 4 deletions(-)

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
index 1e6b82c0d..b4db64b49 100644
--- a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
@@ -3,7 +3,9 @@
 
 EAPI=8
 
-inherit cmake
+ROCM_VERSION="6.3"
+
+inherit cmake rocm
 
 if [[ "${PV}" != "9999" ]]; then
        KEYWORDS="~amd64"
@@ -21,15 +23,37 @@ HOMEPAGE="https://github.com/ggerganov/llama.cpp"
 LICENSE="MIT"
 SLOT="0"
 CPU_FLAGS_X86=( avx avx2 f16c )
-IUSE="curl openblas blis"
+IUSE="curl openblas blis hip"
 REQUIRED_USE="?? ( openblas blis )"
 
+AMDGPU_TARGETS_COMPAT=(
+       gfx900
+       gfx90c
+       gfx902
+       gfx1010
+       gfx1011
+       gfx1012
+       gfx1030
+       gfx1031
+       gfx1032
+       gfx1034
+       gfx1035
+       gfx1036
+       gfx1100
+       gfx1101
+       gfx1102
+       gfx1103
+       gfx1150
+       gfx1151
+)
+
 # curl is needed for pulling models from huggingface
 # numpy is used by convert_hf_to_gguf.py
 DEPEND="
        curl? ( net-misc/curl:= )
        openblas? ( sci-libs/openblas:= )
        blis? ( sci-libs/blis:= )
+       hip? (  >=dev-util/hip-6.3:= )
 "
 RDEPEND="${DEPEND}
        dev-python/numpy
@@ -58,5 +82,12 @@ src_configure() {
                )
        fi
 
+       if use hip; then
+               rocm_use_hipcc
+               mycmakeargs+=(
+                       -DGGML_HIP=ON -DAMDGPU_TARGETS=$(get_amdgpu_flags)
+               )
+       fi
+
        cmake_src_configure
 }

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
index 1e6b82c0d..b4db64b49 100644
--- a/sci-misc/llama-cpp/llama-cpp-9999.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
@@ -3,7 +3,9 @@
 
 EAPI=8
 
-inherit cmake
+ROCM_VERSION="6.3"
+
+inherit cmake rocm
 
 if [[ "${PV}" != "9999" ]]; then
        KEYWORDS="~amd64"
@@ -21,15 +23,37 @@ HOMEPAGE="https://github.com/ggerganov/llama.cpp"
 LICENSE="MIT"
 SLOT="0"
 CPU_FLAGS_X86=( avx avx2 f16c )
-IUSE="curl openblas blis"
+IUSE="curl openblas blis hip"
 REQUIRED_USE="?? ( openblas blis )"
 
+AMDGPU_TARGETS_COMPAT=(
+       gfx900
+       gfx90c
+       gfx902
+       gfx1010
+       gfx1011
+       gfx1012
+       gfx1030
+       gfx1031
+       gfx1032
+       gfx1034
+       gfx1035
+       gfx1036
+       gfx1100
+       gfx1101
+       gfx1102
+       gfx1103
+       gfx1150
+       gfx1151
+)
+
 # curl is needed for pulling models from huggingface
 # numpy is used by convert_hf_to_gguf.py
 DEPEND="
        curl? ( net-misc/curl:= )
        openblas? ( sci-libs/openblas:= )
        blis? ( sci-libs/blis:= )
+       hip? (  >=dev-util/hip-6.3:= )
 "
 RDEPEND="${DEPEND}
        dev-python/numpy
@@ -58,5 +82,12 @@ src_configure() {
                )
        fi
 
+       if use hip; then
+               rocm_use_hipcc
+               mycmakeargs+=(
+                       -DGGML_HIP=ON -DAMDGPU_TARGETS=$(get_amdgpu_flags)
+               )
+       fi
+
        cmake_src_configure
 }

diff --git a/sci-misc/llama-cpp/metadata.xml b/sci-misc/llama-cpp/metadata.xml
index c93fd3afb..70af1186d 100644
--- a/sci-misc/llama-cpp/metadata.xml
+++ b/sci-misc/llama-cpp/metadata.xml
@@ -6,6 +6,7 @@
        </upstream>
        <use>
                <flag name="blis">Build a BLIS backend</flag>
+               <flag name="hip">Build a HIP (ROCm) backend</flag>
                <flag name="openblas">Build an OpenBLAS backend</flag>
        </use>
        <maintainer type="person">

Reply via email to