mirror of
https://github.com/archlinux/aur.git
synced 2026-03-14 23:16:48 +01:00
upgpkg: llama.cpp-hip b3982-1
Signed-off-by: txtsd <code@ihavea.quest>
This commit is contained in:
commit
32f7e6ca56
6 changed files with 138 additions and 0 deletions
37
.SRCINFO
Normal file
37
.SRCINFO
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
pkgbase = llama.cpp-hip
	pkgdesc = Port of Facebook's LLaMA model in C/C++ (with AMD ROCm optimizations)
	pkgver = b3982
	pkgrel = 1
	url = https://github.com/ggerganov/llama.cpp
	arch = x86_64
	arch = armv7h
	arch = aarch64
	license = MIT
	makedepends = rocm-hip-runtime
	makedepends = rocm-hip-sdk
	makedepends = clblast
	makedepends = cmake
	makedepends = git
	depends = curl
	depends = gcc-libs
	depends = glibc
	depends = python
	depends = python-numpy
	depends = python-sentencepiece
	depends = hipblas
	depends = hip-runtime-amd
	depends = openmp
	depends = rocblas
	provides = llama.cpp
	conflicts = llama.cpp
	options = lto
	source = git+https://github.com/ggerganov/llama.cpp#tag=b3982
	source = git+https://github.com/nomic-ai/kompute.git
	source = llama.cpp.conf
	source = llama.cpp.service
	sha256sums = 24615514be685a63a7d5ce3d7823e27abc09f67fef97bd3c021d1342d1cd4942
	sha256sums = SKIP
	sha256sums = 53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87
	sha256sums = 065f69ccd7ac40d189fae723b58d6de2a24966e9b526e0dbfa3035a4c46a7669

pkgname = llama.cpp-hip
|
||||
6
.gitignore
vendored
Normal file
6
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# makepkg working directories
pkg/
src/
# upstream checkouts cloned by the source=() array
llama.cpp/
kompute/

# built package archives
*.tar.*
|
||||
4
.nvchecker.toml
Normal file
4
.nvchecker.toml
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
["llama.cpp-hip"]
|
||||
source = "git"
|
||||
git = "https://github.com/ggerganov/llama.cpp.git"
|
||||
include_regex = "b\\d+"
|
||||
77
PKGBUILD
Normal file
77
PKGBUILD
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
# Maintainer: txtsd <aur.archlinux@ihavea.quest>

pkgname=llama.cpp-hip
_pkgname=${pkgname%%-hip}   # upstream project name: "llama.cpp"
pkgver=b3982
pkgrel=1
pkgdesc="Port of Facebook's LLaMA model in C/C++ (with AMD ROCm optimizations)"
arch=('x86_64' 'armv7h' 'aarch64')
url='https://github.com/ggerganov/llama.cpp'
license=('MIT')
# Runtime dependencies (ROCm math libraries plus the Python helper scripts).
depends=('curl' 'gcc-libs' 'glibc'
         'python' 'python-numpy' 'python-sentencepiece'
         'hipblas' 'hip-runtime-amd' 'openmp' 'rocblas')
# Build-only dependencies.
makedepends=('rocm-hip-runtime' 'rocm-hip-sdk' 'clblast' 'cmake' 'git')
provides=(${_pkgname})
conflicts=(${_pkgname})
options=(lto)
source=("git+${url}#tag=${pkgver}"
        "git+https://github.com/nomic-ai/kompute.git"
        'llama.cpp.conf'
        'llama.cpp.service')
sha256sums=('24615514be685a63a7d5ce3d7823e27abc09f67fef97bd3c021d1342d1cd4942'
            'SKIP'
            '53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87'
            '065f69ccd7ac40d189fae723b58d6de2a24966e9b526e0dbfa3035a4c46a7669')
|
||||
|
||||
prepare() {
  cd "$_pkgname"

  # Wire the vendored kompute submodule to the local clone fetched via
  # source=() instead of hitting the network, then check it out.
  # protocol.file.allow=always is needed because newer git refuses
  # file-protocol submodule URLs by default.
  git submodule init
  git config submodule.kompute.url "$srcdir/kompute"
  git -c protocol.file.allow=always submodule update
}
|
||||
|
||||
build() {
  cd "${_pkgname}"

  # ROCm's bundled clang is required to compile the HIP device kernels.
  export CC=/opt/rocm/llvm/bin/clang
  export CXX=/opt/rocm/llvm/bin/clang++

  # NOTE: `cmake --build --config Release` only has an effect on
  # multi-config generators; with the default single-config (Makefile)
  # generator the build type must be chosen at configure time, otherwise
  # CMake applies no optimization flags of its own. Set it explicitly.
  cmake -S . -B build \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_INSTALL_PREFIX=/usr \
    -DBUILD_SHARED_LIBS=ON \
    -DGGML_ALL_WARNINGS_3RD_PARTY=ON \
    -DGGML_BLAS=ON \
    -DGGML_LTO=ON \
    -DGGML_RPC=ON \
    -DLLAMA_CURL=ON \
    -DLLAMA_FATAL_WARNINGS=ON \
    -DGGML_HIPBLAS=ON
  cmake --build build --config Release
}
|
||||
|
||||
package() {
  cd "$_pkgname"

  # Install everything CMake knows about under the packaging root.
  DESTDIR="$pkgdir" cmake --install build

  # License file required by the MIT license.
  install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"

  # systemd unit and its environment file for running llama-server.
  install -Dm644 "$srcdir/llama.cpp.conf" "$pkgdir/etc/conf.d/llama.cpp"
  install -Dm644 "$srcdir/llama.cpp.service" "$pkgdir/usr/lib/systemd/system/llama.cpp.service"
}
|
||||
1
llama.cpp.conf
Normal file
1
llama.cpp.conf
Normal file
|
|
@ -0,0 +1 @@
|
|||
LLAMA_ARGS=""
|
||||
13
llama.cpp.service
Normal file
13
llama.cpp.service
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
[Unit]
Description=llama.cpp Server
# NOTE(review): syslog.target has been obsolete for years and is ignored by
# modern systemd; kept here for byte-compat, safe to drop.
After=syslog.target network.target local-fs.target remote-fs.target nss-lookup.target

[Service]
Type=simple
EnvironmentFile=/etc/conf.d/llama.cpp
ExecStart=/usr/bin/llama-server $LLAMA_ARGS
ExecReload=/bin/kill -s HUP $MAINPID
# "never" is not a valid Restart= value; systemd only accepts
# no/on-success/on-failure/on-abnormal/on-watchdog/on-abort/always and would
# reject "never" with a parse warning. "no" expresses the intended behavior.
Restart=no

[Install]
WantedBy=multi-user.target
|
||||
Loading…
Add table
Reference in a new issue