🔖 Bump ik-llama.cpp-vulkan version to r4068.756c3f8f-1

This commit is contained in:
Orion 2025-12-16 18:30:08 +00:00
parent 86eb9a0376
commit bcdd5dbebe
2 changed files with 18 additions and 4 deletions

View file

@@ -1,6 +1,6 @@
pkgbase = ik-llama.cpp-vulkan
pkgdesc = llama.cpp fork with additional SOTA quants and improved performance (Vulkan Backend)
-pkgver = r4066.090f354d
+pkgver = r4068.756c3f8f
pkgrel = 1
url = https://github.com/ikawrakow/ik_llama.cpp
arch = x86_64
@@ -32,5 +32,9 @@ pkgbase = ik-llama.cpp-vulkan
conflicts = ik-llama.cpp-cuda
options = lto
options = !debug
source = https://raw.githubusercontent.com/Orion-zhen/aur-packages/refs/heads/main/assets/llama.cpp/llama.cpp.service
source = https://raw.githubusercontent.com/Orion-zhen/aur-packages/refs/heads/main/assets/llama.cpp/llama.cpp.conf
sha256sums = 0377d08a07bda056785981d3352ccd2dbc0387c4836f91fb73e6b790d836620d
sha256sums = e4856f186f69cd5dbfcc4edec9f6b6bd08e923bceedd8622eeae1a2595beb2ec
pkgname = ik-llama.cpp-vulkan

View file

@@ -2,7 +2,7 @@
pkgname=ik-llama.cpp-vulkan
_pkgname=ik_llama.cpp
-pkgver=r4066.090f354d
+pkgver=r4068.756c3f8f
pkgrel=1
pkgdesc="llama.cpp fork with additional SOTA quants and improved performance (Vulkan Backend)"
arch=(x86_64 armv7h aarch64)
@@ -42,8 +42,12 @@ provides=(llama.cpp)
options=(lto !debug)
source=()
sha256sums=()
source=(
"https://raw.githubusercontent.com/Orion-zhen/aur-packages/refs/heads/main/assets/llama.cpp/llama.cpp.service"
"https://raw.githubusercontent.com/Orion-zhen/aur-packages/refs/heads/main/assets/llama.cpp/llama.cpp.conf"
)
sha256sums=('0377d08a07bda056785981d3352ccd2dbc0387c4836f91fb73e6b790d836620d'
'e4856f186f69cd5dbfcc4edec9f6b6bd08e923bceedd8622eeae1a2595beb2ec')
prepare() {
cd "$srcdir"
@@ -96,4 +100,10 @@ build() {
# package(): makepkg packaging step.
# Installs the cmake build output into $pkgdir, then the systemd unit and
# its configuration file (both fetched via the source=() array above).
package() {
# Stage everything cmake's install rules produce under the package root.
DESTDIR="${pkgdir}" cmake --install build
# Per the msg2 below, this conf file holds the llama-server arguments.
install -Dm644 "llama.cpp.conf" "${pkgdir}/etc/conf.d/llama.cpp"
# Systemd system unit; presumably it reads /etc/conf.d/llama.cpp — confirm in the service file.
install -Dm644 "llama.cpp.service" "${pkgdir}/usr/lib/systemd/system/llama.cpp.service"
msg2 "llama.cpp.service is now available"
msg2 "llama-server arguments are in /etc/conf.d/llama.cpp"
}