🔖 Bump ik-llama.cpp-cuda version to r3972.db3bed24-1

This commit is contained in:
Orion 2025-11-10 00:22:55 +00:00
parent c52b4f06a5
commit 9f74ca11e5
2 changed files with 2 additions and 2 deletions

View file

@@ -1,6 +1,6 @@
pkgbase = ik-llama.cpp-cuda
pkgdesc = llama.cpp fork with additional SOTA quants and improved performance (CUDA Backend)
pkgver = r3960.665434e5
pkgver = r3972.db3bed24
pkgrel = 1
url = https://github.com/ikawrakow/ik_llama.cpp
arch = x86_64

View file

@@ -2,7 +2,7 @@
pkgname=ik-llama.cpp-cuda
_pkgname=ik_llama.cpp
pkgver=r3960.665434e5
pkgver=r3972.db3bed24
pkgrel=1
pkgdesc="llama.cpp fork with additional SOTA quants and improved performance (CUDA Backend)"
arch=(x86_64 armv7h aarch64)