🔖 Bump llama.cpp-vulkan version to b6397-1

This commit is contained in:
Orion 2025-09-06 08:44:48 +00:00
parent c2a34fe826
commit c29317eddf
2 changed files with 22 additions and 6 deletions

View file

@@ -1,6 +1,6 @@
pkgbase = llama.cpp-vulkan
pkgdesc = Port of Facebook's LLaMA model in C/C++ (with Vulkan GPU optimizations)
pkgver = b6396
pkgver = b6397
pkgrel = 1
url = https://github.com/ggerganov/llama.cpp
arch = x86_64
@@ -28,7 +28,7 @@ pkgbase = llama.cpp-vulkan
conflicts = stable-diffusion.cpp
options = lto
options = !debug
source = llama.cpp-vulkan-b6396.tar.gz::https://github.com/ggml-org/llama.cpp/archive/refs/tags/b6396.tar.gz
sha256sums = 53df2e1479c9d6de5c52db2d970a04bc316a8b4c70b82639e48630f8161105c7
source = llama.cpp-vulkan-b6397.tar.gz::https://github.com/ggml-org/llama.cpp/archive/refs/tags/b6397.tar.gz
sha256sums = 0cd4f35881c57f0de35e3d42c07a792c2c406477b2ef79ed8e94d151e1fb1704
pkgname = llama.cpp-vulkan

View file

@@ -3,7 +3,7 @@
pkgname=llama.cpp-vulkan
_pkgname=${pkgname%%-vulkan}
pkgver=b6396
pkgver=b6397
pkgrel=1
pkgdesc="Port of Facebook's LLaMA model in C/C++ (with Vulkan GPU optimizations)"
arch=(x86_64 armv7h aarch64)
@@ -33,7 +33,7 @@ provides=(${_pkgname})
conflicts=(${_pkgname} libggml ggml stable-diffusion.cpp)
options=(lto !debug)
source=("${pkgname}-${pkgver}.tar.gz::https://github.com/ggml-org/llama.cpp/archive/refs/tags/${pkgver}.tar.gz")
sha256sums=('53df2e1479c9d6de5c52db2d970a04bc316a8b4c70b82639e48630f8161105c7')
sha256sums=('0cd4f35881c57f0de35e3d42c07a792c2c406477b2ef79ed8e94d151e1fb1704')
prepare() {
ln -sf "${_pkgname}-${pkgver}" llama.cpp
@@ -57,9 +57,25 @@ build() {
-DGGML_RPC=ON
-DGGML_VULKAN=ON
-DGGML_CUDA_FA_ALL_QUANTS=ON
-DGGML_NATIVE=ON
-Wno-dev
)
# 检查是否在 CI 环境中构建
if [ -n "$CI" ] && [ "$CI" != 0 ]; then
msg2 "CI = $CI detected, building universal package"
# 启用通用构建
_cmake_options+=(
-DGGML_BACKEND_DL=ON
-DGGML_CPU_ALL_VARIANTS=ON
-DGGML_NATIVE=OFF
)
else
# 本地构建, 针对当前设备优化
_cmake_options+=(
-DGGML_NATIVE=ON
)
fi
cmake "${_cmake_options[@]}"
cmake --build build -- -j $(nproc)
}