🔖 Bump llama.cpp-vulkan version to b6351-1

This commit is contained in:
Orion 2025-09-02 02:53:41 +00:00
parent b85298ea20
commit 4fcf13954e
2 changed files with 7 additions and 10 deletions

View file

@@ -1,6 +1,6 @@
pkgbase = llama.cpp-vulkan
pkgdesc = Port of Facebook's LLaMA model in C/C++ (with Vulkan GPU optimizations)
pkgver = b6341
pkgver = b6351
pkgrel = 1
url = https://github.com/ggerganov/llama.cpp
arch = x86_64

View file

@@ -3,7 +3,7 @@
pkgname=llama.cpp-vulkan
_pkgname=${pkgname%%-vulkan}
pkgver=b6341
pkgver=b6351
pkgrel=1
pkgdesc="Port of Facebook's LLaMA model in C/C++ (with Vulkan GPU optimizations)"
arch=(x86_64 armv7h aarch64)
@@ -37,17 +37,14 @@ sha256sums=()
# makepkg prepare() hook: clone the upstream llama.cpp repository into $srcdir.
# NOTE(review): this is a rendered diff with the +/- markers stripped, so both
# the old clone line (--depth 20) and its replacement (--depth 1 shallow clone)
# appear below; the actual PKGBUILD contains only the second one.
prepare() {
cd "$srcdir"
git clone --depth 20 --single-branch --branch master "${url}" "${_pkgname}"
git clone --depth 1 --single-branch --branch master "${url}" "${_pkgname}"
}
# makepkg pkgver() hook: print the package version (upstream release tag) to stdout.
# NOTE(review): this is a rendered diff with the +/- markers stripped — the old
# implementation (git describe on the shallow clone) and its replacement (query
# the GitHub releases API) both appear below; the real PKGBUILD keeps only the
# curl-based version, presumably because a --depth 1 clone may not contain any
# tag for git-describe to find.
pkgver() {
cd "$_pkgname"
local _latest_tag
_latest_tag=$(git describe --tags --abbrev=0)
# Format as: <latest tag>
printf "%s" "$_latest_tag"
# Use the GitHub API to fetch the tag name of the latest release
curl -s "https://api.github.com/repos/ggml-org/llama.cpp/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"([^"]+)".*/\1/'
}
build() {