⬆️ Bump version to b6055.r6055.gba42794c9-1

This commit is contained in:
Orion 2025-08-01 04:33:04 +00:00
parent c6c2b34bab
commit 83fa3a4276
4 changed files with 9 additions and 23 deletions

View file

@@ -1,6 +1,6 @@
pkgbase = llama.cpp-hip
pkgdesc = Port of Facebook's LLaMA model in C/C++ (with AMD ROCm optimizations)
pkgver = b6052.r6052.gdaf2dd788
pkgver = b6055.r6055.gba42794c9
pkgrel = 1
url = https://github.com/ggerganov/llama.cpp
arch = x86_64
@@ -33,7 +33,7 @@ pkgbase = llama.cpp-hip
source = llama.cpp.conf
source = llama.cpp.service
sha256sums = SKIP
sha256sums = 53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87
sha256sums = 0377d08a07bda056785981d3352ccd2dbc0387c4836f91fb73e6b790d836620d
sha256sums = e4856f186f69cd5dbfcc4edec9f6b6bd08e923bceedd8622eeae1a2595beb2ec
sha256sums = c090beede58ec9b8eeac9dc1e54291994d1385bbe87ea109ac48a445ba688750
pkgname = llama.cpp-hip

View file

@@ -3,7 +3,7 @@
pkgname=llama.cpp-hip
_pkgname="${pkgname%-hip}"
pkgver=b6052.r6052.gdaf2dd788
pkgver=b6055.r6055.gba42794c9
pkgrel=1
pkgdesc="Port of Facebook's LLaMA model in C/C++ (with AMD ROCm optimizations)"
arch=(x86_64 armv7h aarch64)
@@ -39,11 +39,9 @@ source=(
llama.cpp.conf
llama.cpp.service
)
sha256sums=(
'SKIP'
'53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87'
'0377d08a07bda056785981d3352ccd2dbc0387c4836f91fb73e6b790d836620d'
)
sha256sums=('SKIP'
'e4856f186f69cd5dbfcc4edec9f6b6bd08e923bceedd8622eeae1a2595beb2ec'
'c090beede58ec9b8eeac9dc1e54291994d1385bbe87ea109ac48a445ba688750')
pkgver() {
cd "$_pkgname"

View file

@@ -1 +0,0 @@
LLAMA_ARGS=""

1
llama.cpp.conf Symbolic link
View file

@@ -0,0 +1 @@
/__w/our/our/ourpkg/llama.cpp-hip/llama.cpp.conf

View file

@@ -1,13 +0,0 @@
[Unit]
Description=llama.cpp Server
After=syslog.target network.target local-fs.target remote-fs.target nss-lookup.target
[Service]
Type=simple
EnvironmentFile=/etc/conf.d/llama.cpp
ExecStart=/usr/bin/llama-server $LLAMA_ARGS
ExecReload=/bin/kill -s HUP $MAINPID
Restart=on-failure
[Install]
WantedBy=multi-user.target

1
llama.cpp.service Symbolic link
View file

@@ -0,0 +1 @@
/__w/our/our/ourpkg/llama.cpp-hip/llama.cpp.service