From 31eddcc5eb0786ccb3932d3e419968c63c105849 Mon Sep 17 00:00:00 2001
From: lilac
Date: Sun, 29 Dec 2024 21:27:33 +0800
Subject: [PATCH] llama.cpp-cuda: auto updated to b4397-1

---
 archlinuxcn/llama.cpp-cuda/PKGBUILD | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/archlinuxcn/llama.cpp-cuda/PKGBUILD b/archlinuxcn/llama.cpp-cuda/PKGBUILD
index c50ac84f14c..38868616e29 100644
--- a/archlinuxcn/llama.cpp-cuda/PKGBUILD
+++ b/archlinuxcn/llama.cpp-cuda/PKGBUILD
@@ -2,7 +2,7 @@
 
 pkgname=llama.cpp-cuda
 _pkgname="${pkgname%-cuda}"
-pkgver=b4394
+pkgver=b4397
 pkgrel=1
 pkgdesc="Port of Facebook's LLaMA model in C/C++ (with NVIDIA CUDA optimizations)"
 arch=(x86_64 armv7h aarch64)
@@ -33,7 +33,7 @@ source=(
   llama.cpp.conf
   llama.cpp.service
 )
-sha256sums=('64d706b84ad9a8cb4529566f816d000f0ed66d81f98b7af7d31d8e5c7284335c'
+sha256sums=('9ea90bbbabea218e5bdd23aed4a95340369fe636e6e647a77218f7800aad5b39'
             'SKIP'
             '53fa70cfe40cb8a3ca432590e4f76561df0f129a31b121c9b4b34af0da7c4d87'
             '0377d08a07bda056785981d3352ccd2dbc0387c4836f91fb73e6b790d836620d')