bump(main/llama-cpp): 0.0.0-b6475

This commit has been automatically submitted by GitHub Actions.
Author: termux-pacman-bot
Date:   2025-09-15 00:49:50 +00:00
parent 13922e4d84
commit 14dbdfd70d


@@ -2,9 +2,9 @@ TERMUX_PKG_HOMEPAGE=https://github.com/ggml-org/llama.cpp
 TERMUX_PKG_DESCRIPTION="LLM inference in C/C++"
 TERMUX_PKG_LICENSE="MIT"
 TERMUX_PKG_MAINTAINER=@termux
-TERMUX_PKG_VERSION="0.0.0-b6471"
+TERMUX_PKG_VERSION="0.0.0-b6475"
 TERMUX_PKG_SRCURL=https://github.com/ggml-org/llama.cpp/archive/refs/tags/${TERMUX_PKG_VERSION#*-}.tar.gz
-TERMUX_PKG_SHA256=5e6caafb8e2275821a18206de0797d298a1c15ae9b62bbac4b4215327bebc332
+TERMUX_PKG_SHA256=0a25df20c0f79d6b95fb3c02eb6a49c15045c8e9aa4511b1a546e7c0d8a08002
 TERMUX_PKG_AUTO_UPDATE=true
 TERMUX_PKG_DEPENDS="libc++, libcurl"
 TERMUX_PKG_BUILD_DEPENDS="vulkan-headers, opencl-headers, ocl-icd"
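
The build script derives the upstream release tag from TERMUX_PKG_VERSION with shell parameter expansion and pins the tarball with TERMUX_PKG_SHA256. The snippet below is a sketch, not part of this commit, showing how the new source URL and checksum could be reproduced locally; it assumes curl and sha256sum are available on the host.

# Sketch (assumption, not part of the commit): recompute the values changed above.
TERMUX_PKG_VERSION="0.0.0-b6475"

# ${TERMUX_PKG_VERSION#*-} strips everything up to and including the first "-",
# leaving the upstream tag "b6475".
tag="${TERMUX_PKG_VERSION#*-}"
url="https://github.com/ggml-org/llama.cpp/archive/refs/tags/${tag}.tar.gz"

# Download the tagged tarball and print its SHA-256; the result should match
# the new TERMUX_PKG_SHA256 value introduced by this bump.
curl -fsSL -o "llama.cpp-${tag}.tar.gz" "$url"
sha256sum "llama.cpp-${tag}.tar.gz"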