bump(main/llama-cpp): 0.0.0-b6181

This commit has been automatically submitted by GitHub Actions.
termux-pacman-bot
2025-08-16 12:46:42 +00:00
parent e8740b14c2
commit e62dc18edd

@@ -2,9 +2,9 @@ TERMUX_PKG_HOMEPAGE=https://github.com/ggml-org/llama.cpp
 TERMUX_PKG_DESCRIPTION="LLM inference in C/C++"
 TERMUX_PKG_LICENSE="MIT"
 TERMUX_PKG_MAINTAINER=@termux
-TERMUX_PKG_VERSION="0.0.0-b6178"
+TERMUX_PKG_VERSION="0.0.0-b6181"
 TERMUX_PKG_SRCURL=https://github.com/ggml-org/llama.cpp/archive/refs/tags/${TERMUX_PKG_VERSION#*-}.tar.gz
-TERMUX_PKG_SHA256=31aced37f72623abde50f396e6ed6a5a62573d7ae0b7a6b52bd9cbdb31f57a1e
+TERMUX_PKG_SHA256=fcf74b8f39d93bd749f2961582f98f2c22acc8a6a927141d287bd538a36af5f9
 TERMUX_PKG_AUTO_UPDATE=true
 TERMUX_PKG_DEPENDS="libc++, libcurl"
 TERMUX_PKG_BUILD_DEPENDS="vulkan-headers, opencl-headers, ocl-icd"
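
The source URL derives the upstream tag by stripping everything up to the first "-" from the package version, so "0.0.0-b6181" resolves to the tag "b6181". A minimal sketch of how a bump like this can be reproduced by hand (the commands and output file name below are illustrative, not part of the Termux build system):

# Illustrative: expand ${TERMUX_PKG_VERSION#*-} and recompute TERMUX_PKG_SHA256 for the new tag.
TERMUX_PKG_VERSION="0.0.0-b6181"
# "#*-" removes the shortest prefix ending in "-", leaving the upstream tag.
tag="${TERMUX_PKG_VERSION#*-}"   # -> b6181
url="https://github.com/ggml-org/llama.cpp/archive/refs/tags/${tag}.tar.gz"
# Download the release tarball and print its SHA-256, which becomes the new TERMUX_PKG_SHA256.
curl -fsSL "$url" -o "llama.cpp-${tag}.tar.gz"
sha256sum "llama.cpp-${tag}.tar.gz"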