pkgsrc-WIP-changes archive
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]
wip/llama.cpp: Update to 0.0.2.4333
Module Name: pkgsrc-wip
Committed By: Ryo ONODERA <ryoon%NetBSD.org@localhost>
Pushed By: ryoon
Date: Mon Dec 16 22:50:54 2024 +0900
Changeset: 54044f01a799874debf0dbf95806ad0090691b16
Modified Files:
llama.cpp/Makefile
llama.cpp/PLIST
llama.cpp/distinfo
Log Message:
wip/llama.cpp: Update to 0.0.2.4333
* Use openblas64_pthread for multithreaded generation.
openblas64_openmp does not work for me.
* Use new cmake options.
* Disable OpenCL settings to prevent segfault.
To see a diff of this commit:
https://wip.pkgsrc.org/cgi-bin/gitweb.cgi?p=pkgsrc-wip.git;a=commitdiff;h=54044f01a799874debf0dbf95806ad0090691b16
Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.
diffstat:
llama.cpp/Makefile | 16 ++++++++++++----
llama.cpp/PLIST | 10 +++++++++-
llama.cpp/distinfo | 6 +++---
3 files changed, 24 insertions(+), 8 deletions(-)
diffs:
diff --git a/llama.cpp/Makefile b/llama.cpp/Makefile
index 9832a2ea9a..3ad5e2d896 100644
--- a/llama.cpp/Makefile
+++ b/llama.cpp/Makefile
@@ -4,24 +4,32 @@ DISTNAME= llama.cpp-${GITHUB_TAG}
PKGNAME= ${DISTNAME:S/-b/-0.0.2./}
CATEGORIES= devel
MASTER_SITES= ${MASTER_SITE_GITHUB:=ggerganov/}
-GITHUB_TAG= b3878
+GITHUB_TAG= b4233
MAINTAINER= pkgsrc-users%NetBSD.org@localhost
HOMEPAGE= https://github.com/ggerganov/llama.cpp/
COMMENT= LLM inference in C/C++
LICENSE= mit
-USE_TOOLS+= pkg-config
-USE_LANGUAGES= c c++
+USE_TOOLS+= pkg-config
+USE_LANGUAGES= c c++
+USE_CXX_FEATURES= gnu++17
+BLAS_INDEX64= yes
+BLAS_ACCEPTED= openblas_pthread #openblas_openmp
BLAS_C_INTERFACE= yes
+CXXFLAGS+= ${BLAS_INCLUDES}
PKGCONFIG_OVERRIDE+= cmake/llama.pc.in
REPLACE_PYTHON+= *.py */*.py */*/*.py
-CMAKE_CONFIGURE_ARGS+= -DLLAMA_BLAS=1
+CMAKE_CONFIGURE_ARGS+= -DGGML_BLAS=ON
+CMAKE_CONFIGURE_ARGS+= -DGGML_BLAS_VENDOR=OpenBLAS
CMAKE_CONFIGURE_ARGS+= -DBLAS_LIBRARIES=${CBLAS_LIBS:Q}
CMAKE_CONFIGURE_ARGS+= -DLLAMA_BUILD_TESTS=no
+CMAKE_CONFIGURE_ARGS+= -DGGML_OPENCL_EMBED_KERNELS=OFF
+CMAKE_CONFIGURE_ARGS+= -DGGML_OPENCL_PROFILING=OFF
+CMAKE_CONFIGURE_ARGS+= -DGGML_OPENCL_USE_ADRENO_KERNELS=OFF
.include "../../devel/cmake/build.mk"
.include "../../lang/python/application.mk"
diff --git a/llama.cpp/PLIST b/llama.cpp/PLIST
index 23dbcc97c6..e84d43e02a 100644
--- a/llama.cpp/PLIST
+++ b/llama.cpp/PLIST
@@ -1,6 +1,5 @@
@comment $NetBSD$
bin/convert_hf_to_gguf.py
-bin/llama-baby-llama
bin/llama-batched
bin/llama-batched-bench
bin/llama-bench
@@ -30,25 +29,34 @@ bin/llama-perplexity
bin/llama-quantize
bin/llama-quantize-stats
bin/llama-retrieval
+bin/llama-run
bin/llama-save-load-state
bin/llama-server
bin/llama-simple
+bin/llama-simple-chat
bin/llama-speculative
+bin/llama-speculative-simple
bin/llama-tokenize
include/ggml-alloc.h
include/ggml-backend.h
include/ggml-blas.h
include/ggml-cann.h
+include/ggml-cpu.h
include/ggml-cuda.h
include/ggml-kompute.h
include/ggml-metal.h
+include/ggml-opt.h
include/ggml-rpc.h
include/ggml-sycl.h
include/ggml-vulkan.h
include/ggml.h
+include/llama-cpp.h
include/llama.h
lib/cmake/llama/llama-config.cmake
lib/cmake/llama/llama-version.cmake
+lib/libggml-base.so
+lib/libggml-blas.so
+lib/libggml-cpu.so
lib/libggml.so
lib/libllama.so
lib/libllava_shared.so
diff --git a/llama.cpp/distinfo b/llama.cpp/distinfo
index 1a3016ebc5..464f292ff6 100644
--- a/llama.cpp/distinfo
+++ b/llama.cpp/distinfo
@@ -1,5 +1,5 @@
$NetBSD$
-BLAKE2s (llama.cpp-b3878.tar.gz) = 545b05db754c41dbe95e9a8f6d8847cae26ea9522851bf812328bc40e8d2d750
-SHA512 (llama.cpp-b3878.tar.gz) = 6852917eee5b30e257713299dfc85049782182dd760e2076c290684c2cfffd3f54e35e31611b405fe9bde40493f75d6663edfbcace14c6437dd44dab6f8af0b1
-Size (llama.cpp-b3878.tar.gz) = 19153299 bytes
+BLAKE2s (llama.cpp-b4233.tar.gz) = 121e91a4415263b97c70d3e871e333bcef2484673d0c5e038549bdd9de57cc74
+SHA512 (llama.cpp-b4233.tar.gz) = cad9fe82c269a8fd300ce493bbd3150cbf2adcaca219e1309405b4f4b94cc6b592cb1e11048ff30f1311d0535ab0361d696dda7877005272fbcbe9b64cddd961
+Size (llama.cpp-b4233.tar.gz) = 19573237 bytes
Home |
Main Index |
Thread Index |
Old Index