=>> Building misc/py-llama-cpp-python
build started at Sun May 4 07:37:59 BST 2025
port directory: /usr/ports/misc/py-llama-cpp-python
package name: py311-llama-cpp-python-0.3.2
building for: FreeBSD pkg-builder.dan.net.uk 14.2-RELEASE FreeBSD 14.2-RELEASE amd64
maintained by: yuri@FreeBSD.org
Makefile datestamp: -rw-r--r--  1 root  wheel  1736 Dec 29 14:16 /usr/ports/misc/py-llama-cpp-python/Makefile
Ports top last git commit: 5529c5919b
Ports top unclean checkout: yes
Port dir last git commit: 1a35b19e6d
Port dir unclean checkout: no
Poudriere version: poudriere-git-3.4.2
Host OSVERSION: 1402000
Jail OSVERSION: 1402000
Job Id: 07

---Begin Environment---
SHELL=/bin/sh
OSVERSION=1402000
UNAME_v=FreeBSD 14.2-RELEASE
UNAME_r=14.2-RELEASE
BLOCKSIZE=K
MAIL=/var/mail/root
MM_CHARSET=UTF-8
LANG=C.UTF-8
STATUS=1
HOME=/root
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
MAKE_OBJDIR_CHECK_WRITABLE=0
LOCALBASE=/usr/local
USER=root
POUDRIERE_NAME=poudriere-git
LIBEXECPREFIX=/usr/local/libexec/poudriere
POUDRIERE_VERSION=3.4.2
MASTERMNT=/usr/local/poudriere/data/.m/14-amd64-default-dan/ref
LC_COLLATE=C
POUDRIERE_BUILD_TYPE=bulk
PACKAGE_BUILDING=yes
SAVED_TERM=screen
OUTPUT_REDIRECTED_STDERR=4
OUTPUT_REDIRECTED=1
PWD=/usr/local/poudriere/data/.m/14-amd64-default-dan/07/.p
OUTPUT_REDIRECTED_STDOUT=3
P_PORTS_FEATURES=FLAVORS SUBPACKAGES SELECTED_OPTIONS
MASTERNAME=14-amd64-default-dan
SCRIPTPREFIX=/usr/local/share/poudriere
SCRIPTNAME=bulk.sh
OLDPWD=/usr/local/poudriere/data/.m/14-amd64-default-dan/ref/.p/pool
POUDRIERE_PKGNAME=poudriere-git-3.4.2
SCRIPTPATH=/usr/local/share/poudriere/bulk.sh
POUDRIEREPATH=/usr/local/bin/poudriere
---End Environment---

---Begin Poudriere Port Flags/Env---
PORT_FLAGS=
PKGENV=
FLAVOR=py311
MAKE_ARGS= FLAVOR=py311
---End Poudriere Port Flags/Env---

---Begin OPTIONS List---
---End OPTIONS List---

--MAINTAINER--
yuri@FreeBSD.org
--End MAINTAINER--

--CONFIGURE_ARGS--
--End CONFIGURE_ARGS--

--CONFIGURE_ENV--
PYTHON="/usr/local/bin/python3.11"
XDG_DATA_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
XDG_CONFIG_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
XDG_CACHE_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.cache
HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
TMPDIR="/tmp"
PATH=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
PKG_CONFIG_LIBDIR=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.pkgconfig:/usr/local/libdata/pkgconfig:/usr/local/share/pkgconfig:/usr/libdata/pkgconfig
SHELL=/bin/sh
CONFIG_SHELL=/bin/sh
--End CONFIGURE_ENV--

--MAKE_ENV--
XDG_DATA_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
XDG_CONFIG_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
XDG_CACHE_HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.cache
HOME=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311
TMPDIR="/tmp"
PATH=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
PKG_CONFIG_LIBDIR=/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/.pkgconfig:/usr/local/libdata/pkgconfig:/usr/local/share/pkgconfig:/usr/libdata/pkgconfig
MK_DEBUG_FILES=no
MK_KERNEL_SYMBOLS=no
SHELL=/bin/sh
NO_LINT=YES
PREFIX=/usr/local
LOCALBASE=/usr/local
CC="cc"
CFLAGS="-O2 -pipe -fstack-protector-strong -fno-strict-aliasing "
CPP="cpp"
CPPFLAGS=""
LDFLAGS=" -fstack-protector-strong "
LIBS=""
CXX="c++"
CXXFLAGS="-O2 -pipe -fstack-protector-strong -fno-strict-aliasing "
BSD_INSTALL_PROGRAM="install -s -m 555"
BSD_INSTALL_LIB="install -s -m 0644"
BSD_INSTALL_SCRIPT="install -m 555"
BSD_INSTALL_DATA="install -m 0644"
BSD_INSTALL_MAN="install -m 444"
--End MAKE_ENV--

--PLIST_SUB--
PYTHON_INCLUDEDIR=include/python3.11
PYTHON_LIBDIR=lib/python3.11
PYTHON_PLATFORM=freebsd14
PYTHON_SITELIBDIR=lib/python3.11/site-packages
PYTHON_SUFFIX=311
PYTHON_EXT_SUFFIX=.cpython-311
PYTHON_VER=3.11
PYTHON_VERSION=python3.11
PYTHON2="@comment "
PYTHON3=""
OSREL=14.2
PREFIX=%D
LOCALBASE=/usr/local
RESETPREFIX=/usr/local
LIB32DIR=lib
DOCSDIR="share/doc/llama-cpp-python"
EXAMPLESDIR="share/examples/llama-cpp-python"
DATADIR="share/llama-cpp-python"
WWWDIR="www/llama-cpp-python"
ETCDIR="etc/llama-cpp-python"
--End PLIST_SUB--

--SUB_LIST--
PYTHON_INCLUDEDIR=/usr/local/include/python3.11
PYTHON_LIBDIR=/usr/local/lib/python3.11
PYTHON_PLATFORM=freebsd14
PYTHON_SITELIBDIR=/usr/local/lib/python3.11/site-packages
PYTHON_SUFFIX=311
PYTHON_EXT_SUFFIX=.cpython-311
PYTHON_VER=3.11
PYTHON_VERSION=python3.11
PYTHON2="@comment "
PYTHON3=""
PREFIX=/usr/local
LOCALBASE=/usr/local
DATADIR=/usr/local/share/llama-cpp-python
DOCSDIR=/usr/local/share/doc/llama-cpp-python
EXAMPLESDIR=/usr/local/share/examples/llama-cpp-python
WWWDIR=/usr/local/www/llama-cpp-python
ETCDIR=/usr/local/etc/llama-cpp-python
--End SUB_LIST--

---Begin make.conf---
USE_PACKAGE_DEPENDS=yes
BATCH=yes
WRKDIRPREFIX=/wrkdirs
PORTSDIR=/usr/ports
PACKAGES=/packages
DISTDIR=/distfiles
FORCE_PACKAGE=yes
PACKAGE_BUILDING=yes
PACKAGE_BUILDING_FLAVORS=yes
####
####
CCACHE_CPP2=1
WITH_SSP_PORTS=yes
WITH_SSP=yes
#WITH_LTO=yes
DISABLE_LICENSES=yes
LICENSES_ACCEPTED=AGPLv3 APACHE10 APACHE11 APACHE20 ART10 ARTPERL10 ART20 BSD BSD2CLAUSE BSD3CLAUSE BSD4CLAUSE BSL CC0-1.0 CDDL ClArtistic EPL GFDL GMGPL GPLv1 GPLv2 GPLv3 GPLv3RLE ISCL LGPL20 LGPL21 LGPL3 LPPL10 LPPL11 LPPL12 LPPL13 LPPL13a LPPL13b LPPL13c MIT MPL OpenSSL OFL10 OFL11 OWL PostgreSQL PHP202 PHP30 PHP301 PSFL RUBY ZLIB ZPL21 SVM-Light EULA ALASIR Microsoft-exFAT SIMIAN UDEVGAME unknown MTA COMMERCIAL teamspeak
NO_LICENSES_DIALOGS=yes
####
####
NO_IGNORE=yes
DEFAULT_VERSIONS+=ssl=openssl apache=2.4 imagemagick=7 java=21 linux=c7 mysql=8.0 php=8.3 samba=4.19 varnish=7
WITH_SETID_MODE=force
PHP_ZTS=enabled
OPTIONS_UNSET+=OPENJPEG
OPTIONS_UNSET+=GSSAPI_BASE
OPTIONS_SET+=ZTS
OPTIONS_SET+=GSSAPI_NONE
ALLOW_UNSUPPORTED_SYSTEM=yes
WITH_CCACHE_BUILD=yes
CCACHE_DIR=/root/.ccache
#### Misc Poudriere ####
.include "/etc/make.conf.ports_env"
GID=0
UID=0
DISABLE_MAKE_JOBS=poudriere
---End make.conf---

--Resource limits--
cpu time (seconds, -t) unlimited
file size (512-blocks, -f) unlimited
data seg size (kbytes, -d) 33554432
stack size (kbytes, -s) 524288
core file size (512-blocks, -c) unlimited
max memory size (kbytes, -m) unlimited
locked memory (kbytes, -l) unlimited
max user processes (-u) 89999
open files (-n) 8192
virtual mem size (kbytes, -v) unlimited
swap limit (kbytes, -w) unlimited
socket buffer size (bytes, -b) unlimited
pseudo-terminals (-p) unlimited
kqueues (-k) unlimited
umtx shared locks (-o) unlimited
pipebuf (-y) unlimited
--End resource limits--

===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/sbin/pkg - not found
===> Installing existing package /packages/All/pkg-2.1.2.pkg
[pkg-builder.dan.net.uk] Installing pkg-2.1.2...
[pkg-builder.dan.net.uk] Extracting pkg-2.1.2: .......... done
===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/sbin/pkg - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Fetching all distfiles required by py311-llama-cpp-python-0.3.2 for building
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Fetching all distfiles required by py311-llama-cpp-python-0.3.2 for building
=> SHA256 Checksum OK for abetlen-llama-cpp-python-v0.3.2_GH0.tar.gz.
=> SHA256 Checksum OK for ggerganov-llama.cpp-74d73dc_GH0.tar.gz.
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Fetching all distfiles required by py311-llama-cpp-python-0.3.2 for building
===> Extracting for py311-llama-cpp-python-0.3.2
=> SHA256 Checksum OK for abetlen-llama-cpp-python-v0.3.2_GH0.tar.gz.
=> SHA256 Checksum OK for ggerganov-llama.cpp-74d73dc_GH0.tar.gz.
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Patching for py311-llama-cpp-python-0.3.2
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===> py311-llama-cpp-python-0.3.2 depends on package: py311-scikit-build-core>0 - not found
===> Installing existing package /packages/All/py311-scikit-build-core-0.11.1.pkg
[pkg-builder.dan.net.uk] Installing py311-scikit-build-core-0.11.1...
[pkg-builder.dan.net.uk] `-- Installing py311-packaging-24.2...
[pkg-builder.dan.net.uk] | `-- Installing python311-3.11.12...
[pkg-builder.dan.net.uk] | | `-- Installing gettext-runtime-0.23.1...
[pkg-builder.dan.net.uk] | | `-- Installing indexinfo-0.3.1_1...
[pkg-builder.dan.net.uk] | | `-- Extracting indexinfo-0.3.1_1: . done
[pkg-builder.dan.net.uk] | | `-- Extracting gettext-runtime-0.23.1: .......... done
[pkg-builder.dan.net.uk] | | `-- Installing libffi-3.4.8...
[pkg-builder.dan.net.uk] | | `-- Extracting libffi-3.4.8: .......... done
[pkg-builder.dan.net.uk] | | `-- Installing mpdecimal-4.0.0...
[pkg-builder.dan.net.uk] | | `-- Extracting mpdecimal-4.0.0: .......... done
[pkg-builder.dan.net.uk] | | `-- Installing openssl-3.0.16,1...
[pkg-builder.dan.net.uk] | | `-- Extracting openssl-3.0.16,1: .......... done
[pkg-builder.dan.net.uk] | | `-- Installing readline-8.2.13_2...
[pkg-builder.dan.net.uk] | | `-- Extracting readline-8.2.13_2: .......... done
[pkg-builder.dan.net.uk] | `-- Extracting python311-3.11.12: .......... done
[pkg-builder.dan.net.uk] `-- Extracting py311-packaging-24.2: .......... done
[pkg-builder.dan.net.uk] `-- Installing py311-pathspec-0.12.1...
[pkg-builder.dan.net.uk] `-- Extracting py311-pathspec-0.12.1: .......... done
[pkg-builder.dan.net.uk] Extracting py311-scikit-build-core-0.11.1: .......... done
=====
Message from python311-3.11.12:

--
Note that some standard Python modules are provided as separate ports as they require additional dependencies. They are available as:

py311-gdbm       databases/py-gdbm@py311
py311-sqlite3    databases/py-sqlite3@py311
py311-tkinter    x11-toolkits/py-tkinter@py311
===> py311-llama-cpp-python-0.3.2 depends on package: py311-scikit-build-core>0 - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===> py311-llama-cpp-python-0.3.2 depends on executable: cmake - not found
===> Installing existing package /packages/All/cmake-core-3.31.6.pkg
[pkg-builder.dan.net.uk] Installing cmake-core-3.31.6...
[pkg-builder.dan.net.uk] `-- Installing expat-2.7.1...
[pkg-builder.dan.net.uk] `-- Extracting expat-2.7.1: .......... done
[pkg-builder.dan.net.uk] `-- Installing jsoncpp-1.9.6_1...
[pkg-builder.dan.net.uk] `-- Extracting jsoncpp-1.9.6_1: .......... done
[pkg-builder.dan.net.uk] `-- Installing libidn2-2.3.8...
[pkg-builder.dan.net.uk] | `-- Installing libunistring-1.3...
[pkg-builder.dan.net.uk] | `-- Extracting libunistring-1.3: .......... done
[pkg-builder.dan.net.uk] `-- Extracting libidn2-2.3.8: .......... done
[pkg-builder.dan.net.uk] `-- Installing libuv-1.51.0...
[pkg-builder.dan.net.uk] `-- Extracting libuv-1.51.0: .......... done
[pkg-builder.dan.net.uk] `-- Installing rhash-1.4.4_1...
[pkg-builder.dan.net.uk] `-- Extracting rhash-1.4.4_1: .......... done
[pkg-builder.dan.net.uk] Extracting cmake-core-3.31.6: .......... done
===> py311-llama-cpp-python-0.3.2 depends on executable: cmake - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/bin/python3.11 - found
===> py311-llama-cpp-python-0.3.2 depends on package: py311-build>=0 - not found
===> Installing existing package /packages/All/py311-build-1.2.2_2.pkg
[pkg-builder.dan.net.uk] Installing py311-build-1.2.2_2...
[pkg-builder.dan.net.uk] `-- Installing py311-pyproject-hooks-1.2.0...
[pkg-builder.dan.net.uk] `-- Extracting py311-pyproject-hooks-1.2.0: .......... done
[pkg-builder.dan.net.uk] Extracting py311-build-1.2.2_2: .......... done
===> py311-llama-cpp-python-0.3.2 depends on package: py311-build>=0 - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===> py311-llama-cpp-python-0.3.2 depends on package: py311-installer>=0 - not found
===> Installing existing package /packages/All/py311-installer-0.7.0.pkg
[pkg-builder.dan.net.uk] Installing py311-installer-0.7.0...
[pkg-builder.dan.net.uk] Extracting py311-installer-0.7.0: .......... done
===> py311-llama-cpp-python-0.3.2 depends on package: py311-installer>=0 - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/bin/ccache - not found
===> Installing existing package /packages/All/ccache-3.7.12_8.pkg
[pkg-builder.dan.net.uk] Installing ccache-3.7.12_8...
[pkg-builder.dan.net.uk] Extracting ccache-3.7.12_8: ......... done
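
The python311-3.11.12 message above means the interpreter installed in the jail does not bundle the gdbm, sqlite3 and tkinter extension modules; they come from the listed separate ports. A purely illustrative Python check (not part of this build) for which of them are importable on a given FreeBSD host:

    import importlib

    # Module name -> FreeBSD port that provides it (from the pkg message above).
    optional_modules = {
        "dbm.gnu": "databases/py-gdbm@py311",
        "sqlite3": "databases/py-sqlite3@py311",
        "tkinter": "x11-toolkits/py-tkinter@py311",
    }

    for module, port in optional_modules.items():
        try:
            importlib.import_module(module)
            print(f"{module}: available")
        except ImportError:
            print(f"{module}: not installed - provided by {port}")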
Create compiler links...
create symlink for cc
create symlink for cc (world)
create symlink for c++
create symlink for c++ (world)
create symlink for CC
create symlink for CC (world)
create symlink for clang
create symlink for clang (world)
create symlink for clang++
create symlink for clang++ (world)
=====
Message from ccache-3.7.12_8:

--
NOTE: Please read /usr/local/share/doc/ccache/ccache-howto-freebsd.txt for information on using ccache with FreeBSD ports and src.
===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/bin/ccache - found
===> Returning to build of py311-llama-cpp-python-0.3.2
===========================================================================
===================================================
===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0
===> py311-llama-cpp-python-0.3.2 depends on shared library: libvulkan.so - not found
===> Installing existing package /packages/All/vulkan-loader-1.4.313.pkg
[pkg-builder.dan.net.uk] Installing vulkan-loader-1.4.313...
[pkg-builder.dan.net.uk] `-- Installing libX11-1.8.12,1...
[pkg-builder.dan.net.uk] | `-- Installing libxcb-1.17.0...
[pkg-builder.dan.net.uk] | | `-- Installing libXau-1.0.11...
[pkg-builder.dan.net.uk] | | `-- Extracting libXau-1.0.11: .......... done
[pkg-builder.dan.net.uk] | | `-- Installing libXdmcp-1.1.5...
[pkg-builder.dan.net.uk] | | `-- Installing xorgproto-2024.1...
[pkg-builder.dan.net.uk] | | `-- Extracting xorgproto-2024.1: .......... done
[pkg-builder.dan.net.uk] | | `-- Extracting libXdmcp-1.1.5: ...... done
[pkg-builder.dan.net.uk] | `-- Extracting libxcb-1.17.0: .......... done
[pkg-builder.dan.net.uk] `-- Extracting libX11-1.8.12,1: .......... done
[pkg-builder.dan.net.uk] `-- Installing libXrandr-1.5.4...
[pkg-builder.dan.net.uk] | `-- Installing libXext-1.3.6,1...
[pkg-builder.dan.net.uk] | `-- Extracting libXext-1.3.6,1: .......... done
[pkg-builder.dan.net.uk] | `-- Installing libXrender-0.9.12...
[pkg-builder.dan.net.uk] | `-- Extracting libXrender-0.9.12: ....... done
[pkg-builder.dan.net.uk] `-- Extracting libXrandr-1.5.4: .......... done
[pkg-builder.dan.net.uk] `-- Installing wayland-1.23.1...
[pkg-builder.dan.net.uk] | `-- Installing libepoll-shim-0.0.20240608...
[pkg-builder.dan.net.uk] | `-- Extracting libepoll-shim-0.0.20240608: .......... done
[pkg-builder.dan.net.uk] | `-- Installing libxml2-2.11.9...
[pkg-builder.dan.net.uk] | `-- Extracting libxml2-2.11.9: .......... done
[pkg-builder.dan.net.uk] `-- Extracting wayland-1.23.1: .......... done
[pkg-builder.dan.net.uk] Extracting vulkan-loader-1.4.313: ....... done
=====
Message from wayland-1.23.1:

--
Wayland requires XDG_RUNTIME_DIR to be defined to a path that will contain "wayland-%d" unix(4) sockets. This is usually handled by consolekit2 (via ck-launch-session) or pam_xdg (via login).
===> py311-llama-cpp-python-0.3.2 depends on shared library: libvulkan.so - found (/usr/local/lib/libvulkan.so)
===> Returning to build of py311-llama-cpp-python-0.3.2
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Configuring for py311-llama-cpp-python-0.3.2
===========================================================================
===================================================
===== env: NO_DEPENDS=yes USER=root UID=0 GID=0
===> Building for py311-llama-cpp-python-0.3.2
* Getting build dependencies for wheel...
* Building wheel...
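
The two "* ..." lines above are printed by the py311-build front end: it asks scikit-build-core (the PEP 517 backend used by llama-cpp-python) for its build requirements and then builds the wheel, which is what produces the CMake configure and compile output that follows. A minimal sketch of the same flow using build's Python API; the source directory path is hypothetical:

    from build import ProjectBuilder

    # Hypothetical checkout of the llama-cpp-python 0.3.2 sources.
    builder = ProjectBuilder("/path/to/llama-cpp-python-0.3.2")

    # "* Getting build dependencies for wheel..."
    print(builder.get_requires_for_build("wheel"))

    # "* Building wheel..." - scikit-build-core drives CMake under the hood.
    wheel = builder.build("wheel", output_directory="dist")
    print(wheel)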
*** scikit-build-core 0.11.1 using CMake 3.31.6 (wheel) ***
*** Configuring CMake...
loading initial cache file /tmp/tmp_ba3024x/build/CMakeInit.txt
-- The C compiler identification is Clang 18.1.6
-- The CXX compiler identification is Clang 18.1.6
-- Detecting C compiler ABI info
-- Detecting C compiler ABI info - done
-- Check for working C compiler: /usr/local/libexec/ccache/cc - skipped
-- Detecting C compile features
-- Detecting C compile features - done
-- Detecting CXX compiler ABI info
-- Detecting CXX compiler ABI info - done
-- Check for working CXX compiler: /usr/local/libexec/ccache/c++ - skipped
-- Detecting CXX compile features
-- Detecting CXX compile features - done
-- Could NOT find Git (missing: GIT_EXECUTABLE)
CMake Warning at vendor/llama.cpp/cmake/build-info.cmake:14 (message):
  Git not found. Build info will not be accurate.
Call Stack (most recent call first):
  vendor/llama.cpp/CMakeLists.txt:77 (include)

-- Performing Test CMAKE_HAVE_LIBC_PTHREAD
-- Performing Test CMAKE_HAVE_LIBC_PTHREAD - Failed
-- Check if compiler accepts -pthread
-- Check if compiler accepts -pthread - yes
-- Found Threads: TRUE
-- ccache found, compilation results will be cached. Disable with GGML_CCACHE=OFF.
-- CMAKE_SYSTEM_PROCESSOR: amd64
-- Found OpenMP_C: -fopenmp=libomp (found version "5.1")
-- Found OpenMP_CXX: -fopenmp=libomp (found version "5.1")
-- Found OpenMP: TRUE (found version "5.1")
-- OpenMP found
-- Using llamafile
-- Unknown architecture
-- Using runtime weight conversion of Q4_0 to Q4_0_x_x to enable optimized GEMM/GEMV kernels
-- Including CPU backend
CMake Warning at vendor/llama.cpp/ggml/src/ggml-amx/CMakeLists.txt:106 (message):
  AMX requires x86 and gcc version > 11.0. Turning off GGML_AMX.

-- Looking for pthread_create in pthreads
-- Looking for pthread_create in pthreads - not found
-- Looking for pthread_create in pthread
-- Looking for pthread_create in pthread - found
CMake Warning at vendor/llama.cpp/common/CMakeLists.txt:30 (message):
  Git repository not found; to enable automatic generation of build info, make sure Git is installed and the project is a Git repository.

CMake Warning (dev) at CMakeLists.txt:13 (install):
  Target llama has PUBLIC_HEADER files but no PUBLIC_HEADER DESTINATION.
Call Stack (most recent call first):
  CMakeLists.txt:80 (llama_cpp_python_install_target)
This warning is for project developers. Use -Wno-dev to suppress it.

CMake Warning (dev) at CMakeLists.txt:21 (install):
  Target llama has PUBLIC_HEADER files but no PUBLIC_HEADER DESTINATION.
Call Stack (most recent call first):
  CMakeLists.txt:80 (llama_cpp_python_install_target)
This warning is for project developers. Use -Wno-dev to suppress it.

CMake Warning (dev) at CMakeLists.txt:13 (install):
  Target ggml has PUBLIC_HEADER files but no PUBLIC_HEADER DESTINATION.
Call Stack (most recent call first):
  CMakeLists.txt:81 (llama_cpp_python_install_target)
This warning is for project developers. Use -Wno-dev to suppress it.

CMake Warning (dev) at CMakeLists.txt:21 (install):
  Target ggml has PUBLIC_HEADER files but no PUBLIC_HEADER DESTINATION.
Call Stack (most recent call first):
  CMakeLists.txt:81 (llama_cpp_python_install_target)
This warning is for project developers. Use -Wno-dev to suppress it.

-- Configuring done (1.8s)
-- Generating done (0.0s)
-- Build files have been written to: /tmp/tmp_ba3024x/build
*** Building project with Unix Makefiles...
Change Dir: '/tmp/tmp_ba3024x/build' Run Build Command(s): /usr/local/bin/cmake -E env VERBOSE=1 /usr/bin/make -f Makefile /usr/local/bin/cmake -S/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 -B/tmp/tmp_ba3024x/build --check-build-system CMakeFiles/Makefile.cmake 0 /usr/local/bin/cmake -E cmake_progress_start /tmp/tmp_ba3024x/build/CMakeFiles /tmp/tmp_ba3024x/build//CMakeFiles/progress.marks /usr/bin/make -f CMakeFiles/Makefile2 all /usr/bin/make -f vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/build.make vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/build.make vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/build [ 2%] Building C object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml.c.o -MF CMakeFiles/ggml-base.dir/ggml.c.o.d -o CMakeFiles/ggml-base.dir/ggml.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml.c [ 5%] Building C object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-alloc.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-alloc.c.o -MF CMakeFiles/ggml-base.dir/ggml-alloc.c.o.d -o CMakeFiles/ggml-base.dir/ggml-alloc.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-alloc.c [ 7%] Building CXX object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-backend.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-backend.cpp.o -MF CMakeFiles/ggml-base.dir/ggml-backend.cpp.o.d -o CMakeFiles/ggml-base.dir/ggml-backend.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-backend.cpp [ 10%] Building CXX object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-threading.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-threading.cpp.o -MF CMakeFiles/ggml-base.dir/ggml-threading.cpp.o.d -o CMakeFiles/ggml-base.dir/ggml-threading.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-threading.cpp [ 12%] Building C object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-quants.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-quants.c.o -MF CMakeFiles/ggml-base.dir/ggml-quants.c.o.d -o CMakeFiles/ggml-base.dir/ggml-quants.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-quants.c [ 15%] Building C object vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-aarch64.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/cc -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_base_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -pthread -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml-base.dir/ggml-aarch64.c.o -MF CMakeFiles/ggml-base.dir/ggml-aarch64.c.o.d -o CMakeFiles/ggml-base.dir/ggml-aarch64.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-aarch64.c [ 17%] Linking CXX shared library libggml-base.so cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/ggml-base.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -fPIC -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -Xlinker --dependency-file=CMakeFiles/ggml-base.dir/link.d -fstack-protector-strong -shared -Wl,-soname,libggml-base.so -o libggml-base.so "CMakeFiles/ggml-base.dir/ggml.c.o" "CMakeFiles/ggml-base.dir/ggml-alloc.c.o" "CMakeFiles/ggml-base.dir/ggml-backend.cpp.o" "CMakeFiles/ggml-base.dir/ggml-threading.cpp.o" "CMakeFiles/ggml-base.dir/ggml-quants.c.o" "CMakeFiles/ggml-base.dir/ggml-aarch64.c.o" -Wl,-rpath,"\$ORIGIN" -lm -pthread [ 17%] Built target ggml-base /usr/bin/make -f vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/build.make vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/build.make vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/build [ 20%] Building C object vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu.c.o cd 
/tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && ccache /usr/local/libexec/ccache/cc -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_cpu_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -fopenmp=libomp -MD -MT vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu.c.o -MF CMakeFiles/ggml-cpu.dir/ggml-cpu.c.o.d -o CMakeFiles/ggml-cpu.dir/ggml-cpu.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/ggml-cpu.c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/ggml-cpu.c:245:9: warning: 'CACHE_LINE_SIZE' macro redefined [-Wmacro-redefined] 245 | #define CACHE_LINE_SIZE 64 | ^ /usr/include/machine/param.h:92:9: note: previous definition is here 92 | #define CACHE_LINE_SIZE (1 << CACHE_LINE_SHIFT) | ^ 1 warning generated. [ 22%] Building CXX object vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_cpu_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -fopenmp=libomp -MD -MT vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu.cpp.o -MF CMakeFiles/ggml-cpu.dir/ggml-cpu.cpp.o.d -o CMakeFiles/ggml-cpu.dir/ggml-cpu.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/ggml-cpu.cpp [ 25%] Building C object vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu-aarch64.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && ccache /usr/local/libexec/ccache/cc -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_cpu_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -fopenmp=libomp -MD -MT vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu-aarch64.c.o -MF CMakeFiles/ggml-cpu.dir/ggml-cpu-aarch64.c.o.d -o CMakeFiles/ggml-cpu.dir/ggml-cpu-aarch64.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/ggml-cpu-aarch64.c [ 27%] Building C object vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu-quants.c.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && ccache /usr/local/libexec/ccache/cc -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_cpu_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu11 -fPIC -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -Werror=implicit-int -Werror=implicit-function-declaration -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wdouble-promotion -fopenmp=libomp -MD -MT vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/ggml-cpu-quants.c.o -MF CMakeFiles/ggml-cpu.dir/ggml-cpu-quants.c.o.d -o CMakeFiles/ggml-cpu.dir/ggml-cpu-quants.c.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/ggml-cpu-quants.c [ 30%] Building CXX object vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/llamafile/sgemm.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_BUILD -DGGML_BACKEND_SHARED -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU_AARCH64 -DGGML_USE_LLAMAFILE -DGGML_USE_OPENMP -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_cpu_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/.. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -fopenmp=libomp -MD -MT vendor/llama.cpp/ggml/src/ggml-cpu/CMakeFiles/ggml-cpu.dir/llamafile/sgemm.cpp.o -MF CMakeFiles/ggml-cpu.dir/llamafile/sgemm.cpp.o.d -o CMakeFiles/ggml-cpu.dir/llamafile/sgemm.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-cpu/llamafile/sgemm.cpp [ 32%] Linking CXX shared library libggml-cpu.so cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/ggml-cpu.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -fPIC -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -Xlinker --dependency-file=CMakeFiles/ggml-cpu.dir/link.d -fstack-protector-strong -shared -Wl,-soname,libggml-cpu.so -o libggml-cpu.so "CMakeFiles/ggml-cpu.dir/ggml-cpu.c.o" "CMakeFiles/ggml-cpu.dir/ggml-cpu.cpp.o" "CMakeFiles/ggml-cpu.dir/ggml-cpu-aarch64.c.o" "CMakeFiles/ggml-cpu.dir/ggml-cpu-quants.c.o" "CMakeFiles/ggml-cpu.dir/llamafile/sgemm.cpp.o" -Wl,-rpath,"\$ORIGIN" ../libggml-base.so /usr/lib/libomp.so [ 32%] Built target ggml-cpu /usr/bin/make -f vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/build.make vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/build.make vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/build [ 35%] Building CXX object vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/ggml-backend-reg.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_BUILD -DGGML_SCHED_MAX_COPIES=4 -DGGML_SHARED -DGGML_USE_CPU -D_XOPEN_SOURCE=600 -D__BSD_VISIBLE -Dggml_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -std=gnu++11 -fPIC -Wmissing-declarations -Wmissing-noreturn -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi -MD -MT vendor/llama.cpp/ggml/src/CMakeFiles/ggml.dir/ggml-backend-reg.cpp.o -MF CMakeFiles/ggml.dir/ggml-backend-reg.cpp.o.d -o CMakeFiles/ggml.dir/ggml-backend-reg.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/ggml-backend-reg.cpp [ 37%] Linking CXX shared library libggml.so cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/ggml.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -fPIC -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -Xlinker --dependency-file=CMakeFiles/ggml.dir/link.d 
-fstack-protector-strong -shared -Wl,-soname,libggml.so -o libggml.so "CMakeFiles/ggml.dir/ggml-backend-reg.cpp.o" -Wl,-rpath,"\$ORIGIN" ggml-cpu/libggml-cpu.so libggml-base.so [ 37%] Built target ggml /usr/bin/make -f vendor/llama.cpp/src/CMakeFiles/llama.dir/build.make vendor/llama.cpp/src/CMakeFiles/llama.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/src /tmp/tmp_ba3024x/build/vendor/llama.cpp/src/CMakeFiles/llama.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/src/CMakeFiles/llama.dir/build.make vendor/llama.cpp/src/CMakeFiles/llama.dir/build [ 40%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/llama.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/llama.cpp.o -MF CMakeFiles/llama.dir/llama.cpp.o.d -o CMakeFiles/llama.dir/llama.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/llama.cpp [ 42%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-vocab.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-vocab.cpp.o -MF CMakeFiles/llama.dir/llama-vocab.cpp.o.d -o CMakeFiles/llama.dir/llama-vocab.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/llama-vocab.cpp [ 45%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-grammar.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-grammar.cpp.o -MF CMakeFiles/llama.dir/llama-grammar.cpp.o.d -o CMakeFiles/llama.dir/llama-grammar.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/llama-grammar.cpp [ 47%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-sampling.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/llama-sampling.cpp.o -MF CMakeFiles/llama.dir/llama-sampling.cpp.o.d -o CMakeFiles/llama.dir/llama-sampling.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/llama-sampling.cpp [ 50%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/unicode.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/unicode.cpp.o -MF CMakeFiles/llama.dir/unicode.cpp.o.d -o CMakeFiles/llama.dir/unicode.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/unicode.cpp
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/unicode.cpp:204:31: warning: 'codecvt_utf8' is deprecated [-Wdeprecated-declarations]
  204 |     std::wstring_convert<std::codecvt_utf8<wchar_t>> conv;
      | ^
/usr/include/c++/v1/codecvt:194:28: note: 'codecvt_utf8' has been explicitly marked deprecated here
  194 | class _LIBCPP_TEMPLATE_VIS _LIBCPP_DEPRECATED_IN_CXX17 codecvt_utf8 : public __codecvt_utf8<_Elem> {
      | ^
/usr/include/c++/v1/__config:1004:41: note: expanded from macro '_LIBCPP_DEPRECATED_IN_CXX17'
 1004 | # define _LIBCPP_DEPRECATED_IN_CXX17 _LIBCPP_DEPRECATED
      | ^
/usr/include/c++/v1/__config:977:49: note: expanded from macro '_LIBCPP_DEPRECATED'
  977 | # define _LIBCPP_DEPRECATED __attribute__((__deprecated__))
      | ^
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/unicode.cpp:204:10: warning: 'wstring_convert<std::codecvt_utf8<wchar_t>>' is deprecated [-Wdeprecated-declarations]
  204 |     std::wstring_convert<std::codecvt_utf8<wchar_t>> conv;
      | ^
/usr/include/c++/v1/locale:3114:28: note: 'wstring_convert<std::codecvt_utf8<wchar_t>>' has been explicitly marked deprecated here
 3114 | class _LIBCPP_TEMPLATE_VIS _LIBCPP_DEPRECATED_IN_CXX17 wstring_convert {
      | ^
/usr/include/c++/v1/__config:1004:41: note: expanded from macro '_LIBCPP_DEPRECATED_IN_CXX17'
 1004 | # define _LIBCPP_DEPRECATED_IN_CXX17 _LIBCPP_DEPRECATED
      | ^
/usr/include/c++/v1/__config:977:49: note: expanded from macro '_LIBCPP_DEPRECATED'
  977 | # define _LIBCPP_DEPRECATED __attribute__((__deprecated__))
      | ^
2 warnings generated.
[ 52%] Building CXX object vendor/llama.cpp/src/CMakeFiles/llama.dir/unicode-data.cpp.o
cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -Dllama_EXPORTS -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/.
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/src/CMakeFiles/llama.dir/unicode-data.cpp.o -MF CMakeFiles/llama.dir/unicode-data.cpp.o.d -o CMakeFiles/llama.dir/unicode-data.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/unicode-data.cpp [ 55%] Linking CXX shared library libllama.so cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/src && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/llama.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -fPIC -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -Xlinker --dependency-file=CMakeFiles/llama.dir/link.d -fstack-protector-strong -shared -Wl,-soname,libllama.so -o libllama.so CMakeFiles/llama.dir/llama.cpp.o "CMakeFiles/llama.dir/llama-vocab.cpp.o" "CMakeFiles/llama.dir/llama-grammar.cpp.o" "CMakeFiles/llama.dir/llama-sampling.cpp.o" CMakeFiles/llama.dir/unicode.cpp.o "CMakeFiles/llama.dir/unicode-data.cpp.o" -Wl,-rpath,"\$ORIGIN" ../ggml/src/libggml.so ../ggml/src/ggml-cpu/libggml-cpu.so ../ggml/src/libggml-base.so [ 55%] Built target llama /usr/bin/make -f vendor/llama.cpp/common/CMakeFiles/build_info.dir/build.make vendor/llama.cpp/common/CMakeFiles/build_info.dir/depend [ 57%] Generating build details from Git cd /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp && /usr/local/bin/cmake -DMSVC= -DCMAKE_C_COMPILER_VERSION=18.1.6 -DCMAKE_C_COMPILER_ID=Clang -DCMAKE_VS_PLATFORM_NAME= -DCMAKE_C_COMPILER=/usr/local/libexec/ccache/cc -P /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/cmake/build-info-gen-cpp.cmake -- Could NOT find Git (missing: GIT_EXECUTABLE) Hint: The project() command has not yet been called. It sets up system-specific search paths. CMake Warning at cmake/build-info.cmake:14 (message): Git not found. Build info will not be accurate. 
Call Stack (most recent call first): common/cmake/build-info-gen-cpp.cmake:1 (include) cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/common /tmp/tmp_ba3024x/build/vendor/llama.cpp/common/CMakeFiles/build_info.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/common/CMakeFiles/build_info.dir/build.make vendor/llama.cpp/common/CMakeFiles/build_info.dir/build [ 60%] Building CXX object vendor/llama.cpp/common/CMakeFiles/build_info.dir/build-info.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -MD -MT vendor/llama.cpp/common/CMakeFiles/build_info.dir/build-info.cpp.o -MF CMakeFiles/build_info.dir/build-info.cpp.o.d -o CMakeFiles/build_info.dir/build-info.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/build-info.cpp [ 60%] Built target build_info /usr/bin/make -f vendor/llama.cpp/common/CMakeFiles/common.dir/build.make vendor/llama.cpp/common/CMakeFiles/common.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/common /tmp/tmp_ba3024x/build/vendor/llama.cpp/common/CMakeFiles/common.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/common/CMakeFiles/common.dir/build.make vendor/llama.cpp/common/CMakeFiles/common.dir/build [ 62%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/arg.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/arg.cpp.o -MF CMakeFiles/common.dir/arg.cpp.o.d -o CMakeFiles/common.dir/arg.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/arg.cpp [ 65%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/common.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/common.cpp.o -MF CMakeFiles/common.dir/common.cpp.o.d -o CMakeFiles/common.dir/common.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.cpp
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.cpp:655:35: warning: 'codecvt_utf8' is deprecated [-Wdeprecated-declarations]
  655 |         std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> converter;
      | ^
/usr/include/c++/v1/codecvt:194:28: note: 'codecvt_utf8' has been explicitly marked deprecated here
  194 | class _LIBCPP_TEMPLATE_VIS _LIBCPP_DEPRECATED_IN_CXX17 codecvt_utf8 : public __codecvt_utf8<_Elem> {
      | ^
/usr/include/c++/v1/__config:1004:41: note: expanded from macro '_LIBCPP_DEPRECATED_IN_CXX17'
 1004 | # define _LIBCPP_DEPRECATED_IN_CXX17 _LIBCPP_DEPRECATED
      | ^
/usr/include/c++/v1/__config:977:49: note: expanded from macro '_LIBCPP_DEPRECATED'
  977 | # define _LIBCPP_DEPRECATED __attribute__((__deprecated__))
      | ^
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.cpp:655:14: warning: 'wstring_convert<std::codecvt_utf8<char32_t>, char32_t>' is deprecated [-Wdeprecated-declarations]
  655 |         std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> converter;
      | ^
/usr/include/c++/v1/locale:3114:28: note: 'wstring_convert<std::codecvt_utf8<char32_t>, char32_t>' has been explicitly marked deprecated here
 3114 | class _LIBCPP_TEMPLATE_VIS _LIBCPP_DEPRECATED_IN_CXX17 wstring_convert {
      | ^
/usr/include/c++/v1/__config:1004:41: note: expanded from macro '_LIBCPP_DEPRECATED_IN_CXX17'
 1004 | # define _LIBCPP_DEPRECATED_IN_CXX17 _LIBCPP_DEPRECATED
      | ^
/usr/include/c++/v1/__config:977:49: note: expanded from macro '_LIBCPP_DEPRECATED'
  977 | # define _LIBCPP_DEPRECATED __attribute__((__deprecated__))
      | ^
In file included from /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.cpp:5:
In file included from /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.h:8:
In file included from /usr/include/c++/v1/vector:325:
In file included from /usr/include/c++/v1/__format/formatter_bool.h:20:
In file included from /usr/include/c++/v1/__format/formatter_integral.h:35:
/usr/include/c++/v1/locale:3257:1: warning: 'wstring_convert<std::codecvt_utf8<char32_t>, char32_t>' is deprecated [-Wdeprecated-declarations]
 3257 | wstring_convert<_Codecvt, _Elem, _WideAlloc, _ByteAlloc>::to_bytes(const _Elem* __frm, const _Elem* __frm_end) {
      | ^
/usr/include/c++/v1/locale:3161:12: note: in instantiation of member function 'std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t>::to_bytes' requested here
 3161 |   return to_bytes(__wstr.data(), __wstr.data() + __wstr.size());
      | ^
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/common.cpp:660:52: note: in instantiation of member function 'std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t>::to_bytes' requested here
  660 |         std::string filename_reencoded = converter.to_bytes(filename_utf32);
      | ^
/usr/include/c++/v1/locale:3114:28: note: 'wstring_convert<std::codecvt_utf8<char32_t>, char32_t>' has been explicitly marked deprecated here
 3114 | class _LIBCPP_TEMPLATE_VIS _LIBCPP_DEPRECATED_IN_CXX17 wstring_convert {
      | ^
/usr/include/c++/v1/__config:1004:41: note: expanded from macro '_LIBCPP_DEPRECATED_IN_CXX17' 1004 | # define _LIBCPP_DEPRECATED_IN_CXX17 _LIBCPP_DEPRECATED | ^ /usr/include/c++/v1/__config:977:49: note: expanded from macro '_LIBCPP_DEPRECATED' 977 | # define _LIBCPP_DEPRECATED __attribute__((__deprecated__)) | ^ 3 warnings generated. [ 67%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/console.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/console.cpp.o -MF CMakeFiles/common.dir/console.cpp.o.d -o CMakeFiles/common.dir/console.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/console.cpp [ 70%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/json-schema-to-grammar.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/json-schema-to-grammar.cpp.o -MF CMakeFiles/common.dir/json-schema-to-grammar.cpp.o.d -o CMakeFiles/common.dir/json-schema-to-grammar.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/json-schema-to-grammar.cpp [ 72%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/log.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/log.cpp.o -MF CMakeFiles/common.dir/log.cpp.o.d -o CMakeFiles/common.dir/log.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/log.cpp [ 75%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/ngram-cache.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/ngram-cache.cpp.o -MF CMakeFiles/common.dir/ngram-cache.cpp.o.d -o CMakeFiles/common.dir/ngram-cache.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/ngram-cache.cpp [ 77%] Building CXX object vendor/llama.cpp/common/CMakeFiles/common.dir/sampling.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -pthread -MD -MT vendor/llama.cpp/common/CMakeFiles/common.dir/sampling.cpp.o -MF CMakeFiles/common.dir/sampling.cpp.o.d -o CMakeFiles/common.dir/sampling.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/sampling.cpp [ 80%] Linking CXX static library libcommon.a cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && /usr/local/bin/cmake -P CMakeFiles/common.dir/cmake_clean_target.cmake cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/common && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/common.dir/link.txt --verbose=1 /usr/bin/llvm-ar qc libcommon.a CMakeFiles/common.dir/arg.cpp.o CMakeFiles/common.dir/common.cpp.o CMakeFiles/common.dir/console.cpp.o "CMakeFiles/common.dir/json-schema-to-grammar.cpp.o" CMakeFiles/common.dir/log.cpp.o "CMakeFiles/common.dir/ngram-cache.cpp.o" CMakeFiles/common.dir/sampling.cpp.o "CMakeFiles/build_info.dir/build-info.cpp.o" /usr/bin/llvm-ranlib libcommon.a [ 80%] Built target common /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/build [ 82%] Building CXX object vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/llava.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../../common -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -Wno-cast-qual -MD -MT vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/llava.cpp.o -MF CMakeFiles/llava.dir/llava.cpp.o.d -o CMakeFiles/llava.dir/llava.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/llava.cpp [ 85%] Building CXX object vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/clip.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_BUILD -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../.. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../../common -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fPIC -Wno-cast-qual -MD -MT vendor/llama.cpp/examples/llava/CMakeFiles/llava.dir/clip.cpp.o -MF CMakeFiles/llava.dir/clip.cpp.o.d -o CMakeFiles/llava.dir/clip.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/clip.cpp [ 85%] Built target llava /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava_static.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava_static.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava/CMakeFiles/llava_static.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava_static.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava_static.dir/build [ 87%] Linking CXX static library libllava_static.a cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && /usr/local/bin/cmake -P CMakeFiles/llava_static.dir/cmake_clean_target.cmake cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/llava_static.dir/link.txt --verbose=1 /usr/bin/llvm-ar qc libllava_static.a CMakeFiles/llava.dir/llava.cpp.o CMakeFiles/llava.dir/clip.cpp.o /usr/bin/llvm-ranlib libllava_static.a [ 87%] Built target llava_static /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava_shared.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava_shared.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" 
/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava/CMakeFiles/llava_shared.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llava_shared.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llava_shared.dir/build [ 90%] Linking CXX shared library libllava.so cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/llava_shared.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -fPIC -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -Xlinker --dependency-file=CMakeFiles/llava_shared.dir/link.d -fstack-protector-strong -shared -Wl,-soname,libllava.so -o libllava.so CMakeFiles/llava.dir/llava.cpp.o CMakeFiles/llava.dir/clip.cpp.o -Wl,-rpath,"\$ORIGIN" ../../src/libllama.so ../../ggml/src/libggml.so ../../ggml/src/ggml-cpu/libggml-cpu.so ../../ggml/src/libggml-base.so [ 90%] Built target llava_shared /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/build [ 92%] Building CXX object vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/llava-cli.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../.. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../../common -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -pthread -MD -MT vendor/llama.cpp/examples/llava/CMakeFiles/llama-llava-cli.dir/llava-cli.cpp.o -MF CMakeFiles/llama-llava-cli.dir/llava-cli.cpp.o.d -o CMakeFiles/llama-llava-cli.dir/llava-cli.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/llava-cli.cpp [ 95%] Linking CXX executable llama-llava-cli cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/llama-llava-cli.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fstack-protector-strong -Xlinker --dependency-file=CMakeFiles/llama-llava-cli.dir/link.d "CMakeFiles/llama-llava-cli.dir/llava-cli.cpp.o" CMakeFiles/llava.dir/llava.cpp.o CMakeFiles/llava.dir/clip.cpp.o -o llama-llava-cli -Wl,-rpath,/tmp/tmp_ba3024x/build/vendor/llama.cpp/src:/tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src:/tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu: ../../common/libcommon.a -lpthread ../../src/libllama.so ../../ggml/src/libggml.so ../../ggml/src/ggml-cpu/libggml-cpu.so ../../ggml/src/libggml-base.so [ 95%] Built target llama-llava-cli /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/depend cd /tmp/tmp_ba3024x/build && /usr/local/bin/cmake -E cmake_depends "Unix Makefiles" /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2 /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/DependInfo.cmake "--color=" /usr/bin/make -f vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/build.make vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/build [ 97%] Building CXX object vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/minicpmv-cli.cpp.o cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && ccache /usr/local/libexec/ccache/c++ -DGGML_BACKEND_SHARED -DGGML_SHARED -DGGML_USE_CPU -DLLAMA_SHARED -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/common/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/src/../include -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/. -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../.. 
-I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/../../common -I/wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/ggml/include -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -pthread -MD -MT vendor/llama.cpp/examples/llava/CMakeFiles/llama-minicpmv-cli.dir/minicpmv-cli.cpp.o -MF CMakeFiles/llama-minicpmv-cli.dir/minicpmv-cli.cpp.o.d -o CMakeFiles/llama-minicpmv-cli.dir/minicpmv-cli.cpp.o -c /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/vendor/llama.cpp/examples/llava/minicpmv-cli.cpp [100%] Linking CXX executable llama-minicpmv-cli cd /tmp/tmp_ba3024x/build/vendor/llama.cpp/examples/llava && /usr/local/bin/cmake -E cmake_link_script CMakeFiles/llama-minicpmv-cli.dir/link.txt --verbose=1 /usr/local/libexec/ccache/c++ -O2 -pipe -fstack-protector-strong -fno-strict-aliasing -O3 -DNDEBUG -fstack-protector-strong -Xlinker --dependency-file=CMakeFiles/llama-minicpmv-cli.dir/link.d "CMakeFiles/llama-minicpmv-cli.dir/minicpmv-cli.cpp.o" CMakeFiles/llava.dir/llava.cpp.o CMakeFiles/llava.dir/clip.cpp.o -o llama-minicpmv-cli -Wl,-rpath,/tmp/tmp_ba3024x/build/vendor/llama.cpp/src:/tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src:/tmp/tmp_ba3024x/build/vendor/llama.cpp/ggml/src/ggml-cpu: ../../common/libcommon.a -lpthread ../../src/libllama.so ../../ggml/src/libggml.so ../../ggml/src/ggml-cpu/libggml-cpu.so ../../ggml/src/libggml-base.so [100%] Built target llama-minicpmv-cli /usr/local/bin/cmake -E cmake_progress_start /tmp/tmp_ba3024x/build/CMakeFiles 0 *** Installing project into wheel... -- Install configuration: "Release" -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libggml-cpu.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libggml.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cpu.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-alloc.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-backend.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-blas.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cann.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cuda.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-kompute.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-metal.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-rpc.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-sycl.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-vulkan.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libggml.so -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cpu.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-alloc.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-backend.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-blas.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cann.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-cuda.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-kompute.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-metal.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-rpc.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-sycl.h -- Up-to-date: /tmp/tmp_ba3024x/wheel/platlib/include/ggml-vulkan.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libggml-base.so -- 
Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libllama.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/include/llama.h -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/cmake/llama/llama-config.cmake -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/cmake/llama/llama-version.cmake -- Installing: /tmp/tmp_ba3024x/wheel/platlib/bin/convert_hf_to_gguf.py -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/pkgconfig/llama.pc -- Installing: /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/llama_cpp/lib/libllama.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/llama_cpp/lib/libllama.so -- Installing: /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/llama_cpp/lib/libggml.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/llama_cpp/lib/libggml.so -- Installing: /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/llama_cpp/lib/libggml-base.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/llama_cpp/lib/libggml-base.so -- Installing: /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/llama_cpp/lib/libggml-cpu.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/llama_cpp/lib/libggml-cpu.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/lib/libllava.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/bin/llama-llava-cli -- Set non-toolchain portion of runtime path of "/tmp/tmp_ba3024x/wheel/platlib/bin/llama-llava-cli" to "" -- Installing: /tmp/tmp_ba3024x/wheel/platlib/bin/llama-minicpmv-cli -- Set non-toolchain portion of runtime path of "/tmp/tmp_ba3024x/wheel/platlib/bin/llama-minicpmv-cli" to "" -- Installing: /wrkdirs/usr/ports/misc/py-llama-cpp-python/work-py311/llama-cpp-python-0.3.2/llama_cpp/lib/libllava.so -- Installing: /tmp/tmp_ba3024x/wheel/platlib/llama_cpp/lib/libllava.so *** Making wheel... *** Created llama_cpp_python-0.3.2-cp311-cp311-freebsd_14_2_release_amd64.whl Successfully built llama_cpp_python-0.3.2-cp311-cp311-freebsd_14_2_release_amd64.whl =========================================================================== =================================================== ===== env: USE_PACKAGE_DEPENDS_ONLY=1 USER=root UID=0 GID=0 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-diskcache>=5.6.1 - not found ===> Installing existing package /packages/All/py311-diskcache-5.6.3_2.pkg [pkg-builder.dan.net.uk] Installing py311-diskcache-5.6.3_2... [pkg-builder.dan.net.uk] `-- Installing py311-django42-4.2.20... [pkg-builder.dan.net.uk] | `-- Installing py311-asgiref-3.8.1_1... [pkg-builder.dan.net.uk] | `-- Extracting py311-asgiref-3.8.1_1: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-sqlite3-3.11.12_10... [pkg-builder.dan.net.uk] | | `-- Installing sqlite3-3.46.1_1,1... [pkg-builder.dan.net.uk] | | `-- Installing libedit-3.1.20250104,1... [pkg-builder.dan.net.uk] | | `-- Extracting libedit-3.1.20250104,1: .......... done [pkg-builder.dan.net.uk] | | `-- Extracting sqlite3-3.46.1_1,1: ......... done [pkg-builder.dan.net.uk] | `-- Extracting py311-sqlite3-3.11.12_10: ...... done [pkg-builder.dan.net.uk] | `-- Installing py311-sqlparse-0.5.3... [pkg-builder.dan.net.uk] | `-- Extracting py311-sqlparse-0.5.3: .......... done [pkg-builder.dan.net.uk] `-- Extracting py311-django42-4.2.20: .......... done [pkg-builder.dan.net.uk] Extracting py311-diskcache-5.6.3_2: .......... 
done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-diskcache>=5.6.1 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-Jinja2>=2.11.3 - not found ===> Installing existing package /packages/All/py311-Jinja2-3.1.6.pkg [pkg-builder.dan.net.uk] Installing py311-Jinja2-3.1.6... [pkg-builder.dan.net.uk] `-- Installing py311-Babel-2.17.0_1... [pkg-builder.dan.net.uk] `-- Extracting py311-Babel-2.17.0_1: .......... done [pkg-builder.dan.net.uk] `-- Installing py311-markupsafe-2.1.5_1... [pkg-builder.dan.net.uk] `-- Extracting py311-markupsafe-2.1.5_1: .......... done [pkg-builder.dan.net.uk] Extracting py311-Jinja2-3.1.6: .......... done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-Jinja2>=2.11.3 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-numpy>=1.16,1<1.27,1 - not found ===> Installing existing package /packages/All/py311-numpy-1.26.4_6,1.pkg [pkg-builder.dan.net.uk] Installing py311-numpy-1.26.4_6,1... [pkg-builder.dan.net.uk] `-- Installing gcc13-13.3.0_2... [pkg-builder.dan.net.uk] | `-- Installing binutils-2.44,1... [pkg-builder.dan.net.uk] | | `-- Installing zstd-1.5.7... [pkg-builder.dan.net.uk] | | `-- Installing liblz4-1.10.0,1... [pkg-builder.dan.net.uk] | | `-- Extracting liblz4-1.10.0,1: .......... done [pkg-builder.dan.net.uk] | | `-- Extracting zstd-1.5.7: .......... done [pkg-builder.dan.net.uk] | `-- Extracting binutils-2.44,1: .......... done [pkg-builder.dan.net.uk] | `-- Installing gmp-6.3.0... [pkg-builder.dan.net.uk] | `-- Extracting gmp-6.3.0: .......... done [pkg-builder.dan.net.uk] | `-- Installing mpc-1.3.1_1... [pkg-builder.dan.net.uk] | | `-- Installing mpfr-4.2.2,1... [pkg-builder.dan.net.uk] | | `-- Extracting mpfr-4.2.2,1: .......... done [pkg-builder.dan.net.uk] | `-- Extracting mpc-1.3.1_1: ...... done [pkg-builder.dan.net.uk] `-- Extracting gcc13-13.3.0_2: .......... done create symlink for gcc13 create symlink for gcc13 (world) create symlink for g++13 create symlink for g++13 (world) create symlink for cpp13 create symlink for cpp13 (world) [pkg-builder.dan.net.uk] `-- Installing openblas-0.3.29_1,2... [pkg-builder.dan.net.uk] `-- Extracting openblas-0.3.29_1,2: .......... done [pkg-builder.dan.net.uk] `-- Installing suitesparse-umfpack-6.3.5_1... [pkg-builder.dan.net.uk] | `-- Installing suitesparse-amd-3.3.3... [pkg-builder.dan.net.uk] | | `-- Installing suitesparse-config-7.10.2... [pkg-builder.dan.net.uk] | | `-- Extracting suitesparse-config-7.10.2: .......... done [pkg-builder.dan.net.uk] | `-- Extracting suitesparse-amd-3.3.3: .......... done [pkg-builder.dan.net.uk] | `-- Installing suitesparse-camd-3.3.3... [pkg-builder.dan.net.uk] | `-- Extracting suitesparse-camd-3.3.3: .......... done [pkg-builder.dan.net.uk] | `-- Installing suitesparse-ccolamd-3.3.4... [pkg-builder.dan.net.uk] | `-- Extracting suitesparse-ccolamd-3.3.4: .......... done [pkg-builder.dan.net.uk] | `-- Installing suitesparse-cholmod-5.3.2... [pkg-builder.dan.net.uk] | | `-- Installing suitesparse-colamd-3.3.4... [pkg-builder.dan.net.uk] | | `-- Extracting suitesparse-colamd-3.3.4: .......... done [pkg-builder.dan.net.uk] | `-- Extracting suitesparse-cholmod-5.3.2: .......... done [pkg-builder.dan.net.uk] `-- Extracting suitesparse-umfpack-6.3.5_1: .......... done [pkg-builder.dan.net.uk] Extracting py311-numpy-1.26.4_6,1: .......... 
done ===== Message from gcc13-13.3.0_2: -- To ensure binaries built with this toolchain find appropriate versions of the necessary run-time libraries, you may want to link using -Wl,-rpath=/usr/local/lib/gcc13 For ports leveraging USE_GCC, USES=compiler, or USES=fortran this happens transparently. ===> py311-llama-cpp-python-0.3.2 depends on package: py311-numpy>=1.16,1<1.27,1 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-typing-extensions>=4.5.0 - not found ===> Installing existing package /packages/All/py311-typing-extensions-4.13.2.pkg [pkg-builder.dan.net.uk] Installing py311-typing-extensions-4.13.2... [pkg-builder.dan.net.uk] Extracting py311-typing-extensions-4.13.2: ....... done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-typing-extensions>=4.5.0 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-fastapi>=0.100.0 - not found ===> Installing existing package /packages/All/py311-fastapi-0.115.12.pkg [pkg-builder.dan.net.uk] Installing py311-fastapi-0.115.12... [pkg-builder.dan.net.uk] `-- Installing py311-pydantic2-2.11.3... [pkg-builder.dan.net.uk] | `-- Installing py311-annotated-types-0.7.0... [pkg-builder.dan.net.uk] | `-- Extracting py311-annotated-types-0.7.0: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-pydantic-core-2.33.1... [pkg-builder.dan.net.uk] | `-- Extracting py311-pydantic-core-2.33.1: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-typing-inspection-0.4.0... [pkg-builder.dan.net.uk] | `-- Extracting py311-typing-inspection-0.4.0: .......... done [pkg-builder.dan.net.uk] `-- Extracting py311-pydantic2-2.11.3: .......... done [pkg-builder.dan.net.uk] `-- Installing py311-starlette-0.46.2... [pkg-builder.dan.net.uk] | `-- Installing py311-anyio-4.9.0... [pkg-builder.dan.net.uk] | | `-- Installing py311-idna-3.10... [pkg-builder.dan.net.uk] | | `-- Extracting py311-idna-3.10: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-sniffio-1.3.1... [pkg-builder.dan.net.uk] | | `-- Extracting py311-sniffio-1.3.1: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-truststore-0.10.1... [pkg-builder.dan.net.uk] | | `-- Extracting py311-truststore-0.10.1: .......... done [pkg-builder.dan.net.uk] | `-- Extracting py311-anyio-4.9.0: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-httpx-0.28.1... [pkg-builder.dan.net.uk] | | `-- Installing py311-certifi-2025.1.31... [pkg-builder.dan.net.uk] | | `-- Extracting py311-certifi-2025.1.31: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-h2-4.1.0_1... [pkg-builder.dan.net.uk] | | `-- Installing py311-hpack-4.0.0_1... [pkg-builder.dan.net.uk] | | `-- Extracting py311-hpack-4.0.0_1: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-hyperframe-6.0.0_1... [pkg-builder.dan.net.uk] | | `-- Extracting py311-hyperframe-6.0.0_1: .......... done [pkg-builder.dan.net.uk] | | `-- Extracting py311-h2-4.1.0_1: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-httpcore-1.0.9... [pkg-builder.dan.net.uk] | | `-- Installing py311-h11-0.16.0... [pkg-builder.dan.net.uk] | | `-- Extracting py311-h11-0.16.0: .......... done [pkg-builder.dan.net.uk] | | `-- Extracting py311-httpcore-1.0.9: .......... done [pkg-builder.dan.net.uk] | | `-- Installing py311-socksio-1.0.0_1... [pkg-builder.dan.net.uk] | | `-- Extracting py311-socksio-1.0.0_1: .......... 
done [pkg-builder.dan.net.uk] | `-- Extracting py311-httpx-0.28.1: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-itsdangerous-2.2.0... [pkg-builder.dan.net.uk] | `-- Extracting py311-itsdangerous-2.2.0: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-python-multipart-0.0.20... [pkg-builder.dan.net.uk] | `-- Extracting py311-python-multipart-0.0.20: .......... done [pkg-builder.dan.net.uk] | `-- Installing py311-pyyaml-6.0.1_1... [pkg-builder.dan.net.uk] | | `-- Installing libyaml-0.2.5... [pkg-builder.dan.net.uk] | | `-- Extracting libyaml-0.2.5: ...... done [pkg-builder.dan.net.uk] | `-- Extracting py311-pyyaml-6.0.1_1: .......... done [pkg-builder.dan.net.uk] `-- Extracting py311-starlette-0.46.2: .......... done [pkg-builder.dan.net.uk] Extracting py311-fastapi-0.115.12: .......... done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-fastapi>=0.100.0 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-pydantic-settings>=2.0.1 - not found ===> Installing existing package /packages/All/py311-pydantic-settings-2.6.0.pkg [pkg-builder.dan.net.uk] Installing py311-pydantic-settings-2.6.0... [pkg-builder.dan.net.uk] `-- Installing py311-python-dotenv-1.1.0... [pkg-builder.dan.net.uk] | `-- Installing py311-click-8.1.7_1... [pkg-builder.dan.net.uk] | `-- Extracting py311-click-8.1.7_1: .......... done [pkg-builder.dan.net.uk] `-- Extracting py311-python-dotenv-1.1.0: .......... done [pkg-builder.dan.net.uk] Extracting py311-pydantic-settings-2.6.0: .......... done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-pydantic-settings>=2.0.1 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-sse-starlette>=1.6.1 - not found ===> Installing existing package /packages/All/py311-sse-starlette-2.3.3.pkg [pkg-builder.dan.net.uk] Installing py311-sse-starlette-2.3.3... [pkg-builder.dan.net.uk] `-- Installing py311-uvicorn-0.34.1... [pkg-builder.dan.net.uk] `-- Extracting py311-uvicorn-0.34.1: .......... done [pkg-builder.dan.net.uk] Extracting py311-sse-starlette-2.3.3: .......... done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-sse-starlette>=1.6.1 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-starlette-context>=0.3.6 - not found ===> Installing existing package /packages/All/py311-starlette-context-0.4.0.pkg [pkg-builder.dan.net.uk] Installing py311-starlette-context-0.4.0... [pkg-builder.dan.net.uk] Extracting py311-starlette-context-0.4.0: .......... 
done ===> py311-llama-cpp-python-0.3.2 depends on package: py311-starlette-context>=0.3.6 - found ===> Returning to build of py311-llama-cpp-python-0.3.2 ===> py311-llama-cpp-python-0.3.2 depends on package: py311-uvicorn>=0.22.0 - found ===> py311-llama-cpp-python-0.3.2 depends on package: py311-pyyaml>=5.1 - found ===> py311-llama-cpp-python-0.3.2 depends on file: /usr/local/bin/python3.11 - found =========================================================================== =================================================== ===== env: NO_DEPENDS=yes USER=root UID=0 GID=0 ===> Staging for py311-llama-cpp-python-0.3.2 ===> Generating temporary packing list ====> Compressing man pages (compress-man) =========================================================================== =================================================== ===== env: 'PKG_NOTES=build_timestamp ports_top_git_hash ports_top_checkout_unclean port_git_hash port_checkout_unclean built_by' 'PKG_NOTE_build_timestamp=2025-05-04T06:37:59+0000' 'PKG_NOTE_ports_top_git_hash=5529c5919b' 'PKG_NOTE_ports_top_checkout_unclean=yes' 'PKG_NOTE_port_git_hash=1a35b19e6d' 'PKG_NOTE_port_checkout_unclean=no' 'PKG_NOTE_built_by=poudriere-git-3.4.2' NO_DEPENDS=yes USER=root UID=0 GID=0 ===> Building packages for py311-llama-cpp-python-0.3.2 ===> Building py311-llama-cpp-python-0.3.2 =========================================================================== =>> Cleaning up wrkdir ===> Cleaning for py311-llama-cpp-python-0.3.2 build of misc/py-llama-cpp-python@py311 | py311-llama-cpp-python-0.3.2 ended at Sun May 4 07:44:50 BST 2025 build time: 00:06:52
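Note on verifying the result: the port built and packaged cleanly; the only compiler diagnostics were the three -Wdeprecated-declarations warnings from libc++'s codecvt_utf8/wstring_convert shown above, which do not affect the resulting package. A minimal smoke test of the packaged bindings might look like the sketch below. This is illustrative only and not taken from the build log; it assumes the package is installed for Python 3.11, that the high-level llama_cpp.Llama API matches upstream llama-cpp-python 0.3.2, and MODEL_PATH is a placeholder for a locally available GGUF model file.

# smoke_test.py -- illustrative sketch, not part of the poudriere output above
import llama_cpp

# Confirm the module and its bundled shared libraries (llama_cpp/lib/*.so) load.
print("llama-cpp-python version:", llama_cpp.__version__)

# Hypothetical model location; substitute any local GGUF model.
MODEL_PATH = "/path/to/model.gguf"

# Load the model and request a short completion via the high-level API.
llm = llama_cpp.Llama(model_path=MODEL_PATH, n_ctx=512, verbose=False)
result = llm("Q: Name a BSD operating system. A:", max_tokens=16)
print(result["choices"][0]["text"])

Running this with /usr/local/bin/python3.11 and getting a completion back indicates that the installed libllama.so, libggml*.so, and libllava.so libraries resolve correctly at runtime.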