   push:
     branches:
       - master
-    paths: ['.github/workflows/build.yml', '.github/workflows/build-linux-cross.yml', '**/CMakeLists.txt', '**/.cmake', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.cuh', '**/*.swift', '**/*.m', '**/*.metal', '**/*.comp']
+    paths: [
+      '.github/workflows/build.yml',
+      '.github/workflows/build-linux-cross.yml',
+      '.github/workflows/build-cmake-pkg.yml',
+      '**/CMakeLists.txt',
+      '**/.cmake',
+      '**/*.h',
+      '**/*.hpp',
+      '**/*.c',
+      '**/*.cpp',
+      '**/*.cu',
+      '**/*.cuh',
+      '**/*.swift',
+      '**/*.m',
+      '**/*.metal',
+      '**/*.comp'
+    ]
+
   pull_request:
     types: [opened, synchronize, reopened]
-    paths: ['.github/workflows/build.yml', '.github/workflows/build-linux-cross.yml', '**/CMakeLists.txt', '**/.cmake', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.cuh', '**/*.swift', '**/*.m', '**/*.metal', '**/*.comp']
+    paths: [
+      '.github/workflows/build.yml',
+      '.github/workflows/build-linux-cross.yml',
+      '.github/workflows/build-cmake-pkg.yml',
+      '**/CMakeLists.txt',
+      '**/.cmake',
+      '**/*.h',
+      '**/*.hpp',
+      '**/*.c',
+      '**/*.cpp',
+      '**/*.cu',
+      '**/*.cuh',
+      '**/*.swift',
+      '**/*.m',
+      '**/*.metal',
+      '**/*.comp'
+    ]

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
   cancel-in-progress: true

+env:
+  GGML_NLOOP: 3
+  GGML_N_THREADS: 1
+
 jobs:
   macOS-latest-cmake-arm64:
     runs-on: macos-14
             -DCMAKE_BUILD_RPATH="@loader_path" \
             -DLLAMA_FATAL_WARNINGS=ON \
             -DGGML_METAL_USE_BF16=ON \
-            -DGGML_METAL_EMBED_LIBRARY=ON \
+            -DGGML_METAL_EMBED_LIBRARY=OFF \
+            -DGGML_METAL_SHADER_DEBUG=ON \
             -DGGML_RPC=ON
           cmake --build build --config Release -j $(sysctl -n hw.logicalcpu)
           cmake --build build --config Release --target libchatllm -j $(sysctl -n hw.logicalcpu)
@@ -284,6 +322,7 @@ jobs:
         id: cmake_test
         run: |
           cd build
+          export GGML_VK_VISIBLE_DEVICES=0
           # This is using llvmpipe and runs slower than other backends
           ctest -L main --verbose --timeout 3600

@@ -455,6 +494,9 @@ jobs:
   build-linux-cross:
     uses: ./.github/workflows/build-linux-cross.yml

+  build-cmake-pkg:
+    uses: ./.github/workflows/build-cmake-pkg.yml
+
   macOS-latest-cmake-ios:
     runs-on: macos-latest
     if: false
@@ -609,8 +651,7 @@ jobs:
           ./build-xcframework.sh

   windows-msys2:
-    runs-on: windows-latest
-    if: false
+    runs-on: windows-2025

     strategy:
       fail-fast: false
@@ -660,29 +701,31 @@ jobs:
           cmake --build build --config ${{ matrix.build }} -j $(nproc)

   windows-latest-cmake:
-    runs-on: windows-latest
-    if: false
+    runs-on: windows-2025

     env:
       OPENBLAS_VERSION: 0.3.23
       SDE_VERSION: 9.33.0-2024-01-07
-      VULKAN_VERSION: 1.4.309.0
+      VULKAN_VERSION: 1.4.313.2

     strategy:
       matrix:
         include:
-          - build: 'cpu-x64'
-            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF'
+          - build: 'cpu-x64 (static)'
+            arch: 'x64'
+            defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DBUILD_SHARED_LIBS=OFF'
           - build: 'openblas-x64'
+            arch: 'x64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"'
           - build: 'vulkan-x64'
-            defines: '-DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_VULKAN=ON'
+            arch: 'x64'
+            defines: '-DCMAKE_BUILD_TYPE=Release -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_VULKAN=ON'
           - build: 'llvm-arm64'
+            arch: 'arm64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON'
           - build: 'llvm-arm64-opencl-adreno'
+            arch: 'arm64'
             defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/arm64-windows-llvm.cmake -DCMAKE_PREFIX_PATH="$env:RUNNER_TEMP/opencl-arm64-release" -DGGML_OPENCL=ON -DGGML_OPENCL_USE_ADRENO_KERNELS=ON'
-          # - build: 'kompute-x64'
-          #   defines: '-G "Ninja Multi-Config" -D CMAKE_TOOLCHAIN_FILE=cmake/x64-windows-llvm.cmake -DGGML_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DGGML_RPC=ON -DGGML_BACKEND_DL=ON -DGGML_CPU_ALL_VARIANTS=ON -DGGML_OPENMP=OFF -DGGML_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON'

     steps:
       - name: Clone
@@ -696,12 +739,6 @@ jobs:
           variant: ccache
           evict-old-files: 1d

-      - name: Clone Kompute submodule
-        id: clone_kompute
-        if: ${{ matrix.build == 'kompute-x64' }}
-        run: |
-          git submodule update --init ggml/src/ggml-kompute/kompute
-
       - name: Download OpenBLAS
         id: get_openblas
         if: ${{ matrix.build == 'openblas-x64' }}
@@ -717,9 +754,9 @@ jobs:

       - name: Install Vulkan SDK
         id: get_vulkan
-        if: ${{ matrix.build == 'kompute-x64' || matrix.build == 'vulkan-x64' }}
+        if: ${{ matrix.build == 'vulkan-x64' }}
         run: |
-          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/VulkanSDK-${env:VULKAN_VERSION}-Installer.exe"
+          curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/vulkansdk-windows-X64-${env:VULKAN_VERSION}.exe"
           & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install
           Add-Content $env:GITHUB_ENV "VULKAN_SDK=C:\VulkanSDK\${env:VULKAN_VERSION}"
           Add-Content $env:GITHUB_PATH "C:\VulkanSDK\${env:VULKAN_VERSION}\bin"
@@ -752,6 +789,8 @@ jobs:
       - name: libCURL
         id: get_libcurl
         uses: ./.github/actions/windows-setup-curl
+        with:
+          architecture: ${{ matrix.arch == 'x64' && 'win64' || 'win64a' }}

       - name: Build
         id: cmake_build
@@ -762,6 +801,7 @@ jobs:
             -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
           cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS}
           cmake --build build --config Release -j ${env:NUMBER_OF_PROCESSORS} --target libchatllm
+          cp $env:CURL_PATH/bin/libcurl-*.dll build/bin/Release

       - name: Add libopenblas.dll
         id: add_libopenblas_dll
@@ -772,7 +812,7 @@ jobs:

       - name: Test
         id: cmake_test
-        if: ${{ matrix.build != 'llvm-arm64' && matrix.build != 'llvm-arm64-opencl-adreno' }}
+        if: ${{ matrix.arch == 'x64' }}
         run: |
           cd build
           ctest -L main -C Release --verbose --timeout 900
@@ -826,11 +866,10 @@ jobs:

   windows-2022-cmake-cuda:
     runs-on: windows-2022
-    if: false

     strategy:
       matrix:
-        cuda: ['12.4', '11.7']
+        cuda: ['12.4']

     steps:
       - name: Clone
@@ -864,7 +903,7 @@ jobs:
         env:
           CURL_PATH: ${{ steps.get_libcurl.outputs.curl_path }}
         run: |
-          call "C:\Program Files (x86)\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
+          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" x64
           cmake -S . -B build -G "Ninja Multi-Config" ^
             -DLLAMA_BUILD_SERVER=ON ^
             -DGGML_NATIVE=OFF ^
@@ -877,9 +916,42 @@ jobs:
           cmake --build build --config Release -j %NINJA_JOBS% -t ggml
           cmake --build build --config Release

+  windows-latest-cmake-sycl:
+    runs-on: windows-2022
+
+    defaults:
+      run:
+        shell: bash
+
+    env:
+      WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/7cd9bba0-7aab-4e30-b3ae-2221006a4a05/intel-oneapi-base-toolkit-2025.1.1.34_offline.exe
+      WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel:intel.oneapi.win.dnnl:intel.oneapi.win.tbb.devel
+      ONEAPI_ROOT: "C:/Program Files (x86)/Intel/oneAPI"
+    steps:
+      - name: Clone
+        id: checkout
+        uses: actions/checkout@v4
+
+      - name: ccache
+        uses: hendrikmuhs/[email protected]
+        with:
+          key: windows-latest-cmake-sycl
+          variant: ccache
+          evict-old-files: 1d
+
+      - name: Install
+        run: |
+          scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL
+
+      # TODO: add libcurl support; we will also need to modify win-build-sycl.bat to accept user-specified args
+
+      - name: Build
+        id: cmake_build
+        run: examples/sycl/win-build-sycl.bat
+
   windows-latest-cmake-hip:
     if: ${{ github.event.inputs.create_release != 'true' }}
-    runs-on: windows-latest
+    runs-on: windows-2022

     steps:
       - name: Clone
@@ -933,3 +1005,101 @@ jobs:
             -DGGML_RPC=ON `
             -DCURL_LIBRARY="$env:CURL_PATH/lib/libcurl.dll.a" -DCURL_INCLUDE_DIR="$env:CURL_PATH/include"
           cmake --build build -j ${env:NUMBER_OF_PROCESSORS}
+
+  ios-xcode-build:
+    runs-on: macos-latest
+    if: false
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Build
+        id: cmake_build
+        run: |
+          sysctl -a
+          cmake -B build -G Xcode \
+            -DGGML_METAL_USE_BF16=ON \
+            -DGGML_METAL_EMBED_LIBRARY=ON \
+            -DLLAMA_CURL=OFF \
+            -DLLAMA_BUILD_EXAMPLES=OFF \
+            -DLLAMA_BUILD_TOOLS=OFF \
+            -DLLAMA_BUILD_TESTS=OFF \
+            -DLLAMA_BUILD_SERVER=OFF \
+            -DCMAKE_SYSTEM_NAME=iOS \
+            -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 \
+            -DCMAKE_XCODE_ATTRIBUTE_DEVELOPMENT_TEAM=ggml
+          cmake --build build --config Release -j $(sysctl -n hw.logicalcpu) -- CODE_SIGNING_ALLOWED=NO
+
+      - name: xcodebuild for swift package
+        id: xcodebuild
+        run: |
+          ./build-xcframework.sh
+
+      - name: Build Xcode project
+        run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' FRAMEWORK_FOLDER_PATH=./build-ios build
+
+  android-build:
+    runs-on: ubuntu-latest
+    if: false
+
+    steps:
+      - name: Clone
+        uses: actions/checkout@v4
+
+      - name: ccache
+        uses: hendrikmuhs/[email protected]
+        with:
+          key: android-build
+          evict-old-files: 1d
+
+      - name: Set up JDK
+        uses: actions/setup-java@v3
+        with:
+          java-version: 17
+          distribution: zulu
+
+      - name: Setup Android SDK
+        uses: android-actions/setup-android@v3
+        with:
+          log-accepted-android-sdk-licenses: false
+
+      - name: Build
+        run: |
+          cd examples/llama.android
+          ./gradlew build --no-daemon
+
+  openEuler-latest-cmake-cann:
+    if: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'Ascend NPU') }}
+    defaults:
+      run:
+        shell: bash -el {0}
+    strategy:
+      matrix:
+        arch: [x86, aarch64]
+        cann:
+          - '8.1.RC1.alpha001-910b-openeuler22.03-py3.10'
+        device:
+          - 'ascend910b3'
+        build:
+          - 'Release'
+    runs-on: ${{ matrix.arch == 'aarch64' && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
+    container: ascendai/cann:${{ matrix.cann }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Dependencies
+        run: |
+          yum update -y
+          yum install -y git gcc gcc-c++ make cmake libcurl-devel
+
+      - name: Build
+        run: |
+          export LD_LIBRARY_PATH=${ASCEND_TOOLKIT_HOME}/lib64:${ASCEND_TOOLKIT_HOME}/$(uname -m)-linux/devlib/:${LD_LIBRARY_PATH}
+
+          cmake -S . -B build \
+            -DCMAKE_BUILD_TYPE=${{ matrix.build }} \
+            -DGGML_CANN=on \
+            -DSOC_TYPE=${{ matrix.device }}
+          cmake --build build -j $(nproc)