diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 8d419305fefb..454fda191f24 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -34,7 +34,7 @@ jobs:
           - x86_64-manylinux2014
           - x86_64-manylinux2014-cuda117
           - x86_64-manylinux2014-cuda122
-          - x86_64-manylinux2014-vulkan
+          - x86_64-manylinux_2_28-vulkan
           - x86_64-windows-msvc
           - x86_64-windows-msvc-vulkan
           # - x86_64-windows-msvc-cuda117
@@ -59,10 +59,10 @@ jobs:
             binary: x86_64-manylinux2014-cuda122
             container: sameli/manylinux2014_x86_64_cuda_12.2
             build_args: --features binary,cuda
-          - os: buildjet-2vcpu-ubuntu-2204
+          - os: buildjet-4vcpu-ubuntu-2204
             target: x86_64-unknown-linux-gnu
-            binary: x86_64-manylinux2014-vulkan
-            container: quay.io/pypa/manylinux2014_x86_64
+            binary: x86_64-manylinux_2_28-vulkan
+            container: quay.io/pypa/manylinux_2_28_x86_64
             build_args: --features binary,vulkan
             vulkan_sdk: '1.3.239.0'
           - os: windows-latest
diff --git a/crates/llama-cpp-server/build.rs b/crates/llama-cpp-server/build.rs
index 35a4edbb838a..4f43224dfdc0 100644
--- a/crates/llama-cpp-server/build.rs
+++ b/crates/llama-cpp-server/build.rs
@@ -57,7 +57,7 @@ fn main() {
         config.define("AMDGPU_TARGETS", amd_gpu_targets.join(";"));
     }
     if cfg!(feature = "vulkan") {
-        config.define("LLAMA_VULKAN", "ON");
+        config.define("GGML_VULKAN", "ON");
     }

     let out = config.build();