Commit bf488d4

chore(ci): fix download llama from upstream and drop cuda11.7 release
1 parent: ffe6df5

File tree

2 files changed: +13 -12 lines changed

  .github/workflows/release.yml
  ci/package-from-upstream.sh


.github/workflows/release.yml

Lines changed: 3 additions & 9 deletions
@@ -33,7 +33,6 @@ jobs:
           - aarch64-apple-darwin
           - x86_64-manylinux_2_28
           - x86_64-manylinux_2_28-cuda123
-          - x86_64-windows-msvc
         include:
           - os: macos-latest
             target: aarch64-apple-darwin
@@ -49,11 +48,6 @@
             binary: x86_64-manylinux_2_28-cuda123
             container: sameli/manylinux_2_28_x86_64_cuda_12.3@sha256:e12416bf249ab312f9dcfdebd7939b968dd6f1b6f810abbede818df875e86a7c
             build_args: --features binary,cuda
-          - os: windows-2022
-            target: x86_64-pc-windows-msvc
-            binary: x86_64-windows-msvc
-            build_args: --features binary
-            ext: .exe

     env:
       SCCACHE_GHA_ENABLED: true
@@ -235,13 +229,13 @@ jobs:
       - name: Display structure of downloaded files
         run: ls -R

-      - name: Package CUDA 11.7 for Windows
+      - name: Package CPU for Windows
         run: >
-          LLAMA_CPP_PLATFORM=win-cuda-cu11.7-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc-cuda117 ./ci/package-from-upstream.sh
+          LLAMA_CPP_PLATFORM=win-cpu-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc ./ci/package-from-upstream.sh

       - name: Package CUDA 12.4 for Windows
         run: >
-          LLAMA_CPP_PLATFORM=win-cuda-cu12.4-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc-cuda124 ./ci/package-from-upstream.sh
+          LLAMA_CPP_PLATFORM=win-cuda-12.4-x64 OUTPUT_NAME=tabby_x86_64-windows-msvc-cuda124 ./ci/package-from-upstream.sh

       - name: Package Vulkan for Windows
         run: >
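
For reference, the renamed CPU packaging step can be reproduced outside CI with the same variables the workflow sets (a minimal sketch, assuming a repository checkout with the llama.cpp submodule available so ci/package-from-upstream.sh can resolve the version tag):

    # Local equivalent of the "Package CPU for Windows" step above;
    # both variable values are taken verbatim from the workflow diff.
    LLAMA_CPP_PLATFORM=win-cpu-x64 \
    OUTPUT_NAME=tabby_x86_64-windows-msvc \
    ./ci/package-from-upstream.sh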

ci/package-from-upstream.sh

Lines changed: 10 additions & 3 deletions
@@ -1,18 +1,25 @@
 #!/bin/bash

+set -e
+
 # get current bash file directory
 PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
 LLAMA_CPP_PATH="${PROJECT_ROOT}/crates/llama-cpp-server/llama.cpp"

 # Input variables
 LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION:-$(cd ${LLAMA_CPP_PATH} && git fetch --tags origin >/dev/null && git describe --tags --abbrev=0)}
 echo "LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION}"
-LLAMA_CPP_PLATFORM=${LLAMA_CPP_PLATFORM:-win-cuda-cu11.7-x64}
-OUTPUT_NAME=${OUTPUT_NAME:-tabby_x86_64-windows-msvc-cuda117}
+LLAMA_CPP_PLATFORM=${LLAMA_CPP_PLATFORM:-win-cuda-cu12.4-x64}

-NAME=llama-${LLAMA_CPP_VERSION}-bin-${LLAMA_CPP_PLATFORM}
+if [[ $LLAMA_CPP_PLATFORM == *cuda* ]]; then
+  NAME=cudart-llama-bin-${LLAMA_CPP_PLATFORM}
+else
+  NAME=llama-${LLAMA_CPP_VERSION}-bin-${LLAMA_CPP_PLATFORM}
+fi
 ZIP_FILE=${NAME}.zip

+OUTPUT_NAME=${OUTPUT_NAME:-tabby_x86_64-windows-msvc-cuda124}
+
 if [[ ${LLAMA_CPP_PLATFORM} == win* ]]; then
   TABBY_BINARY=${TABBY_BINARY:-tabby_x86_64-windows-msvc.exe}
   TABBY_EXTENSION=.exe
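
As a rough illustration of the naming branch added above, the archive the script downloads now differs for CUDA and non-CUDA platforms. The sketch below uses a made-up llama.cpp tag (b3500) purely as an example; the platform strings come from this commit:

    # Sketch only: prints the upstream zip name the script would fetch.
    # LLAMA_CPP_VERSION=b3500 is a hypothetical example tag.
    LLAMA_CPP_VERSION=b3500
    for LLAMA_CPP_PLATFORM in win-cuda-cu12.4-x64 win-cpu-x64; do
      if [[ $LLAMA_CPP_PLATFORM == *cuda* ]]; then
        NAME=cudart-llama-bin-${LLAMA_CPP_PLATFORM}   # cudart bundle name carries no version tag
      else
        NAME=llama-${LLAMA_CPP_VERSION}-bin-${LLAMA_CPP_PLATFORM}
      fi
      echo "${NAME}.zip"
    done
    # Prints:
    #   cudart-llama-bin-win-cuda-cu12.4-x64.zip
    #   llama-b3500-bin-win-cpu-x64.zip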
