2 files changed: +49 −1
ci/README.md
@@ -26,4 +26,43 @@ GG_BUILD_CUDA=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
  # with SYCL support
  source /opt/intel/oneapi/setvars.sh
  GG_BUILD_SYCL=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
+
+ # with MUSA support
+ GG_BUILD_MUSA=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
+ ```
+
+ ## Running MUSA CI in a Docker Container
+
+ Assuming `$PWD` is the root of the `llama.cpp` repository, follow these steps to set up and run MUSA CI in a Docker container:
+
+ ### 1. Create a local directory to store cached models, configuration files, and the Python venv:
+
+ ```bash
+ mkdir -p $HOME/llama.cpp/ci-cache
+ ```
+
+ ### 2. Create a local directory to store CI run results:
+
+ ```bash
+ mkdir -p $HOME/llama.cpp/ci-results
+ ```
+
+ ### 3. Start a Docker container and run the CI:
+
+ ```bash
+ docker run --privileged -it \
+     -v $HOME/llama.cpp/ci-cache:/ci-cache \
+     -v $HOME/llama.cpp/ci-results:/ci-results \
+     -v $PWD:/ws -w /ws \
+     mthreads/musa:rc3.1.1-devel-ubuntu22.04
  ```
+
+ Inside the container, execute the following commands:
+
+ ```bash
+ apt update -y && apt install -y cmake git python3.10-venv wget
+ git config --global --add safe.directory /ws
+ GG_BUILD_MUSA=1 bash ./ci/run.sh /ci-results /ci-cache
+ ```
+
+ This setup ensures that the CI runs within an isolated Docker environment while preserving cached files and results across runs.
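
A usage note, not part of this change: the `ci/run.sh` hunk below reads a `MUSA_ARCH` variable (defaulting to `21`, i.e. qy1 / MTT S80), so the target architecture can be overridden per run. A hypothetical invocation inside the container — the value `22` is illustrative only, not a verified architecture number:

```bash
# hypothetical: build for a MUSA architecture other than the default 21 (qy1)
GG_BUILD_MUSA=1 MUSA_ARCH=22 bash ./ci/run.sh /ci-results /ci-cache
```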

ci/run.sh
@@ -16,6 +16,9 @@
  # # with VULKAN support
  # GG_BUILD_VULKAN=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
  #
+ # # with MUSA support
+ # GG_BUILD_MUSA=1 bash ./ci/run.sh ./tmp/results ./tmp/mnt
+ #

  if [ -z "$2" ]; then
      echo "usage: $0 <output-dir> <mnt-dir>"
@@ -62,6 +65,12 @@
  if [ ! -z ${GG_BUILD_VULKAN} ]; then
      CMAKE_EXTRA="${CMAKE_EXTRA} -DGGML_VULKAN=1"
  fi
+
+ if [ ! -z ${GG_BUILD_MUSA} ]; then
+     # Use qy1 by default (MTT S80)
+     MUSA_ARCH=${MUSA_ARCH:-21}
+     CMAKE_EXTRA="${CMAKE_EXTRA} -DGGML_MUSA=ON -DMUSA_ARCHITECTURES=${MUSA_ARCH}"
+ fi
  ## helpers

  # download a file if it does not exist or if it is outdated
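
The CMake invocation that consumes `CMAKE_EXTRA` lies outside this hunk, so the following is a sketch under that assumption: with `GG_BUILD_MUSA=1` and `MUSA_ARCH` unset, the MUSA branch contributes flags equivalent to a configure step like:

```bash
# illustrative only, not a literal line from ci/run.sh;
# 21 selects qy1 (MTT S80), the default when MUSA_ARCH is unset
cmake .. -DGGML_MUSA=ON -DMUSA_ARCHITECTURES=21
```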

@@ -811,7 +820,7 @@ export LLAMA_LOG_PREFIX=1
  export LLAMA_LOG_TIMESTAMPS=1

  if [ -z ${GG_BUILD_LOW_PERF} ]; then
-     # Create symlink: ./llama.cpp/models-mnt -> $MNT/models/models-mnt
+     # Create symlink: ./llama.cpp/models-mnt -> $MNT/models
      rm -rf ${SRC}/models-mnt
      mnt_models=${MNT}/models
      mkdir -p ${mnt_models}
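
The corrected comment now matches the code: `mnt_models` is `${MNT}/models`, and the repo-local `models-mnt` entry points there rather than at a nested `models-mnt` path. The hunk is truncated before the link is created, so the command below is an assumption sketched for illustration:

```bash
# hypothetical sketch of the symlink step implied by the comment:
# ./llama.cpp/models-mnt -> $MNT/models
ln -sfn ${mnt_models} ${SRC}/models-mnt
```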