@@ -65,7 +65,7 @@ jobs:
             --target sglang-sagemaker \
             -f docker/sglang/Dockerfile .
 
-      - name: Docker Push and save image URI artifact
+      - name: Docker push and save image URI artifact
         run: |
           docker push $IMAGE_URI
           docker rmi $IMAGE_URI
@@ -77,7 +77,7 @@ jobs:
           name: sglang-sagemaker-image-uri
           path: image_uri.txt
 
-  sglang-regression-test:
+  sglang-local-benchmark-test:
     needs: [build-sglang-image]
     if: needs.build-sglang-image.result == 'success'
     runs-on:
@@ -87,36 +87,28 @@ jobs:
       - name: Checkout DLC source
         uses: actions/checkout@v5
 
-      - name: Container Pull
+      - name: Container pull
         uses: ./.github/actions/container-pull
         with:
           aws_region: ${{ secrets.AWS_REGION }}
           aws_account_id: ${{ secrets.AWS_ACCOUNT_ID }}
           artifact_name: sglang-sagemaker-image-uri
 
-      - name: Checkout SGLang Tests
-        uses: actions/checkout@v5
-        with:
-          repository: sgl-project/sglang
-          ref: v0.5.5
-          path: sglang_source
-
-      - name: Setup for SGLang Datasets
+      - name: Setup for SGLang datasets
         run: |
-          mkdir -p ${HOME}/dataset
-          if [ ! -f ${HOME}/dataset/ShareGPT_V3_unfiltered_cleaned_split.json ]; then
+          mkdir -p /tmp/sglang/dataset
+          if [ ! -f /tmp/sglang/dataset/ShareGPT_V3_unfiltered_cleaned_split.json ]; then
             echo "Downloading ShareGPT dataset..."
-            wget -P ${HOME}/dataset https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered/resolve/main/ShareGPT_V3_unfiltered_cleaned_split.json
+            wget -P /tmp/sglang/dataset https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered/resolve/main/ShareGPT_V3_unfiltered_cleaned_split.json
           else
             echo "ShareGPT dataset already exists. Skipping download."
           fi
 
       - name: Start container
         run: |
-          # CONTAINER_ID=$(docker run -d --rm --gpus=all --entrypoint /bin/bash \
           CONTAINER_ID=$(docker run -d --rm --gpus=all \
             -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
-            -v ${HOME}/dataset:/dataset \
+            -v /tmp/sglang/dataset:/dataset \
             -v ./sglang_source:/workdir --workdir /workdir \
             -p 30000:30000 \
             -e SM_SGLANG_MODEL_PATH=Qwen/Qwen3-0.6B \
@@ -130,14 +122,14 @@ jobs:
           sleep 300s
           docker logs --tail 200 ${CONTAINER_ID}
 
-      - name: Setup for SGLang Test
+      - name: Setup for SGLang tests
         run: |
           docker exec ${CONTAINER_ID} sh -c '
             set -eux
             # bash scripts/ci/ci_install_dependency.sh
           '
 
-      - name: Run SGLang Tests
+      - name: Run SGLang tests
         run: |
           docker exec ${CONTAINER_ID} python3 -m sglang.bench_serving \
             --backend sglang \
@@ -147,9 +139,9 @@ jobs:
             --dataset-name sharegpt \
             --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json
 
-      - name: Cleanup for SGLang Datasets
+      - name: Cleanup SGLang datasets
         run: |
-          rm -rf ${HOME}/dataset
+          rm -rf /tmp/sglang/dataset
 
       - name: Cleanup container and images
         if: always()
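
Note: a minimal sketch for reproducing this benchmark job locally, assuming the image built above is available as ${IMAGE_URI} and the dataset was fetched to /tmp/sglang/dataset as in the setup step. The /health polling loop is an assumption in place of the workflow's fixed `sleep 300s` (recent SGLang servers expose GET /health), and the workflow passes additional bench_serving flags that are collapsed out of this diff.

# Run the sglang-sagemaker container locally (mirrors the "Start container" step).
CONTAINER_ID=$(docker run -d --rm --gpus=all \
  -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
  -v /tmp/sglang/dataset:/dataset \
  -p 30000:30000 \
  -e SM_SGLANG_MODEL_PATH=Qwen/Qwen3-0.6B \
  ${IMAGE_URI})

# Poll readiness for up to ~5 minutes instead of sleeping a fixed 300s.
for _ in $(seq 1 60); do
  curl -sf http://localhost:30000/health >/dev/null && break
  sleep 5
done

# Same benchmark invocation as the "Run SGLang tests" step; flags hidden by
# the diff's collapsed context are omitted here.
docker exec ${CONTAINER_ID} python3 -m sglang.bench_serving \
  --backend sglang \
  --dataset-name sharegpt \
  --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json

docker stop ${CONTAINER_ID}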