@@ -106,7 +106,7 @@ jobs:
 
       - name: Start container
        run: |
-          CONTAINER_ID=$(docker run -d -it --rm --gpus=all --entrypoint /bin/bash \
+          CONTAINER_ID=$(docker run -d -it --rm --gpus=all \
            -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
            -v /tmp/sglang/dataset:/dataset \
            -v ./sglang_source:/workdir --workdir /workdir \
@@ -119,22 +119,22 @@ jobs:
            ${IMAGE_URI})
          echo "CONTAINER_ID=$CONTAINER_ID" >> $GITHUB_ENV
          echo "Waiting for container startup ..."
-          # sleep 300s
+          sleep 300s
          docker logs ${CONTAINER_ID}
 
-      # - name: Run SGLang tests
-      #   run: |
-      #     docker exec ${CONTAINER_ID} python3 -m sglang.bench_serving \
-      #       --backend sglang \
-      #       --host 127.0.0.1 --port 30000 \
-      #       --num-prompts 1000 \
-      #       --model Qwen/Qwen3-0.6B \
-      #       --dataset-name sharegpt \
-      #       --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json
-      #
-      # - name: Cleanup SGLang datasets
-      #   run: |
-      #     rm -rf /tmp/sglang/dataset
+      - name: Run SGLang tests
+        run: |
+          docker exec ${CONTAINER_ID} python3 -m sglang.bench_serving \
+            --backend sglang \
+            --host 127.0.0.1 --port 30000 \
+            --num-prompts 1000 \
+            --model Qwen/Qwen3-0.6B \
+            --dataset-name sharegpt \
+            --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json
+
+      - name: Cleanup SGLang datasets
+        run: |
+          rm -rf /tmp/sglang/dataset
 
       - name: Cleanup container and images
        if: always()
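For local debugging, the steps enabled by this diff boil down to the shell commands below. This is a minimal sketch, not the workflow itself: it assumes a GPU host with Docker, the SGLang image already available, and the ShareGPT dataset already downloaded under /tmp/sglang/dataset. The IMAGE_URI value is a placeholder (the workflow resolves its own tag), the workdir/source mounts are omitted, and the benchmark flags are taken verbatim from the diff above.

```bash
# Placeholder image tag; the CI workflow supplies its own IMAGE_URI.
IMAGE_URI=lmsysorg/sglang:latest

# Start the container with the image's default entrypoint, mirroring the
# removal of --entrypoint /bin/bash in the diff; this assumes the entrypoint
# brings up an SGLang server listening on port 30000.
CONTAINER_ID=$(docker run -d -it --rm --gpus=all \
  -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
  -v /tmp/sglang/dataset:/dataset \
  ${IMAGE_URI})

# Give the server time to start, as the workflow does.
sleep 300
docker logs ${CONTAINER_ID}

# Run the serving benchmark against the server inside the container.
docker exec ${CONTAINER_ID} python3 -m sglang.bench_serving \
  --backend sglang \
  --host 127.0.0.1 --port 30000 \
  --num-prompts 1000 \
  --model Qwen/Qwen3-0.6B \
  --dataset-name sharegpt \
  --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json

# Clean up: the container was started with --rm, so stopping it removes it.
docker stop ${CONTAINER_ID}
rm -rf /tmp/sglang/dataset
```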