Skip to content

Commit fadf714

Browse files
committed
update test
Signed-off-by: Junpu Fan <junpu@amazon.com>
1 parent 872029d commit fadf714

File tree

1 file changed

+36
-4
lines changed

1 file changed

+36
-4
lines changed

.github/workflows/pr-example.yml

Lines changed: 36 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -85,9 +85,41 @@ jobs:
8585
echo "Resolved image URI: $IMAGE_URI"
8686
- name: Test image
8787
run: |
88+
# Download ShareGPT dataset if it doesn't exist
89+
mkdir -p ${HOME}/dataset
90+
if [ ! -f ${HOME}/dataset/ShareGPT_V3_unfiltered_cleaned_split.json ]; then
91+
echo "Downloading ShareGPT dataset..."
92+
wget -P ${HOME}/dataset https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered/resolve/main/ShareGPT_V3_unfiltered_cleaned_split.json
93+
else
94+
echo "ShareGPT dataset already exists. Skipping download."
95+
fi
96+
8897
aws ecr get-login-password --region ${{ secrets.AWS_REGION }} | docker login --username AWS --password-stdin ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.${{ secrets.AWS_REGION }}.amazonaws.com
8998
docker pull "$IMAGE_URI"
90-
docker run --rm --gpus=all \
91-
--entrypoint /bin/bash \
92-
"$IMAGE_URI" \
93-
-c "python -c 'import vllm; print(vllm.__version__)'"
99+
CONTAINER_NAME=vllm-rayserve
100+
docker stop ${CONTAINER_NAME} || true
101+
docker rm -f ${CONTAINER_NAME} || true
102+
docker run --name ${CONTAINER_NAME} \
103+
-d --gpus=all --entrypoint /bin/bash \
104+
-v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
105+
-v ${HOME}/.cache/vllm:/root/.cache/vllm \
106+
-v ${HOME}/dataset:/dataset \
107+
-e "HUGGING_FACE_HUB_TOKEN=${{ secrets.HUGGING_FACE_HUB_TOKEN }} \
108+
${IMAGE_URI} \
109+
-c "vllm serve Qwen/Qwen3-0.6B --reasoning-parser qwen3"
110+
sleep 60
111+
docker logs ${CONTAINER_NAME}
112+
113+
# run serving benchmark
114+
echo "start running serving benchmark workflow..."
115+
docker exec ${CONTAINER_NAME} vllm bench serve \
116+
--backend vllm \
117+
--model Qwen/Qwen3-0.6B \
118+
--dataset-name sharegpt \
119+
--dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json \
120+
--num-prompts 1000
121+
122+
# cleanup container
123+
docker stop ${CONTAINER_NAME}
124+
docker rm -f ${CONTAINER_NAME}
125+

0 commit comments

Comments
 (0)