Commit ff6bba4

try smart cleanup

Signed-off-by: Junpu Fan <junpu@amazon.com>
1 parent b75b924 commit ff6bba4
File tree: 2 files changed, +22 −25 lines changed
.github/scripts/cleanup_old_image.sh

Lines changed: 14 additions & 0 deletions

@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Delete images older than 1 day (24h)
+cutoff=$(date -d '1 day ago' +%s)
+
+docker images --format '{{.ID}} {{.Repository}}:{{.Tag}} {{.CreatedAt}}' \
+  | while read -r id name created_at _; do
+      created_ts=$(date -d "$created_at" +%s 2>/dev/null || echo 0)
+      if (( created_ts < cutoff )); then
+        echo "Deleting old image: $name ($id, created $created_at)"
+        docker rmi -f "$id" || true
+      fi
+    done
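Note on the date handling in the new script: docker images prints {{.CreatedAt}} as a multi-word timestamp (e.g. "2026-02-10 09:15:42 -0800 PST"), so the "read -r id name created_at _" split keeps only the date part in created_at, and the age comparison is day-granular. A minimal sketch for sanity-checking that logic, assuming GNU coreutils date as on Ubuntu runners (BSD date has no -d); the sample timestamp below is hypothetical:

    # cutoff as epoch seconds, 24h ago (GNU date)
    date -d '1 day ago' +%s
    # first CreatedAt value; read splits on whitespace, keeping only the date
    docker images --format '{{.CreatedAt}}' | head -n 1
    # what the script actually compares: midnight of the creation day
    date -d '2026-02-10' +%s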

.github/workflows/pr-vllm-rayserve.yml

Lines changed: 8 additions & 25 deletions
@@ -106,7 +106,6 @@ jobs:
       CONTAINER_ID=$(docker run -d -it --rm --gpus=all --entrypoint /bin/bash \
         -v ${HOME}/.cache/huggingface:/root/.cache/huggingface \
         -v ${HOME}/.cache/vllm:/root/.cache/vllm \
-        -v ${HOME}/dataset:/root/dataset \
         -v ./vllm_tests:/workdir --workdir /workdir \
         ${IMAGE_URI})
       echo "CONTAINER_ID=$CONTAINER_ID" >> $GITHUB_ENV
@@ -123,34 +122,18 @@ jobs:
       run: |
         docker exec ${CONTAINER_ID} sh -c '
           set -eux
-          nvidia-smi
+          nvidia-smi
           pytest -s -v tests/test_logger.py
+          # Entrypoints Integration Test (LLM) # 30min
+          # export VLLM_WORKER_MULTIPROC_METHOD=spawn
+          # pytest -v -s entrypoints/llm --ignore=entrypoints/llm/test_generate.py --ignore=entrypoints/llm/test_collective_rpc.py
+          # pytest -v -s entrypoints/llm/test_generate.py # it needs a clean process
+          # pytest -v -s entrypoints/offline_mode # Needs to avoid interference with other tests
         '
-
-    - name: Run qwen3 benchmark
-      run: |
-        # Download ShareGPT dataset if it doesn't exist
-        mkdir -p ${HOME}/dataset
-        if [ ! -f ${HOME}/dataset/ShareGPT_V3_unfiltered_cleaned_split.json ]; then
-          echo "Downloading ShareGPT dataset..."
-          wget -q -P ${HOME}/dataset https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered/resolve/main/ShareGPT_V3_unfiltered_cleaned_split.json
-        else
-          echo "ShareGPT dataset already exists. Skipping download."
-        fi
-
-        sleep 60
-
-        # run serving benchmark
-        echo "start running serving benchmark workflow..."
-        docker exec ${CONTAINER_ID} vllm bench serve \
-          --backend vllm \
-          --model Qwen/Qwen3-0.6B \
-          --dataset-name sharegpt \
-          --dataset-path /dataset/ShareGPT_V3_unfiltered_cleaned_split.json \
-          --num-prompts 1000
-
+
     - name: Cleanup container and image
       if: always()
       run: |
         docker stop ${CONTAINER_ID} || true
         docker rm -f ${CONTAINER_ID} || true
-        docker rmi ${IMAGE_URI} || true
+    - run: .github/scripts/cleanup_old_image.sh
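The workflow's cleanup step now delegates image removal to the shared script instead of deleting only ${IMAGE_URI}. A hedged sketch of exercising the same path by hand on a runner (hypothetical session; invoking via bash avoids relying on the script's executable bit):

    # run the cleanup the workflow now performs
    bash .github/scripts/cleanup_old_image.sh
    # verify: only images newer than 24h should remain
    docker images --format '{{.Repository}}:{{.Tag}} {{.CreatedAt}}'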
