Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 21 additions & 4 deletions .github/workflows/redhat-distro-container.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@ env:
jobs:
build-test-push:
runs-on: ubuntu-latest
env:
INFERENCE_MODEL: meta-llama/Llama-3.2-1B-Instruct
VLLM_URL: http://localhost:8000/v1
strategy:
matrix:
platform: [linux/amd64] # TODO: enable other arch once all pip packages are available.
Expand All @@ -32,6 +35,12 @@ jobs:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
with:
python-version: 3.12
version: 0.7.6

- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

Expand All @@ -55,14 +64,22 @@ jobs:
id: vllm
uses: ./.github/actions/setup-vllm

- name: Smoke test image
- name: Start and smoke test LLS distro image
id: smoke-test
shell: bash
env:
INFERENCE_MODEL: meta-llama/Llama-3.2-1B-Instruct
VLLM_URL: http://localhost:8000/v1
run: ./tests/smoke.sh

- name: Integration tests
id: integration-tests
shell: bash
run: ./tests/run_integration_tests.sh

- name: cleanup
if: always()
shell: bash
run: |
docker rm -f vllm llama-stack

- name: Log in to Quay.io
id: login
if: github.event_name == 'push'
Expand Down
75 changes: 75 additions & 0 deletions tests/run_integration_tests.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
#!/usr/bin/env bash
#
# Run the upstream llama-stack integration test suite against the locally
# built distro image. Requires: git, uv. Honors INFERENCE_MODEL from the
# environment (defaults to Llama-3.2-1B-Instruct).

set -euo pipefail

# Configuration
readonly LLAMA_STACK_REPO="https://github.com/meta-llama/llama-stack.git"
readonly WORK_DIR="/tmp/llama-stack-integration-tests"
INFERENCE_MODEL="${INFERENCE_MODEL:-meta-llama/Llama-3.2-1B-Instruct}"

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Get version dynamically from Containerfile.in (look in parent directory).
CONTAINERFILE_IN="$SCRIPT_DIR/../distribution/Containerfile.in"
# NOTE: the trailing '|| true' matters — under 'set -euo pipefail' a
# non-matching grep (or a missing Containerfile) would abort the script at
# this assignment, so the helpful -z error message below would never print.
LLAMA_STACK_VERSION=$(grep -o 'llama-stack==[0-9]\+\.[0-9]\+\.[0-9]\+' "$CONTAINERFILE_IN" | cut -d'=' -f3 || true)
if [ -z "$LLAMA_STACK_VERSION" ]; then
  echo "Error: Could not extract llama-stack version from Containerfile.in" >&2
  exit 1
fi

#######################################
# Clone (or reuse) the llama-stack repo and check out the release tag
# matching the version baked into the distro image.
# Globals:   LLAMA_STACK_REPO, WORK_DIR, LLAMA_STACK_VERSION (all read)
# Outputs:   progress/errors to stdout/stderr
# Returns:   0 on success; exits 1 if the tag cannot be checked out
#######################################
function clone_llama_stack() {
  # Clone the repository if it doesn't exist.
  if [ ! -d "$WORK_DIR" ]; then
    git clone "$LLAMA_STACK_REPO" "$WORK_DIR"
  fi

  cd "$WORK_DIR" || exit 1
  # Fetch explicitly with --tags in case we reused an existing clone that
  # predates the tag we need (a plain 'git fetch origin' can miss new tags).
  git fetch origin --tags
  if ! git checkout "v$LLAMA_STACK_VERSION"; then
    echo "Error: Could not checkout tag v$LLAMA_STACK_VERSION" >&2
    echo "Available tags:" >&2
    # '|| true' so an empty tag list (grep exit 1 + pipefail) can't abort
    # before we reach the explicit exit below.
    git tag | grep "^v" | sort -V | tail -10 >&2 || true
    exit 1
  fi
}

#######################################
# Run the upstream inference integration tests with pytest (via uv) against
# the running stack, skipping known-incompatible tests.
# Globals:   WORK_DIR, SCRIPT_DIR, INFERENCE_MODEL (all read)
# Outputs:   pytest output to stdout; errors to stderr
# Returns:   0 on success; exits 1 if the stack config is missing
#######################################
function run_integration_tests() {
  echo "Running integration tests..."

  cd "$WORK_DIR" || exit 1

  # Tests to skip.
  local skip_tests="test_text_chat_completion_tool_calling_tools_not_in_request or test_inference_store_tool_calls"

  # Dynamically determine the path to run.yaml from the original script directory.
  local stack_config_path="$SCRIPT_DIR/../distribution/run.yaml"
  if [ ! -f "$stack_config_path" ]; then
    echo "Error: Could not find stack config at $stack_config_path" >&2
    exit 1
  fi

  uv run pytest -s -v tests/integration/inference/ \
    --stack-config=server:"$stack_config_path" \
    --text-model=vllm-inference/"$INFERENCE_MODEL" \
    -k "not ($skip_tests)"
}

#######################################
# Entry point: print the effective configuration, prepare the llama-stack
# checkout, then run the integration suite.
# Globals:   LLAMA_STACK_VERSION, LLAMA_STACK_REPO, WORK_DIR,
#            INFERENCE_MODEL (all read)
#######################################
function main() {
  printf '%s\n' "Starting llama-stack integration tests"
  printf '%s\n' "Configuration:"
  printf '%s\n' " LLAMA_STACK_VERSION: $LLAMA_STACK_VERSION"
  printf '%s\n' " LLAMA_STACK_REPO: $LLAMA_STACK_REPO"
  printf '%s\n' " WORK_DIR: $WORK_DIR"
  printf '%s\n' " INFERENCE_MODEL: $INFERENCE_MODEL"

  clone_llama_stack
  run_integration_tests

  printf '%s\n' "Integration tests completed successfully!"
}


# Run everything; 'set -e' aborts earlier on any failure, so reaching this
# exit means the whole suite passed.
main "$@"
exit 0
1 change: 0 additions & 1 deletion tests/smoke.sh
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,5 @@ main() {
echo "===> Smoke test completed successfully!"
}

trap 'docker rm -f -v llama-stack >/dev/null 2>&1 || true' EXIT
main "$@"
exit 0
Loading