# 🐛 Fixes #472 — Remove stateless kai resources when set to false (PR #88)
#
# Workflow file for this run.

# Integration test for the Kai LLM proxy: builds the operator + bundle images,
# deploys them to a minikube cluster with the llemulator mock LLM backend, and
# runs the e2e test script against the resulting Konveyor install.
name: LLM Proxy Integration Test

on:
  pull_request:
    branches: [main]
  push:
    branches: [main]

jobs:
  llm-proxy-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'

      - name: Start minikube
        uses: ./.github/actions/start-minikube

      # Push throwaway images to ttl.sh keyed by run id; :2h tag makes them
      # self-expire after two hours.
      - name: Build operator images
        env:
          IMG: ttl.sh/konveyor-tackle-operator-${{ github.run_id }}:2h
          BUNDLE_IMG: ttl.sh/konveyor-tackle-operator-bundle-${{ github.run_id }}:2h
        run: |
          make docker-build docker-push
          make bundle bundle-build bundle-push

      - name: Setup test environment
        run: |
          # Create namespace
          kubectl create namespace konveyor-tackle || true
          # Create API key secret for LLM proxy
          kubectl create secret generic kai-api-keys \
            --from-literal=OPENAI_API_KEY=dummy-key-for-llemulator \
            -n konveyor-tackle \
            --dry-run=client -o yaml | kubectl apply -f -
          # Deploy llemulator BEFORE Konveyor so llm-proxy can connect to it
          ./test/e2e/llm-proxy/setup-llemulator.sh

      - name: Install Konveyor
        uses: ./.github/actions/install-konveyor
        with:
          bundle_image: ttl.sh/konveyor-tackle-operator-bundle-${{ github.run_id }}:2h
          namespace: konveyor-tackle
          # Tackle CR enabling only the llm-proxy feature (solution server off),
          # pointed at the in-cluster llemulator service.
          tackle_cr: |
            kind: Tackle
            apiVersion: tackle.konveyor.io/v1alpha1
            metadata:
              name: tackle
            spec:
              feature_auth_required: true
              kai_solution_server_enabled: false
              kai_llm_proxy_enabled: true
              kai_llm_model: gpt-4o
              kai_llm_provider: openai
              kai_llm_baseurl: http://llemulator.konveyor-tackle.svc.cluster.local/v1

      - name: Run LLM proxy integration tests
        run: ./test/e2e/llm-proxy/run-test.sh

      # Best-effort diagnostics: each kubectl is suffixed with `|| true` (or
      # tolerates absence) so one missing resource doesn't abort collection.
      - name: Debug on failure
        if: failure()
        run: |
          # Create debug directory
          mkdir -p /tmp/llm-proxy-debug
          echo "=== Pod Status ==="
          kubectl get pods -n konveyor-tackle | tee /tmp/llm-proxy-debug/pod-status.txt
          echo ""
          echo "=== Recent Events ==="
          kubectl get events -n konveyor-tackle --sort-by='.lastTimestamp' | tail -20 | tee /tmp/llm-proxy-debug/events.txt
          echo ""
          echo "=== LLM Proxy Logs ==="
          kubectl logs -n konveyor-tackle deployment/llm-proxy --tail=100 | tee /tmp/llm-proxy-debug/llm-proxy.log || true
          echo ""
          echo "=== Llemulator Logs ==="
          kubectl logs -n konveyor-tackle deployment/llemulator --tail=100 | tee /tmp/llm-proxy-debug/llemulator.log || true
          echo ""
          echo "=== Tackle CR ==="
          kubectl get tackles.tackle.konveyor.io tackle -n konveyor-tackle -o yaml | tee /tmp/llm-proxy-debug/tackle-cr.yaml || true
          echo ""
          echo "=== All Deployments ==="
          kubectl get deployments -n konveyor-tackle -o yaml | tee /tmp/llm-proxy-debug/deployments.yaml || true

      - name: Upload debug artifacts on failure
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: llm-proxy-debug-output
          path: /tmp/llm-proxy-debug