(ci) Run e2e tests as part of PR/CI checks #1

Workflow file for this run

name: E2E Tests

on:
  push:
    branches: ["master", "main"]
  pull_request:
    branches: ["master", "main"]
  workflow_dispatch:

jobs:
  e2e-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout code
        uses: actions/checkout@v5

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version-file: 'go.mod'

      - name: Create Kafka KRaft configuration
        run: |
          mkdir -p kafka-config
          cat > kafka-config/server.properties << 'EOF'
          # KRaft mode configuration
          process.roles=broker,controller
          node.id=1
          controller.quorum.voters=1@localhost:9093
          # Listeners
          listeners=PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093
          advertised.listeners=PLAINTEXT://localhost:9092
          listener.security.protocol.map=PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
          controller.listener.names=CONTROLLER
          inter.broker.listener.name=PLAINTEXT
          # Log directories
          log.dirs=/tmp/kraft-combined-logs
          # Cluster ID will be generated
          # cluster.id will be set dynamically
          # Topic defaults
          offsets.topic.replication.factor=1
          transaction.state.log.replication.factor=1
          transaction.state.log.min.isr=1
          default.replication.factor=1
          min.insync.replicas=1
          # Performance tuning for CI
          num.network.threads=3
          num.io.threads=8
          socket.send.buffer.bytes=102400
          socket.receive.buffer.bytes=102400
          socket.request.max.bytes=104857600
          EOF

      - name: Start Kafka 4.0 with KRaft
        run: |
          # Download and extract Kafka 4.0
          wget -q https://archive.apache.org/dist/kafka/4.0.0/kafka_2.13-4.0.0.tgz
          tar -xzf kafka_2.13-4.0.0.tgz
          # Generate cluster ID
          CLUSTER_ID=$(kafka_2.13-4.0.0/bin/kafka-storage.sh random-uuid)
          echo "Generated Cluster ID: $CLUSTER_ID"
          # Format storage
          kafka_2.13-4.0.0/bin/kafka-storage.sh format \
            -t $CLUSTER_ID \
            -c kafka-config/server.properties
          # Start Kafka in background
          kafka_2.13-4.0.0/bin/kafka-server-start.sh kafka-config/server.properties > kafka.log 2>&1 &
          KAFKA_PID=$!
          echo "KAFKA_PID=$KAFKA_PID" >> $GITHUB_ENV
          # Wait for Kafka to be ready
          echo "Waiting for Kafka to start..."
          for i in {1..30}; do
            if kafka_2.13-4.0.0/bin/kafka-broker-api-versions.sh --bootstrap-server localhost:9092 > /dev/null 2>&1; then
              echo "Kafka is ready!"
              break
            fi
            if [ $i -eq 30 ]; then
              echo "Kafka failed to start within 30 seconds"
              cat kafka.log
              exit 1
            fi
            echo "Attempt $i/30: Kafka not ready yet, waiting..."
            sleep 1
          done
          # Verify Kafka is running
          kafka_2.13-4.0.0/bin/kafka-broker-api-versions.sh --bootstrap-server localhost:9092

      - name: Create KMinion configuration
        run: |
          cat > kminion-config.yaml << 'EOF'
          kafka:
            brokers:
              - localhost:9092
            clientId: kminion-e2e-test
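          # End-to-end monitoring: KMinion produces probe messages to its own topic,
          # consumes them back and commits offsets, exporting latency metrics for each stage.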
          minion:
            endToEnd:
              enabled: true
              probeInterval: 500ms
              topicManagement:
                enabled: true
                name: kminion-end-to-end
                replicationFactor: 1
                partitionsPerBroker: 1
                reconciliationInterval: 10s
              producer:
                ackSla: 5s
                requiredAcks: all
              consumer:
                groupIdPrefix: kminion-end-to-end
                deleteStaleConsumerGroups: false
                roundtripSla: 10s
                commitSla: 5s
          exporter:
            namespace: kminion
            port: 8080
          logger:
            level: info
          EOF

      - name: Build KMinion
        run: |
          go build -o kminion .

      - name: Start KMinion with E2E tests
        run: |
          export CONFIG_FILEPATH=kminion-config.yaml
          ./kminion > kminion.log 2>&1 &
          KMINION_PID=$!
          echo "KMINION_PID=$KMINION_PID" >> $GITHUB_ENV
          # Wait for KMinion to start
          echo "Waiting for KMinion to start..."
          for i in {1..30}; do
            if curl -s http://localhost:8080/metrics > /dev/null 2>&1; then
              echo "KMinion is ready!"
              break
            fi
            if [ $i -eq 30 ]; then
              echo "KMinion failed to start within 30 seconds"
              cat kminion.log
              exit 1
            fi
            echo "Attempt $i/30: KMinion not ready yet, waiting..."
            sleep 1
          done

      - name: Wait for E2E tests to run
        run: |
          echo "Waiting for E2E tests to produce metrics..."
          sleep 30

      - name: Validate E2E metrics
        run: |
          echo "Fetching metrics from KMinion..."
          METRICS=$(curl -s http://localhost:8080/metrics)
          echo "=== KMinion Metrics Output ==="
          echo "$METRICS" | grep -E "kminion_end_to_end_" || true
          echo "=============================="
          # Check for essential E2E metrics
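          # (names assume the exporter namespace "kminion" configured above is used as the metric prefix)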
          REQUIRED_METRICS=(
            "kminion_end_to_end_messages_produced_total"
            "kminion_end_to_end_messages_received_total"
            "kminion_end_to_end_produce_latency_seconds"
            "kminion_end_to_end_roundtrip_latency_seconds"
            "kminion_end_to_end_offset_commit_latency_seconds"
          )
          MISSING_METRICS=()
          for metric in "${REQUIRED_METRICS[@]}"; do
            if ! echo "$METRICS" | grep -q "^${metric}"; then
              MISSING_METRICS+=("$metric")
            fi
          done
          if [ ${#MISSING_METRICS[@]} -ne 0 ]; then
            echo "ERROR: Missing required E2E metrics:"
            printf '%s\n' "${MISSING_METRICS[@]}"
            echo ""
            echo "=== Full KMinion logs ==="
            cat kminion.log
            exit 1
          fi
          # Verify messages were produced and received
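          # Prometheus exposition lines look like "<name>{labels} <value>", so awk's
          # second field is the sample value.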
          PRODUCED=$(echo "$METRICS" | grep "^kminion_end_to_end_messages_produced_total" | awk '{print $2}')
          RECEIVED=$(echo "$METRICS" | grep "^kminion_end_to_end_messages_received_total" | awk '{print $2}')
          echo "Messages produced: $PRODUCED"
          echo "Messages received: $RECEIVED"
          if [ -z "$PRODUCED" ] || [ "$PRODUCED" = "0" ]; then
            echo "ERROR: No messages were produced"
            cat kminion.log
            exit 1
          fi
          if [ -z "$RECEIVED" ] || [ "$RECEIVED" = "0" ]; then
            echo "ERROR: No messages were received"
            cat kminion.log
            exit 1
          fi
          echo "✅ E2E tests passed successfully!"
          echo " - Messages produced: $PRODUCED"
          echo " - Messages received: $RECEIVED"

      - name: Show logs on failure
        if: failure()
        run: |
          echo "=== Kafka logs ==="
          cat kafka.log || echo "No Kafka logs found"
          echo ""
          echo "=== KMinion logs ==="
          cat kminion.log || echo "No KMinion logs found"

      - name: Cleanup
        if: always()
        run: |
          # Stop KMinion
          if [ -n "$KMINION_PID" ]; then
            kill $KMINION_PID || true
          fi
          # Stop Kafka
          if [ -n "$KAFKA_PID" ]; then
            kill $KAFKA_PID || true
          fi
          # Wait a bit for graceful shutdown
          sleep 2