diff --git a/.github/workflows/e2e-tests.yaml b/.github/workflows/e2e-tests.yaml
new file mode 100644
index 00000000..13153f00
--- /dev/null
+++ b/.github/workflows/e2e-tests.yaml
@@ -0,0 +1,48 @@
+name: E2E Tests
+
+on:
+  push:
+    branches: ["main", "master"]
+  pull_request:
+    branches: ["main", "master"]
+  workflow_dispatch:
+
+jobs:
+  e2e-tests:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v5
+
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version-file: 'go.mod'
+
+      - name: Start Kafka 4 with KRaft
+        run: make e2e-setup
+
+      - name: Build KMinion
+        run: make build
+
+      - name: Start KMinion with E2E configuration
+        run: make e2e-start
+
+      - name: Run integration tests
+        run: make e2e-test
+
+      - name: Show logs on failure
+        if: failure()
+        run: |
+          echo "=== Kafka logs ==="
+          docker logs broker 2>&1 | tail -100 || echo "No Kafka logs found"
+          echo ""
+          echo "=== KMinion logs ==="
+          cat kminion.log || echo "No KMinion logs found"
+
+      - name: Cleanup
+        if: always()
+        run: make e2e-cleanup
+
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..4d62e53d
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,42 @@
+.PHONY: help build test e2e-setup e2e-start e2e-stop e2e-test e2e-cleanup e2e-full
+
+help: ## Show this help message
+	@echo 'Usage: make [target]'
+	@echo ''
+	@echo 'Available targets:'
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "  \033[36m%-20s\033[0m %s\n", $$1, $$2}'
+
+build: ## Build KMinion binary
+	go build -o kminion .
+
+test: ## Run unit tests
+	go test -v ./...
+
+e2e-setup: ## Start Kafka cluster for E2E testing
+	@chmod +x e2e/bin/setup-kafka.sh
+	./e2e/bin/setup-kafka.sh start
+
+e2e-start: ## Start KMinion with E2E configuration
+	@chmod +x e2e/bin/start-kminion.sh
+	./e2e/bin/start-kminion.sh start
+
+e2e-stop: ## Stop KMinion
+	@chmod +x e2e/bin/start-kminion.sh
+	./e2e/bin/start-kminion.sh stop
+
+e2e-test: ## Run E2E integration tests (requires Kafka and KMinion to be running)
+	@chmod +x e2e/bin/integration-test.sh
+	./e2e/bin/integration-test.sh
+
+e2e-cleanup: ## Stop and cleanup Kafka cluster and KMinion
+	@chmod +x e2e/bin/start-kminion.sh e2e/bin/setup-kafka.sh
+	./e2e/bin/start-kminion.sh stop || true
+	./e2e/bin/setup-kafka.sh stop || true
+
+e2e-full: e2e-setup ## Run full E2E test suite (setup, build, start, test, cleanup)
+	@echo "Starting full E2E test suite..."
+	@trap '$(MAKE) e2e-cleanup' EXIT; \
+	$(MAKE) build && \
+	$(MAKE) e2e-start && \
+	$(MAKE) e2e-test
+
diff --git a/e2e/.gitignore b/e2e/.gitignore
new file mode 100644
index 00000000..ce03650e
--- /dev/null
+++ b/e2e/.gitignore
@@ -0,0 +1,4 @@
+# E2E test artifacts
+*.log
+*.pid
+
diff --git a/e2e/bin/README.md b/e2e/bin/README.md
new file mode 100644
index 00000000..c2fa198a
--- /dev/null
+++ b/e2e/bin/README.md
@@ -0,0 +1,277 @@
+# KMinion E2E Integration Tests
+
+This directory contains end-to-end integration tests for KMinion, including scripts to set up a test Kafka cluster and validate KMinion's metrics.
+
+## Overview
+
+The E2E test suite validates:
+- **End-to-End Monitoring**: KMinion's ability to produce, consume, and measure message latencies
+- **Built-in Metrics**: Core exporter metrics, Kafka cluster info, broker info, and topic metrics
+
+## Scripts
+
+All scripts are located in the `e2e/bin/` directory.
+
+### `setup-kafka.sh`
+Manages a single-node Kafka 4.x cluster in KRaft mode in Docker for testing.
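+
+Under the hood the script runs the official `apache/kafka` image and waits until the broker answers API version requests. Once it reports ready, you can sanity-check the broker manually if you like (illustrative command, assuming the default container name `broker` and port `9092`):
+
+```bash
+# List topics through the broker's own CLI tools inside the container
+docker exec broker /opt/kafka/bin/kafka-topics.sh \
+  --bootstrap-server localhost:9092 --list
+```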
+
+**Usage:**
+```bash
+# Start Kafka
+./e2e/bin/setup-kafka.sh start
+
+# Stop Kafka
+./e2e/bin/setup-kafka.sh stop
+
+# Restart Kafka
+./e2e/bin/setup-kafka.sh restart
+
+# Check status
+./e2e/bin/setup-kafka.sh status
+
+# View logs
+./e2e/bin/setup-kafka.sh logs
+```
+
+**Environment Variables:**
+- `KAFKA_VERSION`: Kafka version (default: `4.1.0`)
+- `KAFKA_PORT`: Kafka port (default: `9092`)
+- `CONTAINER_NAME`: Docker container name (default: `broker`)
+- `WAIT_TIMEOUT`: Wait timeout in seconds (default: `60`)
+
+**Example:**
+```bash
+# Start Kafka on a different port
+KAFKA_PORT=9093 ./e2e/bin/setup-kafka.sh start
+```
+
+### `start-kminion.sh`
+Starts KMinion with the E2E configuration and waits for it to be ready.
+
+**Usage:**
+```bash
+# Start KMinion
+./e2e/bin/start-kminion.sh start
+
+# Stop KMinion
+./e2e/bin/start-kminion.sh stop
+
+# Restart KMinion
+./e2e/bin/start-kminion.sh restart
+
+# Check status
+./e2e/bin/start-kminion.sh status
+
+# View logs
+./e2e/bin/start-kminion.sh logs
+```
+
+**Environment Variables:**
+- `CONFIG_FILE`: Path to config file (default: `e2e/bin/test-config.yaml`)
+- `KMINION_BIN`: Path to KMinion binary (default: `./kminion`)
+- `LOG_FILE`: Path to log file (default: `kminion.log`)
+- `METRICS_URL`: Metrics endpoint URL (default: `http://localhost:8080/metrics`)
+- `WAIT_TIMEOUT`: Wait timeout in seconds (default: `30`)
+
+**Example:**
+```bash
+# Start with a custom config
+CONFIG_FILE=my-config.yaml ./e2e/bin/start-kminion.sh start
+```
+
+### `integration-test.sh`
+Validates KMinion's E2E and built-in metrics.
+
+**Usage:**
+```bash
+# Run all integration tests
+./e2e/bin/integration-test.sh
+```
+
+**Environment Variables:**
+- `METRICS_URL`: KMinion metrics endpoint (default: `http://localhost:8080/metrics`)
+- `WAIT_TIMEOUT`: Wait timeout in seconds (default: `30`)
+
+**Example:**
+```bash
+# Test against a different KMinion instance
+METRICS_URL=http://localhost:8081/metrics ./e2e/bin/integration-test.sh
+```
+
+## Running Tests Locally
+
+### Prerequisites
+- Docker
+- `curl`
+- `nc` (netcat)
+- Go 1.21+ (for building KMinion)
+- GNU Make
+
+### Quick Start with Makefile
+
+The easiest way to run the E2E tests is via the Makefile targets:
+
+```bash
+# Run the complete E2E test suite (setup, build, start, test, cleanup)
+make e2e-full
+```
+
+This single command will:
+1. Start the Kafka cluster
+2. Build KMinion
+3. Start KMinion with the E2E config
+4. Run the integration tests
+5. Clean everything up
+
+### Step-by-Step with Makefile
+
+If you prefer to run the steps individually:
+
+```bash
+# 1. Start Kafka cluster
+make e2e-setup
+
+# 2. Build KMinion
+make build
+
+# 3. Start KMinion with E2E config
+make e2e-start
+
+# 4. Run integration tests
+make e2e-test
+
+# 5. Cleanup
+make e2e-cleanup
+```
+
+### Manual Test Flow (without Makefile)
+
+1. **Start Kafka cluster:**
+   ```bash
+   ./e2e/bin/setup-kafka.sh start
+   ```
+
+2. **Build KMinion:**
+   ```bash
+   go build -o kminion .
+   ```
+
+3. **Start KMinion:**
+   ```bash
+   ./e2e/bin/start-kminion.sh start
+   ```
+
+4. **Run integration tests:**
+   ```bash
+   ./e2e/bin/integration-test.sh
+   ```
+
+5. **Cleanup:**
+   ```bash
+   ./e2e/bin/start-kminion.sh stop
+   ./e2e/bin/setup-kafka.sh stop
+   ```
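+
+After step 4 you can also eyeball the E2E metrics directly. The sample output below is illustrative only; actual values and label sets will differ:
+
+```bash
+curl -s http://localhost:8080/metrics | grep "^kminion_end_to_end_messages"
+# kminion_end_to_end_messages_produced_total ...
+# kminion_end_to_end_messages_received_total ...
+```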
+
+### Available Makefile Targets
+
+```bash
+make help         # Show all available targets
+make build        # Build KMinion binary
+make test         # Run unit tests
+make e2e-setup    # Start Kafka cluster for E2E testing
+make e2e-start    # Start KMinion with E2E configuration
+make e2e-stop     # Stop KMinion
+make e2e-test     # Run E2E integration tests (requires Kafka and KMinion)
+make e2e-cleanup  # Stop and cleanup Kafka cluster and KMinion
+make e2e-full     # Run full E2E test suite (setup, build, start, test, cleanup)
+```
+
+## CI Integration
+
+The GitHub Actions workflow (`.github/workflows/e2e-tests.yaml`) uses the Makefile targets:
+
+```yaml
+- name: Start Kafka 4 with KRaft
+  run: make e2e-setup
+
+- name: Build KMinion
+  run: make build
+
+- name: Start KMinion with E2E configuration
+  run: make e2e-start
+
+- name: Run integration tests
+  run: make e2e-test
+
+- name: Cleanup
+  run: make e2e-cleanup
+```
+
+## Validated Metrics
+
+### E2E Metrics
+- `kminion_end_to_end_messages_produced_total` - Total messages produced
+- `kminion_end_to_end_messages_received_total` - Total messages received
+- `kminion_end_to_end_produce_latency_seconds` - Producer ack latency
+- `kminion_end_to_end_roundtrip_latency_seconds` - End-to-end roundtrip latency
+- `kminion_end_to_end_offset_commit_latency_seconds` - Offset commit latency
+
+### Built-in Metrics
+- `kminion_exporter_up` - Exporter health status
+- `kminion_exporter_offset_consumer_records_consumed_total` - Offset consumer records
+- `kminion_kafka_cluster_info` - Cluster metadata
+- `kminion_kafka_broker_info` - Broker information
+- `kminion_kafka_topic_info` - Topic metadata
+- `kminion_kafka_topic_partition_high_water_mark` - Partition high water marks
+- `kminion_kafka_topic_high_water_mark_sum` - Aggregated topic high water marks
+- `kminion_kafka_topic_partition_low_water_mark` - Partition low water marks
+- `kminion_kafka_topic_low_water_mark_sum` - Aggregated topic low water marks
+- `kminion_kafka_consumer_group_info` - Consumer group information
+- `kminion_kafka_consumer_group_members` - Consumer group member counts
+- `kminion_kafka_consumer_group_topic_lag` - Consumer group lag per topic
+- `kminion_kafka_broker_log_dir_size_total_bytes` - Broker log directory sizes
+- `kminion_kafka_topic_log_dir_size_total_bytes` - Topic log directory sizes
+
+## Troubleshooting
+
+### Kafka won't start
+```bash
+# Check logs
+./e2e/bin/setup-kafka.sh logs
+
+# Restart
+./e2e/bin/setup-kafka.sh restart
+```
+
+### KMinion not producing metrics
+```bash
+# Check if KMinion is running
+curl http://localhost:8080/metrics
+
+# Check KMinion logs
+tail -f kminion.log
+```
+
+### Port conflicts
+```bash
+# Use a different port
+KAFKA_PORT=9093 ./e2e/bin/setup-kafka.sh start
+
+# Update the KMinion config to use localhost:9093
+```
+
+## Development
+
+To add new test validations, edit `integration-test.sh` and add a new validation function following the existing pattern:
+
+```bash
+validate_my_new_metric() {
+    log_info "Validating my new metric..."
+    local metrics
+    metrics=$(fetch_metrics "$METRICS_URL") || return 1
+
+    # Your validation logic here
+
+    log_info "✅ My new metric validation passed"
+    return 0
+}
+```
+
+Then call it from the `main()` function.
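+
+For example, a hypothetical extra check that at least one consumer group is reported (the function name and assertion are illustrative, not part of the shipped test suite) could look like this:
+
+```bash
+validate_consumer_group_count() {
+    log_info "Validating consumer group count..."
+    local metrics groups
+    metrics=$(fetch_metrics "$METRICS_URL") || return 1
+
+    # Expect at least one kminion_kafka_consumer_group_info series (the E2E group)
+    groups=$(echo "$metrics" | grep -c "^kminion_kafka_consumer_group_info" || true)
+    if [ "$groups" -lt 1 ]; then
+        log_error "Expected at least one consumer group, found: $groups"
+        return 1
+    fi
+
+    log_info "✅ Consumer group count validation passed ($groups series)"
+    return 0
+}
+```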
+
diff --git a/e2e/bin/integration-test.sh b/e2e/bin/integration-test.sh
new file mode 100755
index 00000000..8fe17adb
--- /dev/null
+++ b/e2e/bin/integration-test.sh
@@ -0,0 +1,236 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# KMinion E2E Integration Test Script
+# This script validates KMinion's end-to-end monitoring and built-in metrics
+# Can be run locally or in CI
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+METRICS_URL="${METRICS_URL:-http://localhost:8080/metrics}"
+WAIT_TIMEOUT="${WAIT_TIMEOUT:-30}"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+log_info() {
+    echo -e "${GREEN}[INFO]${NC} $*"
+}
+
+log_warn() {
+    echo -e "${YELLOW}[WARN]${NC} $*"
+}
+
+log_error() {
+    # Errors go to stderr so they remain visible when stdout is captured
+    echo -e "${RED}[ERROR]${NC} $*" >&2
+}
+
+# Fetch metrics from KMinion
+fetch_metrics() {
+    local url="$1"
+    if ! curl -sf "$url" 2>/dev/null; then
+        log_error "Failed to fetch metrics from $url"
+        return 1
+    fi
+}
+
+# Validate E2E metrics
+validate_e2e_metrics() {
+    log_info "Validating E2E metrics..."
+
+    local metrics
+    metrics=$(fetch_metrics "$METRICS_URL") || return 1
+
+    # Required E2E metrics
+    local required_metrics=(
+        "kminion_end_to_end_messages_produced_total"
+        "kminion_end_to_end_messages_received_total"
+        "kminion_end_to_end_produce_latency_seconds"
+        "kminion_end_to_end_roundtrip_latency_seconds"
+        "kminion_end_to_end_offset_commit_latency_seconds"
+    )
+
+    local missing_metrics=()
+    for metric in "${required_metrics[@]}"; do
+        if ! echo "$metrics" | grep -q "^${metric}"; then
+            missing_metrics+=("$metric")
+        fi
+    done
+
+    if [ ${#missing_metrics[@]} -ne 0 ]; then
+        log_error "Missing required E2E metrics:"
+        printf '  - %s\n' "${missing_metrics[@]}"
+        echo ""
+        log_info "Available E2E metrics:"
+        echo "$metrics" | grep -E "kminion_end_to_end_" || echo "  (none found)"
+        return 1
+    fi
+
+    # Verify messages were produced and received
+    local produced received
+    produced=$(echo "$metrics" | grep "^kminion_end_to_end_messages_produced_total" | awk '{print $2}')
+    received=$(echo "$metrics" | grep "^kminion_end_to_end_messages_received_total" | awk '{print $2}')
+
+    if [ -z "$produced" ] || [ "$produced" = "0" ]; then
+        log_error "No messages were produced (expected > 0)"
+        return 1
+    fi
+
+    if [ -z "$received" ] || [ "$received" = "0" ]; then
+        log_error "No messages were received (expected > 0)"
+        return 1
+    fi
+
+    log_info "✅ E2E metrics validation passed"
+    log_info "   Messages produced: $produced"
+    log_info "   Messages received: $received"
+    return 0
+}
+
+# Validate built-in KMinion metrics
+validate_builtin_metrics() {
+    log_info "Validating built-in KMinion metrics..."
+
+    local metrics
+    metrics=$(fetch_metrics "$METRICS_URL") || return 1
+
+    # Core exporter metrics
+    local core_metrics=(
+        "kminion_exporter_up"
+        "kminion_exporter_offset_consumer_records_consumed_total"
+    )
+
+    # Kafka cluster metrics
+    local kafka_metrics=(
+        "kminion_kafka_cluster_info"
+        "kminion_kafka_broker_info"
+    )
+
+    # Topic metrics
+    local topic_metrics=(
+        "kminion_kafka_topic_info"
+        "kminion_kafka_topic_partition_high_water_mark"
+        "kminion_kafka_topic_high_water_mark_sum"
+        "kminion_kafka_topic_partition_low_water_mark"
+        "kminion_kafka_topic_low_water_mark_sum"
+    )
+
+    # Consumer group metrics
+    local consumer_group_metrics=(
+        "kminion_kafka_consumer_group_info"
+        "kminion_kafka_consumer_group_members"
+        "kminion_kafka_consumer_group_topic_lag"
+    )
+
+    # Log dir metrics
+    local logdir_metrics=(
+        "kminion_kafka_broker_log_dir_size_total_bytes"
+        "kminion_kafka_topic_log_dir_size_total_bytes"
+    )
+
+    local missing_metrics=()
+
+    # Check all metric categories
+    for metric in "${core_metrics[@]}" "${kafka_metrics[@]}" "${topic_metrics[@]}" "${consumer_group_metrics[@]}" "${logdir_metrics[@]}"; do
+        if ! echo "$metrics" | grep -q "^${metric}"; then
+            missing_metrics+=("$metric")
+        fi
+    done
+
+    if [ ${#missing_metrics[@]} -ne 0 ]; then
+        log_error "Missing required built-in metrics:"
+        printf '  - %s\n' "${missing_metrics[@]}"
+        echo ""
+        log_info "Available metrics:"
+        echo "$metrics" | grep "^kminion_"
+        return 1
+    fi
+
+    # Validate specific metric values
+    local exporter_up
+    exporter_up=$(echo "$metrics" | grep "^kminion_exporter_up" | awk '{print $2}')
+    if [ "$exporter_up" != "1" ]; then
+        log_error "kminion_exporter_up should be 1, got: $exporter_up"
+        return 1
+    fi
+
+    # Check that cluster info has broker_count label
+    if ! echo "$metrics" | grep "kminion_kafka_cluster_info" | grep -q "broker_count"; then
+        log_error "kminion_kafka_cluster_info missing broker_count label"
+        return 1
+    fi
+
+    # Check that we have broker info for at least one broker
+    local broker_count
+    broker_count=$(echo "$metrics" | grep "^kminion_kafka_broker_info" | wc -l | tr -d ' ')
+    if [ "$broker_count" -lt 1 ]; then
+        log_error "No broker info metrics found"
+        return 1
+    fi
+
+    log_info "✅ Built-in metrics validation passed"
+    log_info "   Exporter up: $exporter_up"
+    log_info "   Brokers detected: $broker_count"
+    return 0
+}
+
+# Wait for KMinion to be ready
+wait_for_kminion() {
+    log_info "Waiting for KMinion to be ready (timeout: ${WAIT_TIMEOUT}s)..."
+
+    local attempt=0
+    while [ $attempt -lt "$WAIT_TIMEOUT" ]; do
+        if curl -sf "$METRICS_URL" > /dev/null 2>&1; then
+            log_info "KMinion is ready!"
+            return 0
+        fi
+        attempt=$((attempt + 1))
+        sleep 1
+    done
+
+    log_error "KMinion did not become ready within ${WAIT_TIMEOUT}s"
+    return 1
+}
+
+# Main test execution
+main() {
+    log_info "Starting KMinion integration tests"
+    log_info "Metrics URL: $METRICS_URL"
+
+    # Wait for KMinion to be ready
+    if ! wait_for_kminion; then
+        exit 1
+    fi
+
+    # Give E2E tests time to produce metrics
+    log_info "Waiting ${WAIT_TIMEOUT}s for E2E tests to produce metrics..."
+    sleep "$WAIT_TIMEOUT"
+
+    local failed=0
+
+    # Run E2E metrics validation
+    if ! validate_e2e_metrics; then
+        failed=1
+    fi
+
+    # Run built-in metrics validation
+    if ! validate_builtin_metrics; then
+        failed=1
+    fi
+
+    if [ $failed -eq 0 ]; then
+        log_info "🎉 All integration tests passed!"
+        return 0
+    else
+        log_error "❌ Some integration tests failed"
+        return 1
+    fi
+}
+
+# Run main if executed directly
+if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
+    main "$@"
+fi
+
diff --git a/e2e/bin/setup-kafka.sh b/e2e/bin/setup-kafka.sh
new file mode 100755
index 00000000..341e78aa
--- /dev/null
+++ b/e2e/bin/setup-kafka.sh
@@ -0,0 +1,204 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Kafka Setup Script for E2E Testing
+# Starts a single-node Kafka 4.x cluster in KRaft mode in Docker
+
+KAFKA_VERSION="${KAFKA_VERSION:-4.1.0}"
+KAFKA_PORT="${KAFKA_PORT:-9092}"
+CONTAINER_NAME="${CONTAINER_NAME:-broker}"
+WAIT_TIMEOUT="${WAIT_TIMEOUT:-60}"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+log_info() {
+    echo -e "${GREEN}[INFO]${NC} $*"
+}
+
+log_warn() {
+    echo -e "${YELLOW}[WARN]${NC} $*"
+}
+
+log_error() {
+    echo -e "${RED}[ERROR]${NC} $*"
+}
+
+# Check if Kafka container is already running
+check_existing_container() {
+    if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
+        log_warn "Container '$CONTAINER_NAME' already exists"
+        log_info "Removing existing container..."
+        docker rm -f "$CONTAINER_NAME" > /dev/null 2>&1 || true
+    fi
+}
+
+# Start Kafka container
+start_kafka() {
+    log_info "Starting Kafka $KAFKA_VERSION with KRaft mode..."
+
+    docker run -d \
+        --name "$CONTAINER_NAME" \
+        -p "${KAFKA_PORT}:9092" \
+        -p 9093:9093 \
+        -e KAFKA_NODE_ID=1 \
+        -e KAFKA_PROCESS_ROLES=broker,controller \
+        -e KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093 \
+        -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:${KAFKA_PORT} \
+        -e KAFKA_CONTROLLER_LISTENER_NAMES=CONTROLLER \
+        -e KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT \
+        -e KAFKA_CONTROLLER_QUORUM_VOTERS=1@localhost:9093 \
+        -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \
+        -e KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1 \
+        -e KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1 \
+        -e KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 \
+        -e KAFKA_NUM_PARTITIONS=3 \
+        "apache/kafka:${KAFKA_VERSION}"
+
+    log_info "Kafka container started: $CONTAINER_NAME"
+}
+
+# Wait for Kafka to be ready
+wait_for_kafka() {
+    log_info "Waiting for Kafka to be ready (timeout: ${WAIT_TIMEOUT}s)..."
+
+    local attempt=0
+    local ready=false
+
+    while [ $attempt -lt "$WAIT_TIMEOUT" ]; do
+        # Try to get broker API versions directly
+        if docker exec "$CONTAINER_NAME" /opt/kafka/bin/kafka-broker-api-versions.sh \
+            --bootstrap-server "localhost:${KAFKA_PORT}" > /dev/null 2>&1; then
+            log_info "✅ Kafka is ready and responding to requests!"
+            ready=true
+            break
+        fi
+
+        attempt=$((attempt + 1))
+        if [ $((attempt % 10)) -eq 0 ]; then
+            log_info "Attempt $attempt/$WAIT_TIMEOUT: Kafka not ready yet, waiting..."
+        fi
+        sleep 1
+    done
+
+    if [ "$ready" = false ]; then
+        log_error "Kafka failed to start within ${WAIT_TIMEOUT}s"
+        log_error "Container logs:"
+        docker logs "$CONTAINER_NAME" 2>&1 | tail -50
+        return 1
+    fi
+
+    return 0
+}
+
+# Verify Kafka is running
+verify_kafka() {
+    log_info "Verifying Kafka cluster..."
+
+    if ! docker exec "$CONTAINER_NAME" /opt/kafka/bin/kafka-broker-api-versions.sh \
+        --bootstrap-server "localhost:${KAFKA_PORT}"; then
+        log_error "Failed to verify Kafka cluster"
+        return 1
+    fi
+
+    log_info "Kafka cluster is up and running!"
+    return 0
+}
+
+# Stop and remove Kafka container
+stop_kafka() {
+    log_info "Stopping Kafka container..."
+
+    if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
+        docker stop "$CONTAINER_NAME" > /dev/null 2>&1 || true
+        docker rm "$CONTAINER_NAME" > /dev/null 2>&1 || true
+        log_info "Kafka container stopped and removed"
+    else
+        log_warn "Container '$CONTAINER_NAME' not found"
+    fi
+}
+
+# Show usage
+usage() {
+    cat < "$LOG_FILE" 2>&1 &
+    local pid=$!
+
+    # Save PID to file for later cleanup
+    echo "$pid" > kminion.pid
+
+    log_info "KMinion started with PID: $pid"
+    log_info "Logs: $LOG_FILE"
+
+    echo "$pid"
+}
+
+# Wait for KMinion to be ready
+wait_for_kminion() {
+    local pid=$1
+    log_info "Waiting for KMinion to be ready (timeout: ${WAIT_TIMEOUT}s)..."
+
+    # Give KMinion a few seconds to initialize before checking
+    sleep 3
+
+    local attempt=0
+    while [ $attempt -lt "$WAIT_TIMEOUT" ]; do
+        # Check if metrics endpoint is responding
+        if curl -sf "$METRICS_URL" > /dev/null 2>&1; then
+            log_info "✅ KMinion is ready!"
+            return 0
+        fi
+
+        # Check if process died (only after initial startup period)
+        if [ $attempt -gt 5 ]; then
+            if ! kill -0 "$pid" 2>/dev/null; then
+                log_error "KMinion process died unexpectedly"
+                log_error "Last 30 lines of log:"
+                tail -30 "$LOG_FILE" 2>&1 || true
+                return 1
+            fi
+        fi
+
+        attempt=$((attempt + 1))
+        if [ $((attempt % 5)) -eq 0 ]; then
+            log_info "Attempt $attempt/$WAIT_TIMEOUT: KMinion not ready yet, waiting..."
+        fi
+        sleep 1
+    done
+
+    log_error "KMinion failed to start within ${WAIT_TIMEOUT}s"
+    log_error "Last 50 lines of log:"
+    tail -50 "$LOG_FILE" 2>&1 || true
+    return 1
+}
+
+# Stop KMinion
+stop_kminion() {
+    if [ -f kminion.pid ]; then
+        local pid
+        pid=$(cat kminion.pid)
+        log_info "Stopping KMinion (PID: $pid)..."
+
+        if kill -0 "$pid" 2>/dev/null; then
+            kill "$pid" 2>/dev/null || true
+            sleep 2
+
+            # Force kill if still running
+            if kill -0 "$pid" 2>/dev/null; then
+                log_warn "Force killing KMinion..."
+                kill -9 "$pid" 2>/dev/null || true
+            fi
+        fi
+
+        rm -f kminion.pid
+        log_info "KMinion stopped"
+    else
+        log_warn "No PID file found"
+    fi
+}
+
+# Show usage
+usage() {
+    cat </dev/null; then
+            log_info "KMinion is running (PID: $pid)"
+            if curl -sf "$METRICS_URL" > /dev/null 2>&1; then
+                log_info "Metrics endpoint is responding"
+            else
+                log_warn "Metrics endpoint is not responding"
+            fi
+            exit 0
+        else
+            log_warn "PID file exists but process is not running"
+            exit 1
+        fi
+        else
+            log_warn "KMinion is not running"
+            exit 1
+        fi
+        ;;
+    logs)
+        if [ -f "$LOG_FILE" ]; then
+            tail -f "$LOG_FILE"
+        else
+            log_error "Log file not found: $LOG_FILE"
+            exit 1
+        fi
+        ;;
+    help|--help|-h)
+        usage
+        ;;
+    *)
+        log_error "Unknown command: $command"
+        usage
+        exit 1
+        ;;
+    esac
+}
+
+# Run main if executed directly
+if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
+    main "$@"
+fi
+
diff --git a/e2e/bin/test-config.yaml b/e2e/bin/test-config.yaml
new file mode 100644
index 00000000..910b3fd4
--- /dev/null
+++ b/e2e/bin/test-config.yaml
@@ -0,0 +1,49 @@
+kafka:
+  brokers:
+    - localhost:9092  # must match the port exposed by e2e/bin/setup-kafka.sh (KAFKA_PORT)
+  clientId: kminion-e2e-test
+
+minion:
+  consumerGroups:
+    enabled: true
+    scrapeMode: adminApi
+    granularity: partition
+    allowedGroups: [".*"]
+    ignoredGroups: []
+
+  topics:
+    enabled: true
+    granularity: partition
+    allowedTopics: [".*"]
+    ignoredTopics: []
+    infoMetric:
+      configKeys: ["cleanup.policy"]
+
+  logDirs:
+    enabled: true
+
+  endToEnd:
+    enabled: true
+    probeInterval: 500ms
+    topicManagement:
+      enabled: true
+      name: kminion-end-to-end
+      replicationFactor: 1
+      partitionsPerBroker: 1
+      reconciliationInterval: 10s
+    producer:
+      ackSla: 5s
+      requiredAcks: all
+    consumer:
+      groupIdPrefix: kminion-end-to-end
+      deleteStaleConsumerGroups: false
+      roundtripSla: 10s
+      commitSla: 5s
+
+exporter:
+  namespace: kminion
+  port: 8080  # metrics endpoint checked by integration-test.sh (METRICS_URL)
+
+logger:
+  level: info