diff --git a/examples/kibana-sample-data-ab-testing/docker-compose.yml b/examples/kibana-sample-data-ab-testing/docker-compose.yml
new file mode 100644
index 000000000..e7b173667
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/docker-compose.yml
@@ -0,0 +1,89 @@
+services:
+  quesma:
+    image: quesma/quesma:latest
+    environment:
+      - QUESMA_elasticsearch_url=http://elasticsearch:9200
+      - QUESMA_port=8080
+      - QUESMA_logging_path=/var/quesma/logs
+      - QUESMA_mode=dual-write-query-clickhouse
+      - QUESMA_CONFIG_FILE=/config/local-dev.yaml
+      - QUESMA_logging_fileLogging=true
+    depends_on:
+      elasticsearch:
+        condition: service_healthy
+    ports:
+      - "9999:9999"
+      - "9200:8080"
+    volumes:
+      - ./quesma/logs/:/var/quesma/logs
+      - ./quesma/config:/config
+    deploy:
+      resources:
+        limits:
+          memory: 512M
+    restart: on-failure
+  log-generator:
+    build: log-generator
+    depends_on:
+      quesma:
+        condition: service_healthy
+    restart: unless-stopped
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.11.1
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
+      - xpack.security.enabled=false
+      - "ES_JAVA_OPTS=-Xmx2G"
+    ports:
+      - "9201:9200"
+      - "9300:9300"
+    healthcheck:
+      test: curl -s http://elasticsearch:9200 >/dev/null || exit 1
+      start_period: 1m
+      interval: 1s
+      timeout: 1s
+    deploy:
+      resources:
+        limits:
+          memory: 4G
+  kibana:
+    image: docker.elastic.co/kibana/kibana:8.11.1
+    environment:
+      ELASTICSEARCH_HOSTS: '["http://quesma:8080"]'
+      XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: 'QUESMAQUESMAQUESMAQUESMAQUESMAQUESMAQUESMAQUESMA' # Just to get rid of annoying ERROR in logs
+    depends_on:
+      quesma:
+        condition: service_healthy
+      elasticsearch:
+        condition: service_healthy
+    ports:
+      - "5601:5601"
+    restart: unless-stopped
+    healthcheck:
+      test: "curl -s http://localhost:5601/api/status >/dev/null || exit 1"
+      start_period: 2m
+      interval: 1s
+      timeout: 1s
+    volumes:
+      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
+  kibana-sidecar:
+    image: docker.elastic.co/kibana/kibana:8.11.1
+    restart: "no"
+    depends_on:
+      kibana:
+        condition: service_healthy
+    volumes:
+      - ./kibana/:/local_mount
+    command: [ "/bin/bash", "-c", "/local_mount/run.sh" ]
+  clickhouse:
+    # user: 'default', no password
+    image: clickhouse/clickhouse-server:24.5.3.5-alpine
+    ports:
+      - "8123:8123"
+      - "9000:9000"
+    healthcheck:
+      test: wget --no-verbose --tries=1 --spider http://clickhouse:8123/ping || exit 1
+      interval: 1s
+      timeout: 1s
+      start_period: 1m
diff --git a/examples/kibana-sample-data-ab-testing/kibana/config/kibana.yml b/examples/kibana-sample-data-ab-testing/kibana/config/kibana.yml
new file mode 100644
index 000000000..e398f94ab
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/kibana/config/kibana.yml
@@ -0,0 +1,23 @@
+server.host: "0.0.0.0"
+
+xpack.security.enabled: false
+xpack.reporting.kibanaServer.hostname: localhost
+
+# We don't need sophisticated monitoring
+monitoring.ui.enabled: false
+
+newsfeed.enabled: false
+
+# No telemetry for Elastic
+telemetry.optIn: false
+telemetry.enabled: false
+
+# Disable several management settings
+xpack.ilm.ui.enabled: false
+xpack.ccr.ui.enabled: false
+xpack.remote_clusters.ui.enabled: false
+xpack.rollup.ui.enabled: false
+xpack.license_management.ui.enabled: false
+xpack.snapshot_restore.ui.enabled: false
+xpack.upgrade_assistant.ui.enabled: false
+xpack.index_management.ui.enabled: false
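The compose file publishes Quesma's Elasticsearch-compatible API on host port 9200 (mapped to the container's 8080) and Kibana on 5601. A minimal Go smoke test against that endpoint, assuming the port mappings above and a stack started with `docker compose up`:

```go
// Smoke test: checks that Quesma answers on the Elasticsearch-compatible port
// published by docker-compose (host 9200 -> container 8080). The address is an
// assumption taken from the "9200:8080" mapping above.
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:9200/") // Quesma's elasticsearch-fe-query listener
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("HTTP %d\n%s\n", resp.StatusCode, body)
}
```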
diff --git a/examples/kibana-sample-data-ab-testing/kibana/run.sh b/examples/kibana-sample-data-ab-testing/kibana/run.sh
new file mode 100755
index 000000000..122000266
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/kibana/run.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+echo "$@"
+DASHBOARD_URL="http://kibana:5601"
+
+if [ -z "$XSRF_HEADER" ]; then
+  XSRF_HEADER="kbn-xsrf: true"
+fi
+
+if [ -n "$ELASTICSEARCH_USER" ]; then
+  echo "Using Basic Authentication"
+  MAYBE_AUTH="-u $ELASTICSEARCH_USER:$ELASTICSEARCH_PASSWORD"
+fi
+
+wait_until_available() {
+  local http_code
+
+  echo "Waiting until '$DASHBOARD_URL' is available..."
+  while [ "$http_code" != "200" ]; do
+    http_code=$(curl --no-progress-meter -k -s -w "%{http_code}" -XGET "$DASHBOARD_URL/api/status" -o /dev/null)
+    echo "HTTP Status Code: $http_code"
+
+    if [ "$http_code" != "200" ]; then
+      echo "Retrying in a second..."
+      sleep 1
+    fi
+  done
+
+  echo "'$DASHBOARD_URL' is available"
+}
+
+do_http_post() {
+  local url=$1
+  local body=$2
+
+  curl --no-progress-meter -k ${MAYBE_AUTH} -X POST "$DASHBOARD_URL/$url" \
+    -H "$XSRF_HEADER" \
+    -H 'Content-Type: application/json' \
+    -d "$body"
+}
+
+do_silent_http_post() {
+  local url=$1
+  local body=$2
+
+  curl -w "HTTP %{http_code}" -k -o /dev/null --no-progress-meter ${MAYBE_AUTH} -X POST "$DASHBOARD_URL/$url" \
+    -H "$XSRF_HEADER" \
+    -H 'Content-Type: application/json' \
+    -d "$body"
+}
+
+add_sample_dataset() {
+  local sample_data=$1
+  START_TIME=$(date +%s)
+  echo "Adding $sample_data dataset"
+  do_http_post "api/sample_data/$sample_data" ''
+  END_TIME=$(date +%s)
+  echo -e "\nAdded $sample_data dataset, took $((END_TIME-START_TIME)) seconds"
+}
+
+
+wait_until_available
+
+add_sample_dataset "flights"
+add_sample_dataset "logs"
+add_sample_dataset "ecommerce"
+
+echo -n "Adding data view logs-generic... "
+do_silent_http_post "api/data_views/data_view" '{
+  "data_view": {
+    "name": "Logs Generator",
+    "title": "logs-generic-*",
+    "id": "logs-generic",
+    "timeFieldName": "@timestamp",
+    "allowNoIndex": true
+  },
+  "override": true
+}'
+echo ""
+echo -e "\nData views added."
+
diff --git a/examples/kibana-sample-data-ab-testing/log-generator/Dockerfile b/examples/kibana-sample-data-ab-testing/log-generator/Dockerfile
new file mode 100644
index 000000000..b57b646e6
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/log-generator/Dockerfile
@@ -0,0 +1,11 @@
+FROM golang:alpine AS builder
+
+ADD logger.go /logger.go
+
+RUN go build -o /service /logger.go
+
+FROM scratch
+
+COPY --from=builder /service .
+
+ENTRYPOINT [ "/service" ]
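run.sh drives Kibana's sample-data API with plain HTTP calls, so the same bootstrap can be scripted in Go if bash is inconvenient. A sketch of the `add_sample_dataset` step, assuming the `kbn-xsrf: true` header the script defaults to and Kibana reachable at `localhost:5601` from the host:

```go
// Go equivalent of run.sh's add_sample_dataset: POST to Kibana's sample-data
// API with the kbn-xsrf header. The base URL is an assumption (host-side view
// of the "5601:5601" mapping in the compose file).
package main

import (
	"fmt"
	"log"
	"net/http"
)

func addSampleDataset(kibanaURL, name string) error {
	req, err := http.NewRequest(http.MethodPost,
		fmt.Sprintf("%s/api/sample_data/%s", kibanaURL, name), nil)
	if err != nil {
		return err
	}
	// Kibana rejects API writes without this header, hence XSRF_HEADER in run.sh.
	req.Header.Set("kbn-xsrf", "true")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	fmt.Printf("%s: HTTP %d\n", name, resp.StatusCode)
	return nil
}

func main() {
	for _, ds := range []string{"flights", "logs", "ecommerce"} {
		if err := addSampleDataset("http://localhost:5601", ds); err != nil {
			log.Fatal(err)
		}
	}
}
```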
diff --git a/examples/kibana-sample-data-ab-testing/log-generator/logger.go b/examples/kibana-sample-data-ab-testing/log-generator/logger.go
new file mode 100644
index 000000000..dc5af551c
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/log-generator/logger.go
@@ -0,0 +1,58 @@
+// Copyright Quesma, licensed under the Elastic License 2.0.
+// SPDX-License-Identifier: Elastic-2.0
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"log"
+	"math/rand"
+	"net/http"
+	"time"
+)
+
+const url = "http://quesma:8080/logs-generic-default/_doc"
+
+func main() {
+	hostNames := []string{"zeus", "cassandra", "hercules",
+		"oracle", "athena", "jupiter", "poseidon", "hades", "artemis", "apollo", "demeter",
+		"dionysus", "hephaestus", "hermes", "hestia", "iris", "nemesis", "pan", "persephone", "prometheus", "selen"}
+
+	serviceNames := []string{"frontend", "backend", "database", "cache", "queue", "monitoring", "loadbalancer", "proxy",
+		"storage", "auth", "api", "web", "worker", "scheduler", "cron", "admin", "service", "gateway", "service", "service", "service"}
+
+	sourceNames := []string{"kubernetes", "ubuntu", "debian", "centos", "redhat", "fedora", "arch", "gentoo", "alpine", "suse",
+		"rhel", "coreos", "docker", "rancher", "vmware", "xen", "hyperv", "openstack", "aws", "gcp", "azure", "digitalocean"}
+
+	severityNames := []string{"info", "info", "info", "info", "info", "info", "warning", "error", "critical", "debug", "debug", "debug"}
+
+	messageNames := []string{"User logged in", "User logged out", "User created", "User deleted", "User updated",
+		"User password changed", "User password reset", "User password reset requested", "User password reset failed"}
+
+	for {
+		time.Sleep(time.Duration(1000+rand.Intn(2000)) * time.Millisecond)
+
+		body, err := json.Marshal(map[string]string{
+			// Please keep using OpenTelemetry names for the fields:
+			// https://opentelemetry.io/docs/specs/semconv/resource/
+			"@timestamp":   time.Now().UTC().Format("2006-01-02T15:04:05.999Z"), // UTC() so the literal 'Z' suffix is accurate
+			"message":      messageNames[rand.Intn(len(messageNames))],
+			"severity":     severityNames[rand.Intn(len(severityNames))],
+			"source":       sourceNames[rand.Intn(len(sourceNames))],
+			"service.name": serviceNames[rand.Intn(len(serviceNames))],
+			"host.name":    hostNames[rand.Intn(len(hostNames))],
+		})
+
+		if err != nil {
+			log.Fatal(err)
+		}
+
+		resp, err := http.Post(url, "application/json", bytes.NewBuffer(body))
+
+		if err != nil {
+			log.Fatal(err)
+		}
+
+		resp.Body.Close()
+	}
+}
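logger.go posts one document per request to the `_doc` endpoint, which keeps the generator simple. For higher volumes, the standard Elasticsearch `_bulk` API is the usual alternative; whether Quesma's ingest frontend accepts `_bulk` for this index is an assumption here, not something this diff demonstrates. A hedged sketch:

```go
// Sketch of batched ingest via the standard Elasticsearch _bulk API, sending
// NDJSON (an action line, then the document, per entry). ASSUMPTION: Quesma's
// elasticsearch-fe-ingest endpoint handles _bulk like Elasticsearch does.
package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
)

func main() {
	docs := []map[string]string{
		{"message": "User logged in", "severity": "info"},
		{"message": "User logged out", "severity": "info"},
	}

	var buf bytes.Buffer
	for _, doc := range docs {
		buf.WriteString(`{"create":{}}` + "\n") // "create" is the action used for data streams
		line, err := json.Marshal(doc)
		if err != nil {
			log.Fatal(err)
		}
		buf.Write(line)
		buf.WriteByte('\n')
	}

	resp, err := http.Post("http://quesma:8080/logs-generic-default/_bulk",
		"application/x-ndjson", &buf)
	if err != nil {
		log.Fatal(err)
	}
	resp.Body.Close()
}
```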
diff --git a/examples/kibana-sample-data-ab-testing/quesma/config/local-dev.yaml b/examples/kibana-sample-data-ab-testing/quesma/config/local-dev.yaml
new file mode 100644
index 000000000..d54fd0e93
--- /dev/null
+++ b/examples/kibana-sample-data-ab-testing/quesma/config/local-dev.yaml
@@ -0,0 +1,181 @@
+#licenseKey: {your-quesma-license-key-if-needed} # license key is required for paid features such as the `hydrolix` backend connector
+frontendConnectors:
+  - name: elastic-ingest
+    type: elasticsearch-fe-ingest
+    config:
+      listenPort: 8080
+      disableAuth: true
+  - name: elastic-query
+    type: elasticsearch-fe-query
+    config:
+      listenPort: 8080
+backendConnectors:
+  - name: my-minimal-elasticsearch
+    type: elasticsearch
+    config:
+      url: "http://elasticsearch:9200"
+      adminUrl: "http://localhost:5601"
+  - name: my-clickhouse-data-source
+    type: clickhouse-os
+    config:
+      url: "clickhouse://clickhouse:9000"
+      adminUrl: "http://localhost:8123/play"
+ingestStatistics: true
+logging:
+  path: "logs"
+  level: "info"
+  disableFileLogging: false
+processors:
+  - name: my-query-processor
+    type: quesma-v1-processor-query
+    config:
+      indexes:
+        kibana_sample_data_ecommerce:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              "geoip.location":
+                type: geo_point
+              "products.manufacturer":
+                type: text
+              "products.product_name":
+                type: text
+              category:
+                type: text
+              manufacturer:
+                type: text
+        kibana_sample_data_flights:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              "Carrier":
+                type: keyword
+              "DestLocation":
+                type: geo_point
+              "OriginLocation":
+                type: geo_point
+        kibana_sample_data_logs:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              timestamp:
+                type: alias
+                targetColumnName: "@timestamp"
+              ip:
+                type: ip
+              clientip:
+                type: ip
+              "geo.coordinates":
+                type: geo_point
+              "geo.src":
+                type: keyword
+        logs-generic-default:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              timestamp:
+                type: alias
+                targetColumnName: "@timestamp"
+              message:
+                type: text
+              "host.name":
+                type: text
+              "service.name":
+                type: "keyword"
+              source:
+                type: "keyword"
+              severity:
+                type: "keyword"
+        "*": # DO NOT remove, always required
+          target:
+            - my-minimal-elasticsearch
+            - my-clickhouse-data-source
+  - name: my-ingest-processor
+    type: quesma-v1-processor-ingest
+    config:
+      indexes:
+        kibana_sample_data_ecommerce:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              "geoip.location":
+                type: geo_point
+              "products.manufacturer":
+                type: text
+              "products.product_name":
+                type: text
+              category:
+                type: text
+              manufacturer:
+                type: text
+        kibana_sample_data_flights:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              "Carrier":
+                type: keyword
+              "DestLocation":
+                type: geo_point
+              "OriginLocation":
+                type: geo_point
+        kibana_sample_data_logs:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              timestamp:
+                type: alias
+                targetColumnName: "@timestamp"
+              ip:
+                type: ip
+              clientip:
+                type: ip
+              "geo.coordinates":
+                type: geo_point
+              "geo.src":
+                type: keyword
+        logs-generic-default:
+          target:
+            - my-clickhouse-data-source
+            - my-minimal-elasticsearch
+          schemaOverrides:
+            fields:
+              timestamp:
+                type: alias
+                targetColumnName: "@timestamp"
+              message:
+                type: text
+              "host.name":
+                type: text
+              "service.name":
+                type: "keyword"
+              source:
+                type: "keyword"
+              severity:
+                type: "keyword"
+        "*":
+          target:
+            - my-minimal-elasticsearch
+            - my-clickhouse-data-source
+pipelines:
+  - name: my-pipeline-elasticsearch-query-clickhouse
+    frontendConnectors: [ elastic-query ]
+    processors: [ my-query-processor ]
+    backendConnectors: [ my-minimal-elasticsearch, my-clickhouse-data-source ]
+  - name: my-pipeline-elasticsearch-ingest-to-clickhouse
+    frontendConnectors: [ elastic-ingest ]
+    processors: [ my-ingest-processor ]
+    backendConnectors: [ my-minimal-elasticsearch, my-clickhouse-data-source ]
\ No newline at end of file
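With both processors dual-targeting ClickHouse and Elasticsearch, a quick way to exercise the query pipeline is an ordinary `_search` against one of the dual-written indexes. A small Go sketch, assuming the host-side `9200:8080` mapping from the compose file; the query body is just an illustrative match on the `severity` field that logger.go populates:

```go
// Issues a standard Elasticsearch _search through Quesma's query frontend.
// localhost:9200 is an assumption (host-side view of the "9200:8080" mapping).
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"
)

func main() {
	query := `{"query":{"match":{"severity":"error"}},"size":3}`
	resp, err := http.Post(
		"http://localhost:9200/logs-generic-default/_search",
		"application/json",
		strings.NewReader(query),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("HTTP %d\n%s\n", resp.StatusCode, body)
}
```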