-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathMakefile
More file actions
88 lines (68 loc) · 3.63 KB
/
Makefile
File metadata and controls
88 lines (68 loc) · 3.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# All recipes use bash (the `cli` target relies on bashisms such as `export -f`).
SHELL := /usr/bin/env bash

# `make` with no target prints the help screen instead of guessing a default.
.DEFAULT_GOAL := help

# Remove a half-written target file when its recipe fails, so it can never
# masquerade as up to date on the next run.
.DELETE_ON_ERROR:

# This file defines no file-producing rules; disable the built-in suffix
# rules so make does not stat candidate source files for every target.
MAKEFLAGS += --no-builtin-rules
.SUFFIXES:

# Single knob for switching back to legacy `docker-compose` if needed.
COMPOSE := docker compose

# Every target here is a command, not a file; declare them phony so a stray
# file with the same name cannot shadow them.
.PHONY: help up down clean demo demo-fresh cli cli-example query logs ps \
	scenario scenario-list test test-unit test-scenarios test-smoke \
	lint format
# Self-documenting help: scan this makefile for `target: ## description`
# lines and print them aligned and colorized. FS splits each matching line
# into target name ($$1) and description ($$2); the doubled `$$` escapes
# `$` from make so awk receives a literal `$`.
help: ## Show targets
	@awk 'BEGIN{FS=":.*##"} /^[a-zA-Z0-9_-]+:.*##/ {printf " \033[1;36m%-20s\033[0m %s\n",$$1,$$2}' $(MAKEFILE_LIST)
# Thin wrappers: all demo orchestration logic lives in scripts/demo.sh.
demo: ## End-to-end scripted demo: cluster up → browser → scenarios → results
	@./scripts/demo.sh

# `--fresh` wipes existing state first, which forces license re-validation.
demo-fresh: ## Same as demo, but wipes state first (forces license re-validation)
	@./scripts/demo.sh --fresh
# setup.sh prompts for the license email (first run only) and writes .env;
# the banner reminds the user to click the emailed validation link, since
# the cluster is not usable until validation completes.
up: ## Prompt for email (if needed), write .env, then bring the cluster up
	@./scripts/setup.sh
	@echo
	@echo "============================================================="
	@echo " InfluxDB 3 Enterprise cluster (5 nodes) is starting."
	@echo " Check the email you provided and CLICK THE VALIDATION LINK."
	@echo " The simulator and UI will start automatically once validation"
	@echo " completes. UI: http://localhost:8080 API: http://localhost:8181"
	@echo "============================================================="
	@$(COMPOSE) up -d
# `down` keeps the data volume; `clean` drops it with -v, which also
# discards the stored license state (hence re-validation next time).
down: ## Stop services (preserves data volume)
	@$(COMPOSE) down

clean: ## Stop services and drop the data volume (requires re-validation next time)
	@$(COMPOSE) down -v

logs: ## Tail all service logs
	@$(COMPOSE) logs -f

ps: ## Show service status
	@$(COMPOSE) ps
# Interactive shell on the query node with TOKEN pre-exported and an `iql`
# helper function defined (`export -f` is a bashism, hence bash). The whole
# recipe is ONE backslash-continued line inside single quotes; the doubled
# `$$` escapes `$` from make so the container's shell expands $(cat ...)
# and $TOKEN itself. `exec bash` replaces the setup shell with the
# interactive one so the exported function and TOKEN remain available.
cli: ## Shell into the QUERY node; TOKEN exported, `iql <sql>` runs queries
	@$(COMPOSE) exec influxdb3-query bash -c '\
	export TOKEN=$$(cat /var/lib/influxdb3/.nt-token-plain); \
	iql() { influxdb3 query --database nt --token "$$TOKEN" "$$1"; }; \
	export -f iql; \
	echo ""; \
	echo " TOKEN is exported. Try:"; \
	echo " iql \"SELECT COUNT(*) FROM interface_counters\""; \
	echo " iql \"SELECT * FROM bgp_sessions LIMIT 5\""; \
	echo ""; \
	exec bash'
# One-shot SQL. The statement travels into the container via the SQL
# environment variable (-e) rather than being interpolated into the inner
# bash -c string, so quotes inside $(sql) cannot break the command.
# The leading `test -n` gives a usage message instead of running an empty query.
query: ## One-shot query against the query node. Usage: make query sql='SELECT 1'
	@test -n "$(sql)" || (echo "usage: make query sql='<SQL>'"; exit 1)
	@$(COMPOSE) exec -T -e "SQL=$(sql)" influxdb3-query bash -c 'TOKEN=$$(cat /var/lib/influxdb3/.nt-token-plain); influxdb3 query --database nt --token "$$TOKEN" "$$SQL"'
# Extract the fenced ```bash block that follows the `## <name>` heading in
# CLI_EXAMPLES.md and run it line by line on the query node. `-A 20` is a
# heuristic upper bound on block length; the sed pair keeps only the fence
# interior. Each command is echoed with a `+` prefix before running.
# Fixes over the naive pipeline: an unknown example name is reported as an
# error instead of silently succeeding, and the first failing command aborts
# the recipe with a non-zero status (a bare `while read` loop always exits 0).
cli-example: ## Run a named CLI example. Usage: make cli-example name=list-databases
	@test -n "$(name)" || (echo "usage: make cli-example name=<example>"; exit 1)
	@grep -q "^## $(name)" CLI_EXAMPLES.md || (echo "unknown example: $(name)"; exit 1)
	@grep -A 20 "^## $(name)" CLI_EXAMPLES.md | sed -n '/^```bash/,/^```/p' | sed '1d;$$d' \
	| while read -r line; do \
		echo "+ $$line"; \
		$(COMPOSE) exec -T influxdb3-query bash -lc "export TOKEN=\$$(cat /var/lib/influxdb3/.nt-token-plain); $$line" || exit 1; \
	done
# Runs one scenario in a throwaway container (`run --rm`); the scenarios
# service sits behind a compose profile so it never starts with `make up`.
# The scenario module to execute is selected via the SCENARIO env var.
scenario: ## Run a scenario. Usage: make scenario name=congestion_hotspot
	@test -n "$(name)" || (echo "usage: make scenario name=<scenario>"; exit 1)
	@SCENARIO=$(name) $(COMPOSE) --profile scenarios run --rm scenarios
# List every scenario module under simulator/scenarios/ with the first line
# of its module docstring as a description. Uses a shell glob instead of
# parsing `ls`, a `case` filter instead of GNU-only `grep -v "a\|b"` (the
# \| alternation is not portable BRE), and skips cleanly when no scenario
# files exist instead of printing a garbage `*` row.
scenario-list: ## List available scenarios
	@for f in simulator/scenarios/*.py; do \
		[ -e "$$f" ] || continue; \
		n=$$(basename "$$f" .py); \
		case "$$n" in *_base*|*__init__*) continue;; esac; \
		desc=$$(grep -m1 '^"""' "$$f" | sed 's/"""//g'); \
		printf " %-32s %s\n" "$$n" "$$desc"; \
	done
# Aggregate target: runs the fast suites. Smoke tests are deliberately
# excluded because they need the real 5-node compose cluster running.
test: test-unit test-scenarios ## Run unit + scenario tests (skip smoke)

# Suites are selected by pytest markers (`scenario`, `smoke`); unit tests
# are everything carrying neither marker.
test-unit: ## Plugin + signal + query unit tests (no docker)
	@pytest tests -q -m "not scenario and not smoke"

test-scenarios: ## Scenario integration tests (uses testcontainers; multi-node!)
	@pytest tests/test_scenarios -q -m scenario

test-smoke: ## End-to-end smoke against the real 5-node compose (slow)
	@pytest tests/test_smoke.py -q -m smoke
# `lint` is read-only (safe for CI gates); `format` rewrites files in place.
lint: ## Check formatting and lint
	@ruff check .
	@ruff format --check .

format: ## Auto-fix formatting
	@ruff check --fix .
	@ruff format .