Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
3edb205
wip: influxdb tools
Shaik-Sirajuddin Feb 25, 2026
9e3d11f
feat(influx): add metadata tools(buckets,measurements,tags,keys)
Shaik-Sirajuddin Feb 28, 2026
e4eb162
chore(docker): add influxdb2,3 for integration tests
Shaik-Sirajuddin Mar 1, 2026
63abec0
wip(influx-tools): add measurement,tag,field list tools
Shaik-Sirajuddin Mar 1, 2026
70da93c
chore(docker): docker container for influxdb integration tests
Shaik-Sirajuddin Mar 2, 2026
da08012
test(influxdb): add query , limit tests
Shaik-Sirajuddin Mar 2, 2026
796c904
feat(influxdb): apply limits for queries
Shaik-Sirajuddin Mar 2, 2026
27f25c9
chore(docker): entrypoints for influxdb init scripts
Shaik-Sirajuddin Mar 2, 2026
e6fd613
chore(influxdb): update hint context keys
Shaik-Sirajuddin Mar 2, 2026
1d2642b
chore(influx): add len checks
Shaik-Sirajuddin Mar 3, 2026
bc6acf9
Merge branch 'main' of https://github.com/grafana/mcp-grafana into in…
Shaik-Sirajuddin Mar 23, 2026
f07bd52
Update tools/influx_db.go
Shaik-Sirajuddin Mar 23, 2026
55f1894
Update tools/influx_db.go
Shaik-Sirajuddin Mar 23, 2026
2846bdf
chore(tools): type response definitions influxdb tools , grafana clie…
Shaik-Sirajuddin Mar 23, 2026
7337fb1
chore(tools): unit tests for influxdb tools
Shaik-Sirajuddin Mar 23, 2026
366609d
Merge branch 'influx_datasource' of https://github.com/Shaik-Sirajudd…
Shaik-Sirajuddin Mar 23, 2026
04fc22a
chore(tools): apply newline for influxql queries
Shaik-Sirajuddin Mar 23, 2026
d3e4e3a
lint(tools): fix lint errors
Shaik-Sirajuddin Mar 23, 2026
0c2a00b
lint(tools): fix lint errors
Shaik-Sirajuddin Mar 23, 2026
24650b1
refactor(tools): use dsquery response types from grafana package
Shaik-Sirajuddin Mar 24, 2026
18bd52b
refactor: rename fluxql to flux in influxdb tool descriptions and tests
Shaik-Sirajuddin Mar 24, 2026
60ce708
chore(tools): replace manual slices cap increase operation with slice…
Shaik-Sirajuddin Mar 25, 2026
93bd6d7
fix(tools): fix failing integration tests
Shaik-Sirajuddin Mar 25, 2026
88a3eae
Merge branch 'main' of https://github.com/grafana/mcp-grafana into in…
Shaik-Sirajuddin Mar 25, 2026
c400395
chore(tools): enhance query limit application to support cte influx
Shaik-Sirajuddin Mar 30, 2026
ac563f6
refactor(tools): rename tool_name influxdb tools
Shaik-Sirajuddin Mar 30, 2026
41d23d6
chore(tools): escape special characters influxql
Shaik-Sirajuddin Mar 31, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion cmd/mcp-grafana/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ type disabledTools struct {
dashboard, folder, oncall, asserts, sift, admin,
pyroscope, navigation, proxied, annotations, rendering, cloudwatch, write,
examples, clickhouse, searchlogs,
runpanelquery bool
runpanelquery, influxdb bool
}

// Configuration for the Grafana client.
Expand Down Expand Up @@ -89,6 +89,7 @@ func (dt *disabledTools) addFlags() {
flag.BoolVar(&dt.clickhouse, "disable-clickhouse", false, "Disable ClickHouse tools")
flag.BoolVar(&dt.searchlogs, "disable-searchlogs", false, "Disable search logs tools")
flag.BoolVar(&dt.runpanelquery, "disable-runpanelquery", false, "Disable run panel query tools")
flag.BoolVar(&dt.influxdb, "disable-influxdb", false, "Disable InfluxDb tools")
}

func (gc *grafanaConfig) addFlags() {
Expand Down Expand Up @@ -129,6 +130,7 @@ func (dt *disabledTools) addTools(s *server.MCPServer) {
maybeAddTools(s, tools.AddClickHouseTools, enabledTools, dt.clickhouse, "clickhouse")
maybeAddTools(s, tools.AddSearchLogsTools, enabledTools, dt.searchlogs, "searchlogs")
maybeAddTools(s, tools.AddRunPanelQueryTools, enabledTools, dt.runpanelquery, "runpanelquery")
maybeAddTools(s, tools.AddInfluxTools, enabledTools, dt.influxdb, "influxdb")
}

func newServer(transport string, dt disabledTools, obs *observability.Observability) (*server.MCPServer, *mcpgrafana.ToolManager) {
Expand Down Expand Up @@ -183,6 +185,7 @@ Available Capabilities:
- Prometheus & Loki: Run PromQL and LogQL queries, retrieve metric/log metadata, and explore label names/values.
- ClickHouse: Query ClickHouse datasources via Grafana with macro and variable substitution support.
- Elasticsearch: Query Elasticsearch datasources using Lucene syntax or Query DSL for logs and metrics.
- InfluxDB: Query InfluxDB datasources with SQL, InfluxQL, Flux languages
- Incidents: Search, create, update, and resolve incidents in Grafana Incident.
- Sift Investigations: Start and manage Sift investigations, analyze logs/traces, find error patterns, and detect slow requests.
- Alerting: List and fetch alert rules and notification contact points.
Expand Down
40 changes: 40 additions & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -134,3 +134,43 @@ services:
interval: 10s
timeout: 5s
retries: 5
influxdb3:
build:
dockerfile_inline: |
FROM influxdb:3-core@sha256:255268d2a5f42b8c38d373864a4ba72956d91e14a3361019706bfad2f7c039ab
COPY --chmod=777 ./testdata/tools/influxdb/influxdbv3-seed.sh /init.sh
ports:
- "8181:8181"
command: >
/bin/bash -c "/init.sh & influxdb3 serve --node-id=node0 --object-store=file --data-dir=/var/lib/influxdb3 --admin-token-file=/run/secrets/admin-token"
secrets:
- admin-token
influxdb2:
build:
dockerfile_inline: |
FROM influxdb:2
COPY --chmod=777 ./testdata/tools/influxdb/influxdbv2-seed.sh /docker-entrypoint-initdb.d/init.sh
ports:
- "8086:8086"
environment:
DOCKER_INFLUXDB_INIT_MODE: setup
DOCKER_INFLUXDB_INIT_USERNAME_FILE: /run/secrets/influxdb2-admin-username
DOCKER_INFLUXDB_INIT_PASSWORD_FILE: /run/secrets/influxdb2-admin-password
DOCKER_INFLUXDB_INIT_ADMIN_TOKEN_FILE: /run/secrets/influxdb2-admin-token
DOCKER_INFLUXDB_INIT_ORG: system-logs
DOCKER_INFLUXDB_INIT_BUCKET: b-system-logs
secrets:
- influxdb2-admin-username
- influxdb2-admin-password
- influxdb2-admin-token

secrets:
influxdb2-admin-username:
file: ./testdata/tools/influxdb/.env.influxdb2-admin-username
influxdb2-admin-password:
file: ./testdata/tools/influxdb/.env.influxdb2-admin-password
influxdb2-admin-token:
file: ./testdata/tools/influxdb/.env.influxdb2-admin-token
admin-token:
file: ./testdata/tools/influxdb/admin-token.json

47 changes: 47 additions & 0 deletions pkg/grafana/datasource.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
package grafana

import "errors"

// ErrNoRows is the sentinel error for datasource queries that complete
// successfully but produce an empty result set (message mirrors
// database/sql's sql.ErrNoRows). Compare with errors.Is.
var ErrNoRows = errors.New("no rows in result set")

// DSQueryPayload is the request body sent to Grafana's /api/ds/query
// endpoint: a batch of datasource-specific queries plus the time range to
// evaluate them over.
type DSQueryPayload struct {
	// Queries holds datasource-specific query objects; their shape depends
	// on the target datasource, hence the untyped elements.
	Queries []any `json:"queries"`
	// From and To bound the query time range. Grafana accepts epoch-millis
	// strings or relative expressions like "now-1h"; the format is not
	// validated here.
	From string `json:"from"`
	To   string `json:"to"`
}

type DSQueryFrameField struct {
Name string `json:"name"`
Type string `json:"type"`
TypeInfo struct {
Frame string `json:"frame,omitempty"`
} `json:"typeInfo,omitempty"`
Labels map[string]string `json:"labels"`
Config map[string]interface{} `json:"config,omitempty"`
}

// DSQueryFrameSchema describes the shape of a data frame: its name, the
// refId of the query that produced it, and one descriptor per field
// (column) of the frame.
type DSQueryFrameSchema struct {
	Name   string              `json:"name,omitempty"`
	RefID  string              `json:"refId,omitempty"`
	Fields []DSQueryFrameField `json:"fields"`
}

type DSQueryFrameData struct {
Values [][]interface{} `json:"values"`
}

// DSQueryFrame is a single data frame in a query result: a schema
// describing the columns plus the column-oriented values.
// NOTE(review): `omitempty` on the struct-typed Schema field has no effect
// when marshalling; the tag is kept to match the API's JSON shape.
type DSQueryFrame struct {
	Schema DSQueryFrameSchema `json:"schema,omitempty"`
	Data   DSQueryFrameData   `json:"data"`
}

// DSQueryResult is the outcome of one query within a /api/ds/query call:
// a status code, the data frames produced by the query, and an error
// message when the query failed.
type DSQueryResult struct {
	Status int            `json:"status,omitempty"`
	Frames []DSQueryFrame `json:"frames,omitempty"`
	Error  string         `json:"error,omitempty"`
}

// DSQueryResponse represents the raw API response from Grafana's
// /api/ds/query endpoint. Results maps each submitted query (presumably by
// its refId — confirm against the endpoint's contract) to its result.
type DSQueryResponse struct {
	Results map[string]DSQueryResult `json:"results"`
}
37 changes: 37 additions & 0 deletions testdata/provisioning/datasources/datasources.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -87,3 +87,40 @@ datasources:
accessKey: test
secretKey: test
isDefault: false
- name: InfluxDB_v2_Flux
id: 9
uid: influxdb-flux
type: influxdb
access: proxy
url: http://influxdb2:8086
jsonData:
version: Flux
organization: system-logs
defaultBucket: b-system-logs
tlsSkipVerify: true
secureJsonData:
token: admintoken
- name: InfluxDB_v2_InfluxQL
id: 10
uid: influxdb-influxql
type: influxdb
access: proxy
url: http://influxdb2:8086
jsonData:
dbName: b-system-logs
httpHeaderName1: 'Authorization'
secureJsonData:
httpHeaderValue1: 'Token admintoken'
- name: InfluxDB_v3_SQL
id: 11
uid: influxdb-sql
type: influxdb
access: proxy
url: http://influxdb3:8181
jsonData:
version: SQL
dbName: system-logs
httpMode: POST
insecureGrpc: true
secureJsonData:
token: 'apiv3_OgXAgbMRgiGXcAQaFLJoaw=='
1 change: 1 addition & 0 deletions testdata/tools/influxdb/.env.influxdb2-admin-password
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
password
1 change: 1 addition & 0 deletions testdata/tools/influxdb/.env.influxdb2-admin-token
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
admintoken
1 change: 1 addition & 0 deletions testdata/tools/influxdb/.env.influxdb2-admin-username
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
admin
4 changes: 4 additions & 0 deletions testdata/tools/influxdb/admin-token.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{
"token": "apiv3_OgXAgbMRgiGXcAQaFLJoaw==",
"name": "_admin"
}
83 changes: 83 additions & 0 deletions testdata/tools/influxdb/influxdbv2-seed.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
#!/bin/bash
# Seed script for the InfluxDB v2 integration-test container.
# It is placed in /docker-entrypoint-initdb.d by the docker-compose build, so
# it runs after the server's initial setup. It writes deterministic
# line-protocol fixtures (timestamped relative to "now") into the
# b-system-logs bucket so query tests always have recent data to hit.
echo "Starting InfluxDB v2 data seeding..."

# Must match the docker-compose secrets and the provisioned datasources.
ADMIN_TOKEN="admintoken"
ORG_NAME="system-logs"
BUCKET_NAME="b-system-logs"

# --- Generate Timestamps (nanoseconds since epoch) ---
NOW=$(date +%s%N)
M1=$((NOW - 7200000000000)) # 2 hours ago
M2=$((NOW - 3600000000000)) # 1 hour ago
M3=$((NOW - 1800000000000)) # 30 min ago
M4=$((NOW - 900000000000))  # 15 min ago
M5=$((NOW - 300000000000))  # 5 min ago

# --- Seed b-system-logs bucket ---
echo "Seeding $BUCKET_NAME bucket..."
influx write \
  --token "$ADMIN_TOKEN" \
  --org "$ORG_NAME" \
  --bucket "$BUCKET_NAME" \
  --precision ns \
  <<EOF
auth_events,service=ssh,status=fail,ip=192.168.1.50 attempt_count=5i,severity=3i $M1
auth_events,service=ssh,status=fail,ip=192.168.1.51 attempt_count=3i,severity=3i $M2
auth_events,service=ssh,status=fail,ip=10.0.0.99 attempt_count=8i,severity=4i $M3
auth_events,service=web,status=success,ip=10.0.0.15 attempt_count=1i,severity=1i $M4
auth_events,service=web,status=success,ip=172.16.0.5 attempt_count=1i,severity=1i $M5
auth_events,service=vpn,status=fail,ip=203.0.113.42 attempt_count=12i,severity=5i $M2
auth_events,service=vpn,status=success,ip=10.10.0.3 attempt_count=1i,severity=1i $M3
auth_events,service=ftp,status=fail,ip=198.51.100.7 attempt_count=2i,severity=2i $M1
auth_events,service=smtp,status=success,ip=10.0.1.20 attempt_count=1i,severity=1i $M4
auth_events,service=rdp,status=fail,ip=203.0.113.10 attempt_count=20i,severity=5i $M5
resource_usage,host=server-alpha,region=us-east cpu_util=45.2,mem_free_gb=12.5,disk_used_pct=55.0,load_1m=1.05,net_in_mbps=10.2 $M1
resource_usage,host=server-alpha,region=us-east cpu_util=67.8,mem_free_gb=8.3,disk_used_pct=55.3,load_1m=2.80,net_in_mbps=45.6 $M2
resource_usage,host=server-alpha,region=us-east cpu_util=88.9,mem_free_gb=2.1,disk_used_pct=55.5,load_1m=4.20,net_in_mbps=92.1 $M3
resource_usage,host=server-alpha,region=us-east cpu_util=91.2,mem_free_gb=1.5,disk_used_pct=55.7,load_1m=5.10,net_in_mbps=98.4 $M4
resource_usage,host=server-alpha,region=us-east cpu_util=75.4,mem_free_gb=4.2,disk_used_pct=55.8,load_1m=3.30,net_in_mbps=60.3 $M5
resource_usage,host=server-beta,region=us-west cpu_util=12.1,mem_free_gb=30.4,disk_used_pct=22.0,load_1m=0.05,net_in_mbps=5.1 $M1
resource_usage,host=server-beta,region=us-west cpu_util=18.5,mem_free_gb=28.9,disk_used_pct=22.1,load_1m=0.45,net_in_mbps=8.3 $M2
resource_usage,host=server-beta,region=us-west cpu_util=35.7,mem_free_gb=25.1,disk_used_pct=22.2,load_1m=1.20,net_in_mbps=20.7 $M3
resource_usage,host=server-gamma,region=eu-west cpu_util=55.3,mem_free_gb=16.0,disk_used_pct=70.1,load_1m=2.10,net_in_mbps=30.5 $M1
resource_usage,host=server-gamma,region=eu-west cpu_util=60.1,mem_free_gb=14.5,disk_used_pct=70.5,load_1m=2.50,net_in_mbps=35.9 $M2
resource_usage,host=server-gamma,region=eu-west cpu_util=72.4,mem_free_gb=10.2,disk_used_pct=71.0,load_1m=3.10,net_in_mbps=50.4 $M3
resource_usage,host=server-delta,region=ap-south cpu_util=5.2,mem_free_gb=60.1,disk_used_pct=10.0,load_1m=0.02,net_in_mbps=1.2 $M4
resource_usage,host=server-delta,region=ap-south cpu_util=8.9,mem_free_gb=58.4,disk_used_pct=10.1,load_1m=0.10,net_in_mbps=3.4 $M5
syslog,host=server-alpha,level=ERROR,facility=kern pid=4821i $M3
syslog,host=server-alpha,level=WARN,facility=kern pid=1i $M2
syslog,host=server-beta,level=INFO,facility=sshd pid=9201i $M1
syslog,host=server-gamma,level=ERROR,facility=nginx pid=3310i $M4
syslog,host=server-delta,level=INFO,facility=cron pid=7741i $M5
syslog,host=server-alpha,level=CRIT,facility=disk pid=1i $M5
EOF

# Second batch: application-level metrics (HTTP, DB, queue fixtures).
influx write \
  --token "$ADMIN_TOKEN" \
  --org "$ORG_NAME" \
  --bucket "$BUCKET_NAME" \
  --precision ns \
  <<EOF
http_requests,app=api-gateway,method=GET,status=200 count=1500i,latency_ms=45.2,error_rate=0.0 $M1
http_requests,app=api-gateway,method=POST,status=201 count=320i,latency_ms=120.5,error_rate=0.0 $M1
http_requests,app=api-gateway,method=GET,status=500 count=12i,latency_ms=5001.0,error_rate=1.0 $M2
http_requests,app=api-gateway,method=GET,status=200 count=1800i,latency_ms=42.1,error_rate=0.0 $M2
http_requests,app=checkout-svc,method=POST,status=200 count=200i,latency_ms=350.0,error_rate=0.0 $M3
http_requests,app=checkout-svc,method=POST,status=500 count=25i,latency_ms=6000.0,error_rate=1.0 $M3
http_requests,app=checkout-svc,method=POST,status=200 count=190i,latency_ms=400.0,error_rate=0.0 $M4
db_queries,app=user-svc,db=postgres,op=SELECT duration_ms=5.2,rows_returned=10i $M1
db_queries,app=user-svc,db=postgres,op=INSERT duration_ms=12.1,rows_returned=1i $M2
db_queries,app=user-svc,db=redis,op=GET duration_ms=0.8,rows_returned=1i $M3
db_queries,app=checkout-svc,db=postgres,op=SELECT duration_ms=250.0,rows_returned=500i $M4
db_queries,app=checkout-svc,db=postgres,op=UPDATE duration_ms=80.5,rows_returned=1i $M5
queue_stats,app=worker,queue=email pending=45i,processed=1200i,failed=3i,dlq_size=3i $M1
queue_stats,app=worker,queue=email pending=120i,processed=1350i,failed=5i,dlq_size=8i $M2
queue_stats,app=worker,queue=sms pending=10i,processed=980i,failed=0i,dlq_size=0i $M3
queue_stats,app=worker,queue=notifications pending=500i,processed=4000i,failed=12i,dlq_size=12i $M4
EOF

echo ""
echo "✅ Seeding complete."
# Fix: INFLUX_HOST is never set by this script, so the original line always
# printed empty; fall back to the container-local default for the summary.
echo " Host: ${INFLUX_HOST:-http://localhost:8086}"
echo " Org: $ORG_NAME"
echo " Buckets: $BUCKET_NAME"
69 changes: 69 additions & 0 deletions testdata/tools/influxdb/influxdbv3-seed.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
#!/bin/bash
# Seed script for the InfluxDB v3 integration-test container.
# docker-compose launches this script in the background alongside
# `influxdb3 serve`, so the server may not be accepting requests yet when we
# start: retry database creation instead of relying on a fixed sleep.

AUTH_TOKEN="apiv3_OgXAgbMRgiGXcAQaFLJoaw=="
DB_NAME="system-logs"

echo "Creating database: $DB_NAME"
# Fix: a fixed `sleep 5` raced against server startup. Retry with a bounded
# backoff (~30s worst case); on a re-run against an existing database the
# create call keeps failing and we fall through to the writes, which still
# succeed. NOTE(review): `create database` may fail for reasons other than
# "not ready" — acceptable for a test fixture, but confirm if reused.
for attempt in 1 2 3 4 5 6 7 8 9 10; do
    if influxdb3 create database "$DB_NAME" --token "$AUTH_TOKEN"; then
        break
    fi
    echo "InfluxDB not ready yet (attempt $attempt), retrying..."
    sleep 3
done

# --- Generate Timestamps (nanoseconds since epoch) ---
NOW=$(date +%s%N)
M1=$((NOW - 7200000000000)) # 2 hours ago
M2=$((NOW - 3600000000000)) # 1 hour ago
M3=$((NOW - 1800000000000)) # 30 min ago
M4=$((NOW - 900000000000))  # 15 min ago
M5=$((NOW - 300000000000))  # 5 min ago

echo "Seeding data..."

cat <<EOF | influxdb3 write --database "$DB_NAME" --token "$AUTH_TOKEN"
auth_events,service=ssh,status=fail,ip=192.168.1.50 attempt_count=5,severity=3 $M1
auth_events,service=ssh,status=fail,ip=192.168.1.51 attempt_count=3,severity=3 $M2
auth_events,service=ssh,status=fail,ip=10.0.0.99 attempt_count=8,severity=4 $M3
auth_events,service=web,status=success,ip=10.0.0.15 attempt_count=1,severity=1 $M4
auth_events,service=web,status=success,ip=172.16.0.5 attempt_count=1,severity=1 $M5
auth_events,service=vpn,status=fail,ip=203.0.113.42 attempt_count=12,severity=5 $M2
auth_events,service=vpn,status=success,ip=10.10.0.3 attempt_count=1,severity=1 $M3
auth_events,service=ftp,status=fail,ip=198.51.100.7 attempt_count=2,severity=2 $M1
auth_events,service=smtp,status=success,ip=10.0.1.20 attempt_count=1,severity=1 $M4
auth_events,service=rdp,status=fail,ip=203.0.113.10 attempt_count=20,severity=5 $M5
resource_usage,host=server-alpha,region=us-east cpu_util=45.2,mem_free_gb=12.5,disk_used_pct=55.0,load_1m=1.05,net_in_mbps=10.2 $M1
resource_usage,host=server-alpha,region=us-east cpu_util=67.8,mem_free_gb=8.3,disk_used_pct=55.3,load_1m=2.80,net_in_mbps=45.6 $M2
resource_usage,host=server-alpha,region=us-east cpu_util=88.9,mem_free_gb=2.1,disk_used_pct=55.5,load_1m=4.20,net_in_mbps=92.1 $M3
resource_usage,host=server-alpha,region=us-east cpu_util=91.2,mem_free_gb=1.5,disk_used_pct=55.7,load_1m=5.10,net_in_mbps=98.4 $M4
resource_usage,host=server-alpha,region=us-east cpu_util=75.4,mem_free_gb=4.2,disk_used_pct=55.8,load_1m=3.30,net_in_mbps=60.3 $M5
resource_usage,host=server-beta,region=us-west cpu_util=12.1,mem_free_gb=30.4,disk_used_pct=22.0,load_1m=0.05,net_in_mbps=5.1 $M1
resource_usage,host=server-beta,region=us-west cpu_util=18.5,mem_free_gb=28.9,disk_used_pct=22.1,load_1m=0.45,net_in_mbps=8.3 $M2
resource_usage,host=server-beta,region=us-west cpu_util=35.7,mem_free_gb=25.1,disk_used_pct=22.2,load_1m=1.20,net_in_mbps=20.7 $M3
resource_usage,host=server-gamma,region=eu-west cpu_util=55.3,mem_free_gb=16.0,disk_used_pct=70.1,load_1m=2.10,net_in_mbps=30.5 $M1
resource_usage,host=server-gamma,region=eu-west cpu_util=60.1,mem_free_gb=14.5,disk_used_pct=70.5,load_1m=2.50,net_in_mbps=35.9 $M2
resource_usage,host=server-gamma,region=eu-west cpu_util=72.4,mem_free_gb=10.2,disk_used_pct=71.0,load_1m=3.10,net_in_mbps=50.4 $M3
resource_usage,host=server-delta,region=ap-south cpu_util=5.2,mem_free_gb=60.1,disk_used_pct=10.0,load_1m=0.02,net_in_mbps=1.2 $M4
resource_usage,host=server-delta,region=ap-south cpu_util=8.9,mem_free_gb=58.4,disk_used_pct=10.1,load_1m=0.10,net_in_mbps=3.4 $M5
syslog,host=server-alpha,level=ERROR,facility=kern pid=4821 $M3
syslog,host=server-alpha,level=WARN,facility=kern pid=1 $M2
syslog,host=server-beta,level=INFO,facility=sshd pid=9201 $M1
syslog,host=server-gamma,level=ERROR,facility=nginx pid=3310 $M4
syslog,host=server-delta,level=INFO,facility=cron pid=7741 $M5
syslog,host=server-alpha,level=CRIT,facility=disk pid=1 $M5
http_requests,app=api-gateway,method=GET,status=200 count=1500,latency_ms=45.2,error_rate=0.0 $M1
http_requests,app=api-gateway,method=POST,status=201 count=320,latency_ms=120.5,error_rate=0.0 $M1
http_requests,app=api-gateway,method=GET,status=500 count=12,latency_ms=5001.0,error_rate=1.0 $M2
http_requests,app=api-gateway,method=GET,status=200 count=1800,latency_ms=42.1,error_rate=0.0 $M2
http_requests,app=checkout-svc,method=POST,status=200 count=200,latency_ms=350.0,error_rate=0.0 $M3
http_requests,app=checkout-svc,method=POST,status=500 count=25,latency_ms=6000.0,error_rate=1.0 $M3
http_requests,app=checkout-svc,method=POST,status=200 count=190,latency_ms=400.0,error_rate=0.0 $M4
db_queries,app=user-svc,db=postgres,op=SELECT duration_ms=5.2,rows_returned=10 $M1
db_queries,app=user-svc,db=postgres,op=INSERT duration_ms=12.1,rows_returned=1 $M2
db_queries,app=user-svc,db=redis,op=GET duration_ms=0.8,rows_returned=1 $M3
db_queries,app=checkout-svc,db=postgres,op=SELECT duration_ms=250.0,rows_returned=500 $M4
db_queries,app=checkout-svc,db=postgres,op=UPDATE duration_ms=80.5,rows_returned=1 $M5
queue_stats,app=worker,queue=email pending=45,processed=1200,failed=3,dlq_size=3 $M1
queue_stats,app=worker,queue=email pending=120,processed=1350,failed=5,dlq_size=8 $M2
queue_stats,app=worker,queue=sms pending=10,processed=980,failed=0,dlq_size=0 $M3
queue_stats,app=worker,queue=notifications pending=500,processed=4000,failed=12,dlq_size=12 $M4
EOF

echo "✅ Done! Seeded all data into: $DB_NAME"
29 changes: 3 additions & 26 deletions tools/clickhouse.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import (
"time"

mcpgrafana "github.com/grafana/mcp-grafana"
"github.com/grafana/mcp-grafana/pkg/grafana"
"github.com/mark3labs/mcp-go/mcp"
"github.com/mark3labs/mcp-go/server"
)
Expand Down Expand Up @@ -50,30 +51,6 @@ type ClickHouseQueryResult struct {
Hints *EmptyResultHints `json:"hints,omitempty"`
}

// clickHouseQueryResponse represents the raw API response from Grafana's /api/ds/query
type clickHouseQueryResponse struct {
Results map[string]struct {
Status int `json:"status,omitempty"`
Frames []struct {
Schema struct {
Name string `json:"name,omitempty"`
RefID string `json:"refId,omitempty"`
Fields []struct {
Name string `json:"name"`
Type string `json:"type"`
TypeInfo struct {
Frame string `json:"frame,omitempty"`
} `json:"typeInfo,omitempty"`
} `json:"fields"`
} `json:"schema"`
Data struct {
Values [][]interface{} `json:"values"`
} `json:"data"`
} `json:"frames,omitempty"`
Error string `json:"error,omitempty"`
} `json:"results"`
}

// clickHouseClient handles communication with Grafana's ClickHouse datasource
type clickHouseClient struct {
httpClient *http.Client
Expand Down Expand Up @@ -119,7 +96,7 @@ func newClickHouseClient(ctx context.Context, uid string) (*clickHouseClient, er
}

// query executes a ClickHouse query via Grafana's /api/ds/query endpoint
func (c *clickHouseClient) query(ctx context.Context, datasourceUID, rawSQL string, from, to time.Time) (*clickHouseQueryResponse, error) {
func (c *clickHouseClient) query(ctx context.Context, datasourceUID, rawSQL string, from, to time.Time) (*grafana.DSQueryResponse, error) {
// Build the query payload
payload := map[string]interface{}{
"queries": []map[string]interface{}{
Expand Down Expand Up @@ -168,7 +145,7 @@ func (c *clickHouseClient) query(ctx context.Context, datasourceUID, rawSQL stri
return nil, fmt.Errorf("reading response body: %w", err)
}

var queryResp clickHouseQueryResponse
var queryResp grafana.DSQueryResponse

if err := unmarshalJSONWithLimitMsg(bodyBytes, &queryResp, int(bytesLimit)); err != nil {
return nil, err
Expand Down
Loading