Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 19 additions & 8 deletions Dockerfile
Original file line number Diff line number Diff line change
FROM python:3.12-slim

WORKDIR /app

# Environment variables:
# - UV_COMPILE_BYTECODE: precompile .pyc at install time for faster startup
# - PYTHONUNBUFFERED: flush stdout/stderr immediately (container logs)
# - PYTHONDONTWRITEBYTECODE: no stray .pyc files at runtime
ENV UV_COMPILE_BYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1

# Install uv
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/

# Copy dependency files first for better layer caching
COPY pyproject.toml uv.lock ./

# Install dependencies (cache mount keeps uv's download cache between builds)
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

# Copy source code (only src/, not the whole build context)
COPY src/ src/

ENV PATH="/app/.venv/bin:$PATH"

# Default port for HTTP transports
EXPOSE 8000

# Default transport is stdio for MCP compatibility
# Override with --transport streamable-http or --transport sse for HTTP modes
ENTRYPOINT ["uv", "run", "src/server.py"]

# Default arguments (can be overridden)
# For stdio (default MCP transport): no args needed
# For HTTP: --transport streamable-http --host 0.0.0.0 --port 8000
CMD ["--transport", "stdio"]
70 changes: 69 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,77 @@ services:
trino:
image: trinodb/trino:latest
ports:
- 8080:8080
- "8080:8080"
networks:
- trino-network
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/v1/info"]
interval: 10s
timeout: 5s
retries: 5

# MCP Server with Streamable HTTP transport (recommended for web/remote access)
# Usage: docker-compose up mcp-trino-http
mcp-trino-http:
build: .
command: ["--transport", "streamable-http", "--host", "0.0.0.0", "--port", "8000"]
ports:
- "8000:8000"
environment:
- TRINO_HOST=trino
- TRINO_PORT=8080
- TRINO_USER=trino
- TRINO_CATALOG=tpch
- TRINO_SCHEMA=tiny
depends_on:
trino:
condition: service_healthy
networks:
- trino-network
restart: unless-stopped

# MCP Server with SSE transport (legacy HTTP transport)
# Usage: docker-compose --profile sse up mcp-trino-sse
mcp-trino-sse:
build: .
command: ["--transport", "sse", "--host", "0.0.0.0", "--port", "8000"]
ports:
- "8001:8000"
environment:
- TRINO_HOST=trino
- TRINO_PORT=8080
- TRINO_USER=trino
- TRINO_CATALOG=tpch
- TRINO_SCHEMA=tiny
depends_on:
trino:
condition: service_healthy
networks:
- trino-network
profiles:
- sse
restart: unless-stopped

# MCP Server with stdio transport (for testing with docker exec)
# Usage: docker-compose --profile stdio run --rm mcp-trino-stdio
mcp-trino-stdio:
build: .
command: ["--transport", "stdio"]
environment:
- TRINO_HOST=trino
- TRINO_PORT=8080
- TRINO_USER=trino
- TRINO_CATALOG=tpch
- TRINO_SCHEMA=tiny
depends_on:
trino:
condition: service_healthy
networks:
- trino-network
profiles:
- stdio
stdin_open: true
tty: true

networks:
trino-network:
Expand Down
34 changes: 25 additions & 9 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,11 @@
[project]
name = "mcp-trino-python"
version = "0.6.0"
version = "0.7.0"
description = "A Model Context Protocol (MCP) connector for Trino, enabling seamless integration between MCP-compliant services and Trino query engine"
readme = "README.md"
license = { text = "Apache-2.0" }
requires-python = ">=3.12"
keywords = ["trino", "connector", "mcp"]
dependencies = [
"loguru>=0.7.3",
"mcp[cli]>=1.22.0",
"python-dotenv>=1.1.0",
"trino>=0.336.0",
]

keywords = ["trino", "connector", "mcp", "iceberg", "data-catalog"]
classifiers = [
"Programming Language :: Python :: 3.12",
"License :: OSI Approved :: Apache Software License",
Expand All @@ -21,6 +14,29 @@ classifiers = [
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
"loguru>=0.7.3",
"mcp[cli]>=1.22.0",
"python-dotenv>=1.2.1",
"trino>=0.336.0",
]

[project.optional-dependencies]
# For running HTTP-based transports (streamable-http, sse)
http = ["uvicorn>=0.32.0", "starlette>=0.41.0"]

[project.scripts]
mcp-trino = "server:main"
Copy link

Copilot AI Dec 1, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The script entry point path is incorrect. It should be "src.server:main" rather than "server:main", since the server.py file is in the src/ directory, not at the project root.

Suggested change
mcp-trino = "server:main"
mcp-trino = "src.server:main"

Copilot uses AI. Check for mistakes.

[tool.uv]
package = true

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src"]

[tool.black]
target-version = ["py312"]
Expand Down
64 changes: 56 additions & 8 deletions src/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -535,21 +535,69 @@ def maintain_iceberg(table: str, catalog: str, schema_name: str) -> list[base.Me

if __name__ == "__main__":
    import argparse
    import sys

    from loguru import logger

    # Configure logging to stderr only — stdout must stay clean because the
    # stdio transport uses it as the MCP message channel.
    logger.remove()
    logger.add(sys.stderr, level="INFO", format="{time:YYYY-MM-DD HH:mm:ss} | {level} | {message}")

    parser = argparse.ArgumentParser(
        description="MCP Trino Server - Model Context Protocol connector for Trino",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Transport modes:
  stdio            Standard I/O (default) - for local MCP clients like VS Code
  streamable-http  HTTP with streaming support (recommended for remote/web access)
  sse              Server-Sent Events (legacy HTTP transport)

Examples:
  # Run with stdio (default, for VS Code integration)
  python server.py

  # Run with Streamable HTTP on port 8000
  python server.py --transport streamable-http --host 0.0.0.0 --port 8000

  # Run with SSE transport
  python server.py --transport sse --host 127.0.0.1 --port 8001
""",
    )
    parser.add_argument(
        "--host",
        default="127.0.0.1",
        help="Host to bind to (default: 127.0.0.1, use 0.0.0.0 for all interfaces)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=8000,
        help="Port to listen on (default: 8000)",
    )
    parser.add_argument(
        "--transport",
        choices=["stdio", "streamable-http", "sse"],
        default="stdio",
        help="Transport type (default: stdio)",
    )
    args = parser.parse_args()

    logger.info(f"Starting Trino MCP server with {args.transport} transport")

    # host/port only apply to the HTTP-based transports; stdio ignores them.
    if args.transport == "stdio":
        logger.info("Using stdio transport for local MCP communication")
        mcp.run(transport="stdio")
    elif args.transport == "streamable-http":
        logger.info(f"Starting Streamable HTTP server on http://{args.host}:{args.port}/mcp")
        mcp.run(transport="streamable-http", host=args.host, port=args.port)
    elif args.transport == "sse":
        logger.info(f"Starting SSE server on http://{args.host}:{args.port}/sse")
        mcp.run(transport="sse", host=args.host, port=args.port)


def main():
    """Console-script entry point for the MCP Trino server.

    Parses ``--transport``, ``--host`` and ``--port`` from the command line
    and starts the server. Defaults to the stdio transport, so a bare
    ``mcp-trino`` invocation behaves exactly as before; unlike the previous
    implementation it no longer truncates ``sys.argv``, which silently
    discarded every CLI argument and made HTTP transports unreachable.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="MCP Trino Server - Model Context Protocol connector for Trino",
    )
    parser.add_argument(
        "--host",
        default="127.0.0.1",
        help="Host to bind to (HTTP transports only; default: 127.0.0.1)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=8000,
        help="Port to listen on (HTTP transports only; default: 8000)",
    )
    parser.add_argument(
        "--transport",
        choices=["stdio", "streamable-http", "sse"],
        default="stdio",
        help="Transport type (default: stdio)",
    )
    args = parser.parse_args()

    if args.transport == "stdio":
        # stdio ignores host/port — don't pass them.
        mcp.run(transport="stdio")
    else:
        mcp.run(transport=args.transport, host=args.host, port=args.port)
Comment on lines +598 to +603
Copy link

Copilot AI Dec 1, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The main() function resets sys.argv which will break command-line argument parsing. This function appears to be intended as an entry point but doesn't handle the transport argument properly - it always uses "stdio" regardless of CLI arguments. Consider removing the sys.argv = sys.argv[:1] line or redesigning the entry point to properly handle arguments.

Copilot uses AI. Check for mistakes.
Loading
Loading