Skip to content

Commit 2f861c8

Browse files
williamwclaude
and committed
Fix protobuf dependency conflict in e2b sandbox recipe
Use pixi feature environments to isolate dependency conflicts between the modular package (protobuf >=6.31.1) and e2b-code-interpreter (protobuf <6.0.0).

Create separate environments:
- server environment: uses the modular package for MAX Serve
- agent environment: uses e2b-code-interpreter for code execution

This allows both components to work without dependency conflicts by running in isolated pixi environments.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent c205d35 commit 2f861c8

File tree

1 file changed

+19
-4
lines changed

1 file changed

+19
-4
lines changed

code-execution-sandbox-agent-with-e2b/pyproject.toml

Lines changed: 19 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -4,8 +4,6 @@ name = "code-execution-sandbox-agent-with-e2b"
44
requires-python = ">=3.10,<3.13"
55
version = "0.0.0"
66
dependencies = [
7-
"openai>=1.65.1,<2",
8-
"e2b-code-interpreter>=1.0.5,<2",
97
"python-dotenv>=1.0.1,<2",
108
"rich>=13.9.4,<15",
119
]
@@ -28,10 +26,27 @@ platforms = ["linux-64", "linux-aarch64", "osx-arm64"]
2826
code_execution_sandbox_agent_with_e2b = { path = ".", editable = true }
2927

3028
[tool.pixi.dependencies]
29+
python = ">=3.10,<3.13"
30+
31+
# Feature for MAX server (needs modular package with newer protobuf)
32+
[tool.pixi.feature.server.dependencies]
3133
modular = ">=25.5.0.dev2025070905,<26"
3234

35+
[tool.pixi.feature.server.pypi-dependencies]
36+
openai = ">=1.65.1,<2"
37+
38+
# Feature for e2b agent (needs older protobuf)
39+
[tool.pixi.feature.agent.pypi-dependencies]
40+
openai = ">=1.65.1,<2"
41+
e2b-code-interpreter = ">=1.0.5,<2"
42+
43+
# Define environments
44+
[tool.pixi.environments]
45+
server = ["server"]
46+
agent = ["agent"]
47+
3348
[tool.pixi.tasks]
34-
server = "MAX_SERVE_PORT=8010 max serve --model-path modularai/Llama-3.1-8B-Instruct-GGUF --enable-structured-output"
3549
hello = "python hello.py"
36-
agent = "python agent.py"
3750
tests = "echo 'test passed'"
51+
server = "MAX_SERVE_PORT=8010 max serve --model-path modularai/Llama-3.1-8B-Instruct-GGUF --enable-structured-output"
52+
agent = "python agent.py"

0 commit comments

Comments (0)