Skip to content

Commit aa080cf

Browse files
committed
support prime sandboxes
1 parent 8a3a661 commit aa080cf

File tree

7 files changed

+697
-8
lines changed

7 files changed

+697
-8
lines changed

examples/prime_repl_example.py

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
"""
Example script to test Prime Intellect Sandboxes.

Requirements:
1. Install the Prime SDK:
   pip install prime
   # or for lightweight SDK only:
   pip install prime-sandboxes

2. Authenticate with Prime:
   prime login
   # or set environment variable:
   export PRIME_API_KEY="your-api-key"

3. Set your LLM API key (OpenAI in this example):
   export OPENAI_API_KEY="your-api-key"
"""

import os

from dotenv import load_dotenv

from rlm import RLM
from rlm.logger import RLMLogger

# Pull PRIME_API_KEY / OPENAI_API_KEY from a local .env file, if one exists.
load_dotenv()

# Persist run logs alongside the example for later inspection.
run_logger = RLMLogger(log_dir="./logs")

# Build an RLM that talks to OpenAI for inference but executes generated
# code inside a Prime Intellect sandbox instead of the local machine.
rlm_client = RLM(
    backend="openai",
    backend_kwargs={
        "api_key": os.getenv("OPENAI_API_KEY"),
        "model_name": "gpt-5-nano",
    },
    environment="prime",
    environment_kwargs={
        "name": "rlm-prime-demo",
        "docker_image": "python:3.11-slim",
        "timeout_minutes": 30,
    },
    max_depth=1,
    logger=run_logger,
    verbose=True,
)

result = rlm_client.completion("Using your code, solve 2^(2^(2^(2))). Show your work in Python.")
print(result.response)

examples/rlm_example.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@
99

1010
logger = RLMLogger(log_dir="./logs")
1111
rlm = RLM(
12-
backend="portkey",
12+
backend="openai",
1313
backend_kwargs={
14-
"api_key": os.getenv("PORTKEY_API_KEY"),
15-
"model_name": "@openai/gpt-5-nano",
14+
"api_key": os.getenv("OPENAI_API_KEY"),
15+
"model_name": "gpt-5-nano",
1616
},
1717
environment="local",
1818
environment_kwargs={},

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ dependencies = [
1717

1818
[project.optional-dependencies]
1919
modal = ["modal>=0.73.0", "dill>=0.3.7"]
20+
prime = ["prime-sandboxes>=0.2.0", "dill>=0.3.7"]
2021

2122
[build-system]
2223
requires = ["setuptools>=61.0"]

rlm/core/types.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from typing import Any, Literal
44

55
ClientBackend = Literal["openai", "portkey", "openrouter", "vllm", "litellm", "anthropic"]
6-
EnvironmentType = Literal["local", "prime", "modal"]
6+
EnvironmentType = Literal["local", "docker", "modal", "prime"]
77

88

99
def _serialize_value(value: Any) -> Any:

rlm/environments/__init__.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,12 @@
55

66

77
def get_environment(
8-
environment: Literal["local", "modal", "docker"],
8+
environment: Literal["local", "modal", "docker", "prime"],
99
environment_kwargs: dict[str, Any],
1010
) -> BaseEnv:
1111
"""
1212
Routes a specific environment and the args (as a dict) to the appropriate environment if supported.
13-
Currently supported environments: ['local', 'modal', 'docker']
13+
Currently supported environments: ['local', 'modal', 'docker', 'prime']
1414
"""
1515
if environment == "local":
1616
return LocalREPL(**environment_kwargs)
@@ -22,7 +22,11 @@ def get_environment(
2222
from rlm.environments.docker_repl import DockerREPL
2323

2424
return DockerREPL(**environment_kwargs)
25+
elif environment == "prime":
26+
from rlm.environments.prime_repl import PrimeREPL
27+
28+
return PrimeREPL(**environment_kwargs)
2529
else:
2630
raise ValueError(
27-
f"Unknown environment: {environment}. Supported: ['local', 'modal', 'docker']"
31+
f"Unknown environment: {environment}. Supported: ['local', 'modal', 'docker', 'prime']"
2832
)

0 commit comments

Comments
 (0)