
Commit 5551f5d

Add OpenAIUser and example.
1 parent f600fdd commit 5551f5d

File tree

2 files changed: +97 −0 lines changed


Diff for: examples/openai_ex.py

+28
@@ -0,0 +1,28 @@
# You need to install the openai package and set OPENAI_API_KEY env var to run this

# OpenAIUser tracks the number of output tokens in the response_length field,
# because it is more useful than the actual payload size. This field is available to event handlers,
# but only graphed in Locust Cloud.

from locust import run_single_user, task
from locust.contrib.oai import OpenAIUser


class MyUser(OpenAIUser):
    @task
    def t(self):
        self.client.responses.create(
            model="gpt-4o",
            instructions="You are a coding assistant that speaks like it were a Monty Python skit.",
            input="How do I check if a Python object is an instance of a class?",
        )
        with self.client.rename_request("mini"):  # here's how to rename requests
            self.client.responses.create(
                model="gpt-4o-mini",
                instructions="You are a coding assistant that speaks like it were a Monty Python skit.",
                input="How do I check if a Python object is an instance of a class?",
            )


if __name__ == "__main__":
    run_single_user(MyUser)
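
Because response_length carries the output-token count, an ordinary Locust request event listener can aggregate token usage even outside Locust Cloud. A minimal sketch, using Locust's standard events.request hook; the listener name and the print-based reporting are illustrative and not part of this commit:

# Hypothetical companion snippet (not part of this commit): reporting output
# tokens per request name via Locust's standard request event hook.
from locust import events


@events.request.add_listener
def track_output_tokens(request_type, name, response_time, response_length, exception, **kwargs):
    if exception is None:
        # For OpenAIUser requests, response_length is the number of output tokens
        print(f"{name}: {response_length} output tokens in {response_time:.0f} ms")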

Diff for: locust/contrib/oai.py

+69
@@ -0,0 +1,69 @@
# Note: this User is experimental and may change without notice.
# The filename is oai.py so it doesn't clash with the openai package.
from locust.user import User

import os
import time
from collections.abc import Generator
from contextlib import contextmanager

import httpx
from openai import OpenAI  # don't forget to install openai


class OpenAIClient(OpenAI):
    def __init__(self, request_event, user, *args, **kwargs):
        self.request_name = None  # used to override url-based request names
        self.user = user  # currently unused, but could be useful later

        def request_start(request):
            request.start_time = time.time()
            request.start_perf_counter = time.perf_counter()

        def request_end(response):
            exception = None
            response.read()
            response_time = (time.perf_counter() - response.request.start_perf_counter) * 1000
            try:
                response.raise_for_status()
            except httpx.HTTPStatusError as e:
                exception = e
            request_event.fire(
                request_type=response.request.method,
                name=self.request_name if self.request_name else response.url.path,
                context={},
                response=response,
                exception=exception,
                start_time=response.request.start_time,
                response_time=response_time,
                # Store the number of output tokens as response_length instead of the actual payload size because it is more useful
                response_length=response.json().get("usage", {}).get("output_tokens", 0),
                url=response.url,
            )

        # don't forget to set OPENAI_API_KEY in your environment
        super().__init__(
            *args,
            **kwargs,
            http_client=httpx.Client(event_hooks={"request": [request_start], "response": [request_end]}),
            # convenience for when running in Locust Cloud, where only LOCUST_* env vars are forwarded:
            api_key=os.getenv("LOCUST_OPENAI_API_KEY", None),
        )

    @contextmanager
    def rename_request(self, name: str) -> Generator[None]:
        """Group requests using the "with" keyword"""

        self.request_name = name
        try:
            yield
        finally:
            self.request_name = None


class OpenAIUser(User):
    abstract = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.client = OpenAIClient(self.environment.events.request, user=self)
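
The timing and reporting above happen entirely through httpx event hooks, so no per-call wrapping of the OpenAI client methods is needed. A standalone sketch of that mechanism, for illustration only; the hook names (log_start, log_end) and the example URL are hypothetical, and only documented httpx behavior is used:

# Minimal illustration of the httpx event-hook pattern OpenAIClient relies on.
import time

import httpx


def log_start(request):
    # Request hooks may attach attributes to the request for later retrieval
    request.start_perf_counter = time.perf_counter()


def log_end(response):
    response.read()  # response hooks receive an unread response
    elapsed_ms = (time.perf_counter() - response.request.start_perf_counter) * 1000
    print(f"{response.request.method} {response.url.path} -> {response.status_code} in {elapsed_ms:.0f} ms")


client = httpx.Client(event_hooks={"request": [log_start], "response": [log_end]})
client.get("https://api.github.com/")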
