|
| 1 | +# Note: this User is experimental and may change without notice. |
| 2 | +# The filename is oai.py so it doesn't clash with the openai package. |
| 3 | +from locust.user import User |
| 4 | + |
| 5 | +import os |
| 6 | +import time |
| 7 | +from collections.abc import Generator |
| 8 | +from contextlib import contextmanager |
| 9 | + |
| 10 | +import httpx |
| 11 | +from openai import OpenAI  # don't forget to install openai |
| 12 | + |
| 13 | + |
class OpenAIClient(OpenAI):
    """OpenAI SDK client instrumented to report every HTTP request to Locust.

    Installs httpx event hooks on the client's transport that time each
    request and fire the given Locust ``request_event`` with method, name,
    response time, outcome, and token usage.
    """

    def __init__(self, request_event, user, *args, **kwargs):
        # used to override url-based request names (see rename_request)
        self.request_name = None
        self.user = user  # currently unused, but could be useful later

        def request_start(request):
            # Stash timestamps on the request object so request_end can
            # compute the duration for this specific request.
            request.start_time = time.time()
            request.start_perf_counter = time.perf_counter()

        def request_end(response):
            exception = None
            response.read()  # make the body available for .json() below
            response_time = (time.perf_counter() - response.request.start_perf_counter) * 1000
            try:
                response.raise_for_status()
            except httpx.HTTPStatusError as e:
                exception = e
            # Store the number of output tokens as response_length instead of
            # the actual payload size because it is more useful. Error
            # responses may not be JSON at all; fall back to 0 rather than
            # raising inside the hook, which would suppress the event entirely.
            try:
                response_length = response.json().get("usage", {}).get("output_tokens", 0)
            except ValueError:
                response_length = 0
            request_event.fire(
                request_type=response.request.method,
                name=self.request_name if self.request_name else response.url.path,
                context={},
                response=response,
                exception=exception,
                start_time=response.request.start_time,
                response_time=response_time,
                response_length=response_length,
                url=response.url,
            )

        # Respect an explicitly passed api_key; otherwise fall back to
        # LOCUST_OPENAI_API_KEY (convenience for Locust Cloud, where only
        # LOCUST_* env vars are forwarded). Popping it from kwargs avoids a
        # duplicate-keyword TypeError when a caller supplies api_key.
        # A value of None lets the OpenAI SDK read OPENAI_API_KEY itself —
        # don't forget to set it in your environment.
        api_key = kwargs.pop("api_key", os.getenv("LOCUST_OPENAI_API_KEY"))
        super().__init__(
            *args,
            **kwargs,
            http_client=httpx.Client(event_hooks={"request": [request_start], "response": [request_end]}),
            api_key=api_key,
        )

    @contextmanager
    def rename_request(self, name: str) -> Generator[None, None, None]:
        """Group requests using the "with" keyword"""

        self.request_name = name
        try:
            yield
        finally:
            self.request_name = None
| 62 | + |
| 63 | + |
class OpenAIUser(User):
    """Base class for Locust users that exercise the OpenAI API.

    Subclass this and call the API through ``self.client`` (an instrumented
    ``OpenAIClient``); each call is then reported to Locust automatically.
    """

    # Abstract so Locust never spawns this base class directly.
    abstract = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Wire the client's request reporting into this environment's
        # request event so stats show up in the Locust UI.
        self.client = OpenAIClient(self.environment.events.request, user=self)
0 commit comments