Skip to content

Commit 27c3f60

Browse files
committed
Add async OpenAI client support in openai_client.py
- Introduced `get_async_openai_client` function for asynchronous OpenAI and Azure client initialization. - Updated documentation to clarify usage of LiteLLM proxy for both chat and embeddings. - Enhanced error handling for missing configuration keys in async client setup.
1 parent 1b760fe commit 27c3f60

1 file changed

Lines changed: 49 additions & 2 deletions

File tree

server/lib/openai_client.py

Lines changed: 49 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
All links and storage that call OpenAI should use get_openai_client(opts) so
99
LLM provider and proxy can be switched in one place.
1010
"""
11-
from openai import OpenAI, AzureOpenAI
11+
from openai import OpenAI, AzureOpenAI, AsyncOpenAI, AsyncAzureOpenAI
1212

1313
from lib.logging_utils import init_logger
1414

@@ -21,11 +21,12 @@
2121
def get_openai_client(opts=None):
2222
"""
2323
Return an OpenAI-compatible client (OpenAI or AzureOpenAI).
24+
Same client is used for chat and embeddings; LiteLLM proxy supports both.
2425
2526
opts: dict of options. All values are read from opts only.
2627
2728
Supported keys in opts:
28-
- LITELLM_PROXY_URL, LITELLM_MASTER_KEY -> use LiteLLM proxy
29+
- LITELLM_PROXY_URL, LITELLM_MASTER_KEY -> use LiteLLM proxy (chat + embeddings)
2930
- AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_KEY, AZURE_OPENAI_API_VERSION -> Azure
3031
- OPENAI_API_KEY or openai_api_key or api_key -> OpenAI
3132
- organization / organization_key, project / project_key (optional)
@@ -83,3 +84,49 @@ def get_openai_client(opts=None):
8384
"Set LITELLM_PROXY_URL + LITELLM_MASTER_KEY, or "
8485
"AZURE_OPENAI_ENDPOINT + AZURE_OPENAI_API_KEY, or OPENAI_API_KEY (or api_key)"
8586
)
87+
88+
89+
def get_async_openai_client(opts=None):
    """
    Build and return an async OpenAI-compatible client.

    Accepts the same option keys as get_openai_client and resolves the
    provider in the same priority order:

      1. LITELLM_PROXY_URL + LITELLM_MASTER_KEY       -> AsyncOpenAI via LiteLLM proxy
         (one endpoint serving both chat and embeddings)
      2. AZURE_OPENAI_ENDPOINT + AZURE_OPENAI_API_KEY -> AsyncAzureOpenAI
      3. OPENAI_API_KEY / openai_api_key / api_key    -> public AsyncOpenAI

    Raises:
        ValueError: when none of the configurations above is present in opts.
    """
    options = opts or {}

    # 1) LiteLLM proxy — strip trailing slash so "/v1" is appended cleanly.
    proxy_url = (options.get("LITELLM_PROXY_URL") or "").strip().rstrip("/")
    proxy_key = (options.get("LITELLM_MASTER_KEY") or "").strip()
    if proxy_url and proxy_key:
        logger.info("Using LiteLLM proxy at %s (async)", proxy_url)
        return AsyncOpenAI(
            api_key=proxy_key,
            base_url=proxy_url + "/v1",
            timeout=120.0,
            max_retries=0,
        )

    # 2) Azure OpenAI — endpoint + key required; API version falls back to default.
    endpoint = (options.get("AZURE_OPENAI_ENDPOINT") or "").strip()
    azure_key = (options.get("AZURE_OPENAI_API_KEY") or "").strip()
    if endpoint and azure_key:
        api_version = options.get("AZURE_OPENAI_API_VERSION") or DEFAULT_AZURE_OPENAI_API_VERSION
        logger.info("Using Azure OpenAI client at endpoint: %s (async)", endpoint)
        return AsyncAzureOpenAI(
            api_key=azure_key,
            azure_endpoint=endpoint,
            api_version=api_version,
            timeout=120.0,
            max_retries=0,
        )

    # 3) Public OpenAI — first truthy key among the accepted aliases.
    public_key = next(
        (
            options.get(key)
            for key in ("OPENAI_API_KEY", "openai_api_key", "api_key")
            if options.get(key)
        ),
        None,
    )
    if public_key:
        logger.info("Using public OpenAI client (async)")
        return AsyncOpenAI(api_key=public_key, timeout=120.0, max_retries=0)

    raise ValueError(
        "Set LITELLM_PROXY_URL + LITELLM_MASTER_KEY, or "
        "AZURE_OPENAI_ENDPOINT + AZURE_OPENAI_API_KEY, or OPENAI_API_KEY (or api_key)"
    )

0 commit comments

Comments
 (0)