17 | 17 | from .base import BaseChatModel, BaseCompletionModel, BaseModelArgs
18 | 18 | from .utils.jsonrepair import JsonRepair
19 | 19 |
20 |    | -try:
21 |    | -    from openai import AzureOpenAI, AsyncAzureOpenAI
22 |    | -
23 |    | -    client = AzureOpenAI(api_key=os.environ.get("OPENAI_API_KEY"),
24 |    | -                         api_key=os.environ.get("AZURE_OPENAI_API_KEY"),
25 |    | -                         azure_endpoint=os.environ.get("AZURE_OPENAI_API_BASE"),
26 |    | -                         api_version="2023-05-15",
27 |    | -                         azure_endpoint="http://localhost:5000/v1",
28 |    | -                         api_key="EMPTY")
29 |    | -    aclient = AsyncAzureOpenAI(api_key=os.environ.get("OPENAI_API_KEY"),
30 |    | -                               api_key=os.environ.get("AZURE_OPENAI_API_KEY"),
31 |    | -                               azure_endpoint=os.environ.get("AZURE_OPENAI_API_BASE"),
32 |    | -                               api_version="2023-05-15",
33 |    | -                               azure_endpoint="http://localhost:5000/v1",
34 |    | -                               api_key="EMPTY")
35 |    | -    from openai.error import OpenAIError
36 |    | -except ImportError:
37 |    | -    is_openai_available = False
38 |    | -    logger.warn("openai package is not installed")
   | 20 | +import openai
   | 21 | +
   | 22 | +# openai.proxy = os.environ.get("http_proxy")
   | 23 | +# if openai.proxy is None:
   | 24 | +#     openai.proxy = os.environ.get("HTTP_PROXY")
   | 25 | +if os.environ.get("OPENAI_API_KEY") != None:
   | 26 | +    openai.api_key = os.environ.get("OPENAI_API_KEY")
   | 27 | +    is_openai_available = True
   | 28 | +    # set openai api base url if it is set
   | 29 | +    if os.environ.get("OPENAI_BASE_URL") != None:
   | 30 | +        openai.base_url = os.environ.get("OPENAI_BASE_URL")
   | 31 | +        print("use new openai base url", openai.base_url)
   | 32 | +elif os.environ.get("AZURE_OPENAI_API_KEY") != None:
   | 33 | +    openai.api_type = "azure"
   | 34 | +    openai.api_key = os.environ.get("AZURE_OPENAI_API_KEY")
   | 35 | +    openai.api_base = os.environ.get("AZURE_OPENAI_API_BASE")
   | 36 | +    openai.api_version = "2023-05-15"
   | 37 | +    is_openai_available = True
39 | 38 | else:
40 |    | -    # openai.proxy = os.environ.get("http_proxy")
41 |    | -    # if openai.proxy is None:
42 |    | -    #     openai.proxy = os.environ.get("HTTP_PROXY")
43 |    | -    if os.environ.get("OPENAI_API_KEY") != None:
44 |    | -        is_openai_available = True
45 |    | -    elif os.environ.get("AZURE_OPENAI_API_KEY") != None:
46 |    | -        is_openai_available = True
47 |    | -    else:
48 |    | -        logger.warn(
49 |    | -            "OpenAI API key is not set. Please set the environment variable OPENAI_API_KEY"
50 |    | -        )
51 |    | -        is_openai_available = False
52 |    | -
   | 39 | +    logger.warn(
   | 40 | +        "OpenAI API key is not set. Please set the environment variable OPENAI_API_KEY"
   | 41 | +    )
   | 42 | +    is_openai_available = False
53 | 43 |
54 | 44 | class OpenAIChatArgs(BaseModelArgs):
55 | 45 |     model: str = Field(default="gpt-3.5-turbo")

@@ -120,6 +110,8 @@ def __init__(self, max_retry: int = 3, **kwargs):

120 | 110 |         if len(kwargs) > 0:
121 | 111 |             logger.warn(f"Unused arguments: {kwargs}")
122 | 112 |         if args["model"] in LOCAL_LLMS:
    | 113 | +            openai.api_base = "http://localhost:5000/v1"
    | 114 | +            openai.api_key = "EMPTY"
123 | 115 |         super().__init__(args=args, max_retry=max_retry)
124 | 116 |
125 | 117 |     @classmethod
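
For reference, the environment-variable branches introduced above reduce to the following selection logic. This is a minimal standalone sketch, not code from the diff: the resolve_backend helper is hypothetical, but the variable names OPENAI_API_KEY, OPENAI_BASE_URL, and AZURE_OPENAI_API_KEY match the ones checked in the new module-level block.

import os

# Hypothetical helper mirroring the module-level branches in the diff:
# plain OpenAI when OPENAI_API_KEY is set (optionally with a custom base URL),
# Azure OpenAI when AZURE_OPENAI_API_KEY is set, otherwise unavailable.
def resolve_backend() -> str:
    if os.environ.get("OPENAI_API_KEY") is not None:
        if os.environ.get("OPENAI_BASE_URL") is not None:
            return "openai (custom base url)"
        return "openai"
    if os.environ.get("AZURE_OPENAI_API_KEY") is not None:
        return "azure"
    return "unavailable"

if __name__ == "__main__":
    print("backend:", resolve_backend())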