From 57d703af933c7ef68af76d5a06edb208698e94cf Mon Sep 17 00:00:00 2001 From: Steve Wang Date: Thu, 14 Sep 2023 14:30:56 +0800 Subject: [PATCH 1/6] Supporting OpenAI Azure deployments. --- voyager/agents/__init__.py | 1 + voyager/agents/action.py | 13 ++++++++++--- voyager/agents/azure_model_config.py | 10 ++++++++++ voyager/agents/critic.py | 13 ++++++++++--- voyager/agents/curriculum.py | 20 +++++++++++++++----- voyager/agents/get_llm.py | 28 ++++++++++++++++++++++++++++ voyager/agents/skill.py | 13 ++++++++++--- voyager/voyager.py | 16 ++++++++++++++++ 8 files changed, 100 insertions(+), 14 deletions(-) create mode 100644 voyager/agents/azure_model_config.py create mode 100644 voyager/agents/get_llm.py diff --git a/voyager/agents/__init__.py b/voyager/agents/__init__.py index 3989162c..e690cbd7 100644 --- a/voyager/agents/__init__.py +++ b/voyager/agents/__init__.py @@ -2,3 +2,4 @@ from .critic import CriticAgent from .curriculum import CurriculumAgent from .skill import SkillManager +from .azure_model_config import AzureModelConfig diff --git a/voyager/agents/action.py b/voyager/agents/action.py index b5b27ac0..4d396751 100644 --- a/voyager/agents/action.py +++ b/voyager/agents/action.py @@ -3,12 +3,13 @@ import voyager.utils as U from javascript import require -from langchain.chat_models import ChatOpenAI from langchain.prompts import SystemMessagePromptTemplate from langchain.schema import AIMessage, HumanMessage, SystemMessage from voyager.prompts import load_prompt from voyager.control_primitives_context import load_control_primitives_context +from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.get_llm import get_llm class ActionAgent: @@ -21,6 +22,9 @@ def __init__( resume=False, chat_log=True, execution_error=True, + openai_api_type="", + azure_gpt_4_config=AzureModelConfig(), + azure_gpt_35_config=AzureModelConfig(), ): self.ckpt_dir = ckpt_dir self.chat_log = chat_log @@ -31,10 +35,13 @@ def __init__( self.chest_memory = U.load_json(f"{ckpt_dir}/action/chest_memory.json") else: self.chest_memory = {} - self.llm = ChatOpenAI( + self.llm = get_llm( model_name=model_name, temperature=temperature, - request_timeout=request_timout, + request_timout=request_timout, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) def update_chest_memory(self, chests): diff --git a/voyager/agents/azure_model_config.py b/voyager/agents/azure_model_config.py new file mode 100644 index 00000000..dfa4044a --- /dev/null +++ b/voyager/agents/azure_model_config.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel + + +class AzureModelConfig(BaseModel): + """AzureChatOpenAI config profile""" + openai_api_base: str + openai_api_version: str + deployment_name: str + openai_api_type: str = "azure" + openai_api_key: str diff --git a/voyager/agents/critic.py b/voyager/agents/critic.py index 34639bb8..cac94e9e 100644 --- a/voyager/agents/critic.py +++ b/voyager/agents/critic.py @@ -1,6 +1,7 @@ from voyager.prompts import load_prompt from voyager.utils.json_utils import fix_and_parse_json -from langchain.chat_models import ChatOpenAI +from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.get_llm import get_llm from langchain.schema import HumanMessage, SystemMessage @@ -11,11 +12,17 @@ def __init__( temperature=0, request_timout=120, mode="auto", + openai_api_type="", + azure_gpt_4_config=AzureModelConfig(), + azure_gpt_35_config=AzureModelConfig(), ): - self.llm = ChatOpenAI( + 
self.llm = get_llm( model_name=model_name, temperature=temperature, - request_timeout=request_timout, + request_timout=request_timout, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) assert mode in ["auto", "manual"] self.mode = mode diff --git a/voyager/agents/curriculum.py b/voyager/agents/curriculum.py index 769c409f..1758c807 100644 --- a/voyager/agents/curriculum.py +++ b/voyager/agents/curriculum.py @@ -6,7 +6,8 @@ import voyager.utils as U from voyager.prompts import load_prompt from voyager.utils.json_utils import fix_and_parse_json -from langchain.chat_models import ChatOpenAI +from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.get_llm import get_llm from langchain.embeddings.openai import OpenAIEmbeddings from langchain.schema import HumanMessage, SystemMessage from langchain.vectorstores import Chroma @@ -25,16 +26,25 @@ def __init__( mode="auto", warm_up=None, core_inventory_items: str | None = None, + openai_api_type="", + azure_gpt_4_config=AzureModelConfig(), + azure_gpt_35_config=AzureModelConfig(), ): - self.llm = ChatOpenAI( + self.llm = get_llm( model_name=model_name, temperature=temperature, - request_timeout=request_timout, + request_timout=request_timout, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) - self.qa_llm = ChatOpenAI( + self.qa_llm = get_llm( model_name=qa_model_name, temperature=qa_temperature, - request_timeout=request_timout, + request_timout=request_timout, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) assert mode in [ "auto", diff --git a/voyager/agents/get_llm.py b/voyager/agents/get_llm.py new file mode 100644 index 00000000..0a8ab529 --- /dev/null +++ b/voyager/agents/get_llm.py @@ -0,0 +1,28 @@ +from langchain.chat_models import ChatOpenAI, AzureChatOpenAI + +from voyager.agents.azure_model_config import AzureModelConfig + + +def get_llm( + model_name: str = "gpt-3.5-turbo", + temperature: float = 0, + request_timout: float = 120, + azure_gpt_4_config: AzureModelConfig = AzureModelConfig(), + azure_gpt_35_config: AzureModelConfig = AzureModelConfig(), + openai_api_type: str = "", +) -> ChatOpenAI | AzureChatOpenAI: + if openai_api_type == "azure": + azure_model_config = azure_gpt_4_config if model_name == "gpt-4" else azure_gpt_35_config + llm = AzureChatOpenAI( + temperature=temperature, + request_timout=request_timout, + **azure_model_config.dict(), + ) + else: + llm = ChatOpenAI( + model_name=model_name, + temperature=temperature, + request_timeout=request_timout, + ) + + return llm diff --git a/voyager/agents/skill.py b/voyager/agents/skill.py index 08499c78..5ed88a33 100644 --- a/voyager/agents/skill.py +++ b/voyager/agents/skill.py @@ -1,7 +1,8 @@ import os import voyager.utils as U -from langchain.chat_models import ChatOpenAI +from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.get_llm import get_llm from langchain.embeddings.openai import OpenAIEmbeddings from langchain.schema import HumanMessage, SystemMessage from langchain.vectorstores import Chroma @@ -19,11 +20,17 @@ def __init__( request_timout=120, ckpt_dir="ckpt", resume=False, + openai_api_type="", + azure_gpt_4_config=AzureModelConfig(), + azure_gpt_35_config=AzureModelConfig(), ): - self.llm = ChatOpenAI( + self.llm = get_llm( model_name=model_name, temperature=temperature, - 
request_timeout=request_timout, + request_timout=request_timout, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) U.f_mkdir(f"{ckpt_dir}/skill/code") U.f_mkdir(f"{ckpt_dir}/skill/description") diff --git a/voyager/voyager.py b/voyager/voyager.py index 61a0c207..0f5f7c65 100644 --- a/voyager/voyager.py +++ b/voyager/voyager.py @@ -11,6 +11,7 @@ from .agents import CriticAgent from .agents import CurriculumAgent from .agents import SkillManager +from .agents import AzureModelConfig # TODO: remove event memory @@ -48,6 +49,9 @@ def __init__( ckpt_dir: str = "ckpt", skill_library_dir: str = None, resume: bool = False, + openai_api_type="", + azure_gpt_4_config=AzureModelConfig(), + azure_gpt_35_config=AzureModelConfig(), ): """ The main class for Voyager. @@ -123,6 +127,9 @@ def __init__( resume=resume, chat_log=action_agent_show_chat_log, execution_error=action_agent_show_execution_error, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) self.action_agent_task_max_retries = action_agent_task_max_retries self.curriculum_agent = CurriculumAgent( @@ -136,12 +143,18 @@ def __init__( mode=curriculum_agent_mode, warm_up=curriculum_agent_warm_up, core_inventory_items=curriculum_agent_core_inventory_items, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) self.critic_agent = CriticAgent( model_name=critic_agent_model_name, temperature=critic_agent_temperature, request_timout=openai_api_request_timeout, mode=critic_agent_mode, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) self.skill_manager = SkillManager( model_name=skill_manager_model_name, @@ -150,6 +163,9 @@ def __init__( request_timout=openai_api_request_timeout, ckpt_dir=skill_library_dir if skill_library_dir else ckpt_dir, resume=True if resume or skill_library_dir else False, + openai_api_type=openai_api_type, + azure_gpt_4_config=azure_gpt_4_config, + azure_gpt_35_config=azure_gpt_35_config, ) self.recorder = U.EventRecorder(ckpt_dir=ckpt_dir, resume=resume) self.resume = resume From 8d3a1db83bc50869895af3e54942622156f70bdb Mon Sep 17 00:00:00 2001 From: Steve Wang Date: Thu, 14 Sep 2023 14:34:13 +0800 Subject: [PATCH 2/6] Supporting OpenAI Azure deployments. --- voyager/agents/azure_model_config.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/voyager/agents/azure_model_config.py b/voyager/agents/azure_model_config.py index dfa4044a..1405c2c8 100644 --- a/voyager/agents/azure_model_config.py +++ b/voyager/agents/azure_model_config.py @@ -3,8 +3,8 @@ class AzureModelConfig(BaseModel): """AzureChatOpenAI config profile""" - openai_api_base: str - openai_api_version: str - deployment_name: str - openai_api_type: str = "azure" - openai_api_key: str + openai_api_base: str = '' + openai_api_version: str = '' + deployment_name: str = '' + openai_api_type: str = 'azure' + openai_api_key: str = '' From e2b9f40b80baedd183719fa4bfc7cfa7111d804e Mon Sep 17 00:00:00 2001 From: Steve Wang Date: Thu, 14 Sep 2023 15:06:07 +0800 Subject: [PATCH 3/6] Add Azure config for OpenAIEmbeddings. 
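This renames AzureModelConfig to AzureChatModelConfig and introduces
AzureOpenAIEmbeddingsConfig, so the Chroma vector stores built by
CurriculumAgent and SkillManager can target an Azure embeddings
deployment. The selection logic added at each Chroma call site is
roughly the following (an illustrative sketch, not an exact excerpt;
the diff inlines the conditional rather than defining a helper):

    from langchain.embeddings.openai import OpenAIEmbeddings

    def build_embedding_function(openai_api_type, azure_openai_embeddings_config):
        # Illustrative helper only: route to the Azure embeddings deployment
        # when requested, otherwise keep the default OpenAI endpoint.
        if openai_api_type == "azure":
            return OpenAIEmbeddings(**azure_openai_embeddings_config.dict())
        return OpenAIEmbeddings()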
--- voyager/agents/__init__.py | 2 +- voyager/agents/action.py | 6 +++--- voyager/agents/azure_model_config.py | 11 ++++++++++- voyager/agents/critic.py | 6 +++--- voyager/agents/curriculum.py | 9 +++++---- voyager/agents/get_llm.py | 6 +++--- voyager/agents/skill.py | 9 +++++---- voyager/voyager.py | 7 ++++--- 8 files changed, 34 insertions(+), 22 deletions(-) diff --git a/voyager/agents/__init__.py b/voyager/agents/__init__.py index e690cbd7..d74c9c46 100644 --- a/voyager/agents/__init__.py +++ b/voyager/agents/__init__.py @@ -2,4 +2,4 @@ from .critic import CriticAgent from .curriculum import CurriculumAgent from .skill import SkillManager -from .azure_model_config import AzureModelConfig +from .azure_model_config import AzureChatModelConfig, AzureOpenAIEmbeddingsConfig diff --git a/voyager/agents/action.py b/voyager/agents/action.py index 4d396751..d009b855 100644 --- a/voyager/agents/action.py +++ b/voyager/agents/action.py @@ -8,7 +8,7 @@ from voyager.prompts import load_prompt from voyager.control_primitives_context import load_control_primitives_context -from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.azure_model_config import AzureChatModelConfig from voyager.agents.get_llm import get_llm @@ -23,8 +23,8 @@ def __init__( chat_log=True, execution_error=True, openai_api_type="", - azure_gpt_4_config=AzureModelConfig(), - azure_gpt_35_config=AzureModelConfig(), + azure_gpt_4_config=AzureChatModelConfig(), + azure_gpt_35_config=AzureChatModelConfig(), ): self.ckpt_dir = ckpt_dir self.chat_log = chat_log diff --git a/voyager/agents/azure_model_config.py b/voyager/agents/azure_model_config.py index 1405c2c8..49799508 100644 --- a/voyager/agents/azure_model_config.py +++ b/voyager/agents/azure_model_config.py @@ -1,10 +1,19 @@ from pydantic import BaseModel -class AzureModelConfig(BaseModel): +class AzureChatModelConfig(BaseModel): """AzureChatOpenAI config profile""" openai_api_base: str = '' openai_api_version: str = '' deployment_name: str = '' openai_api_type: str = 'azure' openai_api_key: str = '' + + +class AzureOpenAIEmbeddingsConfig(BaseModel): + """OpenAIEmbeddings config profile""" + openai_api_base: str = '' + model: str = '' + openai_api_type: str = 'azure' + deployment: str = '' + openai_api_key: str = '' diff --git a/voyager/agents/critic.py b/voyager/agents/critic.py index cac94e9e..bf742db0 100644 --- a/voyager/agents/critic.py +++ b/voyager/agents/critic.py @@ -1,6 +1,6 @@ from voyager.prompts import load_prompt from voyager.utils.json_utils import fix_and_parse_json -from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.azure_model_config import AzureChatModelConfig from voyager.agents.get_llm import get_llm from langchain.schema import HumanMessage, SystemMessage @@ -13,8 +13,8 @@ def __init__( request_timout=120, mode="auto", openai_api_type="", - azure_gpt_4_config=AzureModelConfig(), - azure_gpt_35_config=AzureModelConfig(), + azure_gpt_4_config=AzureChatModelConfig(), + azure_gpt_35_config=AzureChatModelConfig(), ): self.llm = get_llm( model_name=model_name, diff --git a/voyager/agents/curriculum.py b/voyager/agents/curriculum.py index 1758c807..7b56931f 100644 --- a/voyager/agents/curriculum.py +++ b/voyager/agents/curriculum.py @@ -6,7 +6,7 @@ import voyager.utils as U from voyager.prompts import load_prompt from voyager.utils.json_utils import fix_and_parse_json -from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.azure_model_config import AzureChatModelConfig, 
AzureOpenAIEmbeddingsConfig from voyager.agents.get_llm import get_llm from langchain.embeddings.openai import OpenAIEmbeddings from langchain.schema import HumanMessage, SystemMessage @@ -27,8 +27,9 @@ def __init__( warm_up=None, core_inventory_items: str | None = None, openai_api_type="", - azure_gpt_4_config=AzureModelConfig(), - azure_gpt_35_config=AzureModelConfig(), + azure_gpt_4_config=AzureChatModelConfig(), + azure_gpt_35_config=AzureChatModelConfig(), + azure_openai_embeddings_config=AzureOpenAIEmbeddingsConfig(), ): self.llm = get_llm( model_name=model_name, @@ -67,7 +68,7 @@ def __init__( # vectordb for qa cache self.qa_cache_questions_vectordb = Chroma( collection_name="qa_cache_questions_vectordb", - embedding_function=OpenAIEmbeddings(), + embedding_function=OpenAIEmbeddings(**azure_openai_embeddings_config.dict()) if openai_api_type == "azure" else OpenAIEmbeddings(), persist_directory=f"{ckpt_dir}/curriculum/vectordb", ) assert self.qa_cache_questions_vectordb._collection.count() == len( diff --git a/voyager/agents/get_llm.py b/voyager/agents/get_llm.py index 0a8ab529..21d90bed 100644 --- a/voyager/agents/get_llm.py +++ b/voyager/agents/get_llm.py @@ -1,14 +1,14 @@ from langchain.chat_models import ChatOpenAI, AzureChatOpenAI -from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.azure_model_config import AzureChatModelConfig def get_llm( model_name: str = "gpt-3.5-turbo", temperature: float = 0, request_timout: float = 120, - azure_gpt_4_config: AzureModelConfig = AzureModelConfig(), - azure_gpt_35_config: AzureModelConfig = AzureModelConfig(), + azure_gpt_4_config: AzureChatModelConfig = AzureChatModelConfig(), + azure_gpt_35_config: AzureChatModelConfig = AzureChatModelConfig(), openai_api_type: str = "", ) -> ChatOpenAI | AzureChatOpenAI: if openai_api_type == "azure": diff --git a/voyager/agents/skill.py b/voyager/agents/skill.py index 5ed88a33..c1c2307e 100644 --- a/voyager/agents/skill.py +++ b/voyager/agents/skill.py @@ -1,7 +1,7 @@ import os import voyager.utils as U -from voyager.agents.azure_model_config import AzureModelConfig +from voyager.agents.azure_model_config import AzureChatModelConfig, AzureOpenAIEmbeddingsConfig from voyager.agents.get_llm import get_llm from langchain.embeddings.openai import OpenAIEmbeddings from langchain.schema import HumanMessage, SystemMessage @@ -21,8 +21,9 @@ def __init__( ckpt_dir="ckpt", resume=False, openai_api_type="", - azure_gpt_4_config=AzureModelConfig(), - azure_gpt_35_config=AzureModelConfig(), + azure_gpt_4_config=AzureChatModelConfig(), + azure_gpt_35_config=AzureChatModelConfig(), + azure_openai_embeddings_config=AzureOpenAIEmbeddingsConfig(), ): self.llm = get_llm( model_name=model_name, @@ -46,7 +47,7 @@ def __init__( self.ckpt_dir = ckpt_dir self.vectordb = Chroma( collection_name="skill_vectordb", - embedding_function=OpenAIEmbeddings(), + embedding_function=OpenAIEmbeddings(**azure_openai_embeddings_config.dict()) if openai_api_type == "azure" else OpenAIEmbeddings(), persist_directory=f"{ckpt_dir}/skill/vectordb", ) assert self.vectordb._collection.count() == len(self.skills), ( diff --git a/voyager/voyager.py b/voyager/voyager.py index 0f5f7c65..bab76f95 100644 --- a/voyager/voyager.py +++ b/voyager/voyager.py @@ -11,7 +11,7 @@ from .agents import CriticAgent from .agents import CurriculumAgent from .agents import SkillManager -from .agents import AzureModelConfig +from .agents import AzureChatModelConfig, AzureOpenAIEmbeddingsConfig # TODO: remove event memory @@ -50,8 +50,9 
@@ def __init__(
         skill_library_dir: str = None,
         resume: bool = False,
         openai_api_type="",
-        azure_gpt_4_config=AzureModelConfig(),
-        azure_gpt_35_config=AzureModelConfig(),
+        azure_gpt_4_config=AzureChatModelConfig(),
+        azure_gpt_35_config=AzureChatModelConfig(),
+        azure_openai_embeddings_config=AzureOpenAIEmbeddingsConfig(),
     ):
         """
         The main class for Voyager.

From 8a10cebc428a52dfc43d947d7a547e8bede4d3ba Mon Sep 17 00:00:00 2001
From: Steve Wang
Date: Thu, 14 Sep 2023 15:10:24 +0800
Subject: [PATCH 4/6] Add Azure config for OpenAIEmbeddings.

---
 voyager/voyager.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/voyager/voyager.py b/voyager/voyager.py
index bab76f95..1a29ecc5 100644
--- a/voyager/voyager.py
+++ b/voyager/voyager.py
@@ -147,6 +147,7 @@ def __init__(
             openai_api_type=openai_api_type,
             azure_gpt_4_config=azure_gpt_4_config,
             azure_gpt_35_config=azure_gpt_35_config,
+            azure_openai_embeddings_config=azure_openai_embeddings_config,
         )
         self.critic_agent = CriticAgent(
             model_name=critic_agent_model_name,
@@ -167,6 +168,7 @@ def __init__(
             openai_api_type=openai_api_type,
             azure_gpt_4_config=azure_gpt_4_config,
             azure_gpt_35_config=azure_gpt_35_config,
+            azure_openai_embeddings_config=azure_openai_embeddings_config,
         )
         self.recorder = U.EventRecorder(ckpt_dir=ckpt_dir, resume=resume)
         self.resume = resume

From 657dabf279541de007b8f6cf4f26105d52d59891 Mon Sep 17 00:00:00 2001
From: Steve Wang
Date: Thu, 14 Sep 2023 16:18:04 +0800
Subject: [PATCH 5/6] Add documentation for using Azure API deployments.

---
 README.md                                    |  2 +
 .../run_with_azure_api_deployments.md        | 52 +++++++++++++++++++
 2 files changed, 54 insertions(+)
 create mode 100644 installation/run_with_azure_api_deployments.md

diff --git a/README.md b/README.md
index bc026e71..c9c61172 100644
--- a/README.md
+++ b/README.md
@@ -103,6 +103,8 @@ voyager.learn()
 3. After the world is created, press `Esc` key and press `Open to LAN`.
 4. Select `Allow cheats: ON` and press `Start LAN World`. You will see the bot join the world soon.
 
+Alternatively, if you are using dedicated OpenAI API deployments on Azure, you can configure the relevant API settings by following [this guide](installation/run_with_azure_api_deployments.md).
+
 # Resume from a checkpoint during learning
 
 If you stop the learning process and want to resume from a checkpoint later, you can instantiate Voyager by:
diff --git a/installation/run_with_azure_api_deployments.md b/installation/run_with_azure_api_deployments.md
new file mode 100644
index 00000000..1be5c89f
--- /dev/null
+++ b/installation/run_with_azure_api_deployments.md
@@ -0,0 +1,52 @@
+# Run With Azure API Deployments
+
+If you are using dedicated OpenAI API deployments on Azure, you can run Voyager by:
+
+```python
+from voyager import Voyager
+from voyager.agents import AzureChatModelConfig, AzureOpenAIEmbeddingsConfig
+
+# You can also use mc_port instead of azure_login, but azure_login is highly recommended
+azure_login = {
+    "client_id": "YOUR_CLIENT_ID",
+    "redirect_url": "https://127.0.0.1/auth-response",
+    "secret_value": "[OPTIONAL] YOUR_SECRET_VALUE",
+    "version": "fabric-loader-0.14.18-1.19",  # the version Voyager is tested on
+}
+openai_api_key = "YOUR_API_KEY"
+
+# If you are using OpenAI LLM deployments on Azure, you can configure them here
+azure_gpt_4_config = AzureChatModelConfig(
+    openai_api_base="BASE_URL_FOR_AZURE_GPT4_DEPLOYMENT",
+    openai_api_version="GPT4_API_VERSION",
+    deployment_name="GPT4_DEPLOYMENT_NAME",
+    openai_api_type="azure",
+    openai_api_key="YOUR_AZURE_API_KEY",  # Not an OpenAI API key starting with "sk-"
+)
+azure_gpt_35_config = AzureChatModelConfig(
+    openai_api_base="BASE_URL_FOR_AZURE_GPT35_DEPLOYMENT",
+    openai_api_version="GPT35_API_VERSION",
+    deployment_name="GPT35_DEPLOYMENT_NAME",
+    openai_api_type="azure",
+    openai_api_key="YOUR_AZURE_API_KEY",  # Not an OpenAI API key starting with "sk-"
+)
+azure_openai_embeddings_config = AzureOpenAIEmbeddingsConfig(
+    openai_api_base="BASE_URL_FOR_AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT",
+    model="MODEL_NAME",  # Check https://platform.openai.com/docs/guides/embeddings/embedding-models
+    openai_api_type="azure",
+    deployment="YOUR_DEPLOYMENT_NAME",
+    openai_api_key="YOUR_AZURE_API_KEY",  # Not an OpenAI API key starting with "sk-"
+)
+
+voyager = Voyager(
+    azure_login=azure_login,
+    openai_api_type="azure",
+    azure_gpt_4_config=azure_gpt_4_config,
+    azure_gpt_35_config=azure_gpt_35_config,
+    azure_openai_embeddings_config=azure_openai_embeddings_config,
+)
+
+# start lifelong learning
+voyager.learn()
+```
+

From 32417272603ee3bdcec5a502f9d68086999ced85 Mon Sep 17 00:00:00 2001
From: Steve Wang
Date: Thu, 14 Sep 2023 17:21:34 +0800
Subject: [PATCH 6/6] Do not pass the `request_timout` argument to AzureChatOpenAI, which would cause an error.

---
 voyager/agents/get_llm.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/voyager/agents/get_llm.py b/voyager/agents/get_llm.py
index 21d90bed..7c0d31f7 100644
--- a/voyager/agents/get_llm.py
+++ b/voyager/agents/get_llm.py
@@ -15,7 +15,6 @@ def get_llm(
         azure_model_config = azure_gpt_4_config if model_name == "gpt-4" else azure_gpt_35_config
         llm = AzureChatOpenAI(
             temperature=temperature,
-            request_timout=request_timout,
             **azure_model_config.dict(),
         )
     else:
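Taken together, the series routes every agent's chat model through the new
get_llm helper. A minimal sketch of calling it directly once all six patches
are applied; the endpoint, version, deployment, and key values below are
placeholders:

    from voyager.agents import AzureChatModelConfig
    from voyager.agents.get_llm import get_llm

    gpt4_config = AzureChatModelConfig(
        openai_api_base="https://YOUR_RESOURCE.openai.azure.com/",  # placeholder
        openai_api_version="YOUR_API_VERSION",                      # placeholder
        deployment_name="YOUR_GPT4_DEPLOYMENT_NAME",                # placeholder
        openai_api_key="YOUR_AZURE_API_KEY",                        # placeholder
    )

    # With openai_api_type="azure" and model_name="gpt-4", get_llm builds an
    # AzureChatOpenAI client from gpt4_config; with the default
    # openai_api_type="" it falls back to a plain ChatOpenAI client.
    llm = get_llm(
        model_name="gpt-4",
        temperature=0,
        request_timout=120,  # spelling follows the existing upstream parameter
        openai_api_type="azure",
        azure_gpt_4_config=gpt4_config,
    )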