Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix: AzureOpenAIInferenceEngine fails if api_version is not set #1680

Merged
merged 6 commits into from
Mar 17, 2025
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions src/unitxt/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -1774,7 +1774,7 @@ def _prepare_credentials(self) -> CredentialsOpenAi:
), "Error while trying to run AzureOpenAIInferenceEngine: Missing environment variable param AZURE_OPENAI_HOST or OPENAI_API_VERSION"
api_url = f"{azure_openapi_host}/openai/deployments/{self.model_name}/chat/completions?api-version={api_version}"

return {"api_key": api_key, "api_url": api_url}
return {"api_key": api_key, "api_url": api_url, "api_version": api_version}

def create_client(self):
    """Build an AzureOpenAI client from the previously prepared credentials.

    Relies on ``self.credentials`` containing ``api_key``, ``api_url`` and
    ``api_version`` (as produced by ``_prepare_credentials``), plus the
    engine's default headers.
    """
    # Imported lazily so the engine can be constructed without the
    # optional ``openai`` dependency installed.
    from openai import AzureOpenAI

    creds = self.credentials
    return AzureOpenAI(
        api_key=creds["api_key"],
        base_url=creds["api_url"],
        api_version=creds["api_version"],
        default_headers=self.get_default_headers(),
    )

Expand Down Expand Up @@ -3294,14 +3295,18 @@ class HFOptionSelectingInferenceEngine(InferenceEngine, TorchDeviceMixin):

This class uses models from the HuggingFace Transformers library to calculate log probabilities for text inputs.
"""

# Identifier used when composing this engine's id/label strings.
label = "hf_option_selection"
# Name of the HuggingFace model to load (passed to AutoModelForCausalLM).
model_name: str
# Number of inputs scored per forward pass.
batch_size: int

# Maps required package -> message shown when the import is missing.
# Fix: the original message had an unterminated quote around the pip command.
_requirements_list = {
    "transformers": "Install huggingface package using 'pip install --upgrade transformers'"
}

def get_engine_id(self):
    """Return a unique engine id derived from the model name and label.

    Fix: the class declares ``model_name`` (not ``model``), so referencing
    ``self.model`` would raise ``AttributeError`` at runtime.
    """
    return get_model_and_label_id(self.model_name, self.label)


def prepare_engine(self):
from transformers import AutoModelForCausalLM, AutoTokenizer

Expand Down
Loading