diff --git a/distribution/Containerfile b/distribution/Containerfile
index 70cb42891..9ae8598b0 100644
--- a/distribution/Containerfile
+++ b/distribution/Containerfile
@@ -15,6 +15,7 @@ RUN pip install \
     fastapi \
     fire \
     httpx \
+    ibm_watsonx_ai \
     matplotlib \
     mcp>=1.8.1 \
     nltk \
@@ -40,7 +41,7 @@ RUN pip install \
 RUN pip install \
     llama_stack_provider_lmeval==0.2.4
 RUN pip install \
-    llama_stack_provider_trustyai_fms==0.2.1
+    llama_stack_provider_trustyai_fms==0.2.2
 RUN pip install --extra-index-url https://download.pytorch.org/whl/cpu torch torchao>=0.12.0 torchvision
 RUN pip install --no-deps sentence-transformers
 RUN pip install --no-cache llama-stack==0.2.21
diff --git a/distribution/build.yaml b/distribution/build.yaml
index 188db93bf..490b2b60c 100644
--- a/distribution/build.yaml
+++ b/distribution/build.yaml
@@ -6,12 +6,13 @@ distribution_spec:
     - provider_type: remote::vllm
     - provider_type: remote::bedrock
     - provider_type: inline::sentence-transformers
+    - provider_type: remote::watsonx
     vector_io:
     - provider_type: inline::milvus
     - provider_type: remote::milvus
     safety:
     - provider_type: remote::trustyai_fms
-      module: llama_stack_provider_trustyai_fms==0.2.1
+      module: llama_stack_provider_trustyai_fms==0.2.2
     agents:
     - provider_type: inline::meta-reference
     eval:
diff --git a/distribution/run.yaml b/distribution/run.yaml
index 0d8eee44d..5a27d66f4 100644
--- a/distribution/run.yaml
+++ b/distribution/run.yaml
@@ -36,6 +36,12 @@ providers:
   - provider_id: sentence-transformers
     provider_type: inline::sentence-transformers
     config: {}
+  - provider_id: watsonx
+    provider_type: remote::watsonx
+    config:
+      url: ${env.WATSONX_BASE_URL:=https://us-south.ml.cloud.ibm.com}
+      api_key: ${env.WATSONX_API_KEY:=}
+      project_id: ${env.WATSONX_PROJECT_ID:=}
 vector_io:
   - provider_id: milvus
     provider_type: inline::milvus