Skip to content

Commit 5edcb6e

Browse files
authored
Merge pull request trustyai-explainability#30 from adolfo-ab/update-pr-container-build
feat: update pr-container-build workflow
2 parents aee0e6d + e569691 commit 5edcb6e

File tree

2 files changed

+32
-28
lines changed

2 files changed

+32
-28
lines changed

.github/workflows/pr-container-build.yaml

Lines changed: 31 additions & 28 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
name: Build PR LLS Distro Container Image
1+
name: Build and Push Distro Image
22

33
on:
44
pull_request_target:
@@ -8,6 +8,7 @@ env:
88
REGISTRY: quay.io
99
ORG: trustyai_testing
1010
IMAGE_NAME: llama-stack-trustyai-fms
11+
MODULE_VERSION: '0.2.2'
1112

1213
jobs:
1314
build-and-push:
@@ -52,6 +53,7 @@ jobs:
5253
- name: Copy provider code to build context
5354
run: |
5455
cp -r llama_stack_provider_trustyai_fms/ build-context/
56+
cp -r providers.d/ build-context/
5557
cp run.yaml build-context/
5658
cp pyproject.toml build-context/
5759
@@ -60,37 +62,38 @@ jobs:
6062
cat > build-context/Containerfile << 'EOF'
6163
FROM registry.access.redhat.com/ubi9/python-312:latest
6264
WORKDIR /opt/app-root
63-
64-
# Copy the local provider code
65-
COPY llama_stack_provider_trustyai_fms/ /opt/app-root/src/
65+
66+
# Copy the local provider code and pyproject.toml to the same directory
67+
COPY llama_stack_provider_trustyai_fms/ /opt/app-root/llama_stack_provider_trustyai_fms/
6668
COPY pyproject.toml /opt/app-root/
67-
69+
6870
RUN pip install \
69-
aiosqlite \
70-
autoevals \
71-
datasets \
72-
fastapi \
73-
fire \
74-
httpx \
75-
kubernetes \
76-
"openai==1.66.0" \
77-
opentelemetry-exporter-otlp-proto-http \
78-
opentelemetry-sdk \
79-
pandas \
80-
requests \
81-
sqlalchemy[asyncio] \
82-
uvicorn
83-
RUN pip install --index-url https://download.pytorch.org/whl/cpu torch torchvision
84-
RUN pip install --no-deps sentence-transformers
85-
RUN pip install --no-cache llama-stack==0.2.16
86-
87-
# Install the local provider package
88-
RUN pip install -e /opt/app-root/
89-
71+
aiosqlite \
72+
autoevals \
73+
datasets \
74+
fastapi \
75+
fire \
76+
httpx \
77+
kubernetes \
78+
"openai==1.66.0" \
79+
opentelemetry-exporter-otlp-proto-http \
80+
opentelemetry-sdk \
81+
pandas \
82+
requests \
83+
sqlalchemy[asyncio] \
84+
uvicorn
85+
86+
RUN pip install --index-url https://download.pytorch.org/whl/cpu torch torchvision
87+
RUN pip install --no-deps sentence-transformers
88+
RUN pip install --no-cache llama-stack==0.2.22
89+
90+
# Install the local provider package
91+
RUN pip install /opt/app-root/
92+
9093
RUN mkdir -p ${HOME}/.cache
9194
COPY run.yaml ${APP_ROOT}/run.yaml
92-
93-
ENTRYPOINT ["python", "-m", "llama_stack.distribution.server.server", "--config", "/opt/app-root/run.yaml"]
95+
96+
ENTRYPOINT ["python", "-m", "llama_stack.core.server.server", "/opt/app-root/run.yaml"]
9497
EOF
9598
9699
- name: Build Image

run.yaml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -7,6 +7,7 @@ providers:
77
safety:
88
- provider_id: trustyai_fms
99
provider_type: remote::trustyai_fms
10+
module: llama_stack_provider_trustyai_fms==${env.MODULE_VERSION}
1011
config:
1112
orchestrator_url: ${env.FMS_ORCHESTRATOR_URL}
1213
shields:

0 commit comments

Comments (0)