Skip to content

Commit 172c59b

Browse files
authored
Merge pull request #23 from red-hat-data-services/add-konflux-configs
feat(build): add konflux dockerfile and conf file
2 parents d3d12e2 + 15efebe commit 172c59b

File tree

2 files changed

+53
-0
lines changed

2 files changed

+53
-0
lines changed

Dockerfile.konflux

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
# syntax=docker/dockerfile:1
# Konflux build for the Llama Stack CPU image on RHEL 9.
# NOTE: this file uses the BuildKit-only `RUN --mount=type=secret` feature, so the
# syntax directive above pins a frontend that supports it.
#
# Build args (BASE_IMAGE, LLAMA_STACK_VERSION, WHEEL_RELEASE_*) are supplied from
# konflux/cpu-ubi9.conf. APP_ROOT, HOME, CNB_USER_ID and CNB_GROUP_ID are expected
# to be provided by the base image's environment — TODO confirm against the
# quay.io/aipcc base image.

ARG BASE_IMAGE
FROM ${BASE_IMAGE}

# ARGs declared before FROM are only visible in FROM lines; BASE_IMAGE is
# re-declared here so it can be used in the LABEL below.
ARG BASE_IMAGE
ARG LLAMA_STACK_VERSION
ARG WHEEL_RELEASE_AARCH64
ARG WHEEL_RELEASE_X86_64
# WHEEL_RELEASE_PACKAGE / WHEEL_RELEASE_PROJECT_ID are not referenced directly in
# this file; presumably consumed as env by install-wheel-release.sh — verify.
ARG WHEEL_RELEASE_PACKAGE
ARG WHEEL_RELEASE_PROJECT_ID

# Red Hat / OpenShift image metadata.
# NOTE(review): the maintainer value is a stringified Python-style list
# ("['...']"); confirm this is the intended org convention rather than an
# accidental repr() of a list.
LABEL com.redhat.component="llama-stack-cpu-rhel9-container" \
      name="llama-stack/cpu-rhel9" \
      description="Llama Stack ${LLAMA_STACK_VERSION} for CPU on RHEL 9" \
      summary="Llama Stack for CPU" \
      maintainer="['managed-open-data-hub@redhat.com']" \
      io.k8s.display-name="Llama Stack for CPU" \
      io.k8s.description="Llama Stack ${LLAMA_STACK_VERSION} for CPU on RHEL 9" \
      io.openshift.expose-services="" \
      com.redhat.license_terms="https://www.redhat.com/en/about/eulas#RHAIIS" \
      com.redhat.aiplatform.image="${BASE_IMAGE}" \
      com.redhat.aiplatform.wheel_release="${WHEEL_RELEASE_AARCH64} ${WHEEL_RELEASE_X86_64}"

# Install the pre-built wheel release from the private index. The GitLab PAT is
# mounted as a BuildKit secret (never baked into a layer); uid/gid make it
# readable by the non-root build user.
RUN --mount=type=secret,id=rhel-ai-private-index-auth/BOT_PAT,target=/run/secrets/gitlab_pat,required=true,uid=${CNB_USER_ID},gid=${CNB_GROUP_ID} \
    ${APP_ROOT}/lib/tools/install-wheel-release.sh

# Root is needed only to rewrite the dnf repo configuration; drop back to the
# unprivileged build user immediately afterwards.
USER 0
# install missing RPMs (online, requires subscription)
# RUN ${APP_ROOT}/lib/tools/fromager-rpm-check.py --check-dnf -y --dnf-clean
# switch from internal mirrors to public vendor repos
RUN ${APP_ROOT}/lib/tools/public-repos.sh
USER ${CNB_USER_ID}:${CNB_GROUP_ID}

# scan for missing RPMs (offline), missing libraries / symbols, and selftest
# AIPCC-3688: do not scan the container for missing libraries and symbols
# re-add ${APP_ROOT}/lib/tools/scanlibs.py below once milvus-lite build is fixed
RUN ${APP_ROOT}/lib/tools/fromager-rpm-check.py --check-rpmdb \
    && ${APP_ROOT}/lib/tools/selftest.py

# Llama Stack configuration: runtime state/cache dirs plus the distribution's
# run config and entrypoint script.
RUN mkdir -p ${HOME}/.llama ${HOME}/.cache
COPY distribution/run.yaml ${APP_ROOT}/run.yaml
COPY --chmod=755 distribution/entrypoint.sh ${APP_ROOT}/entrypoint.sh

# Pre-download the embedding model into the image so it is available offline at
# runtime. TODO(review): consider pinning a model revision
# (huggingface-cli download --revision <sha>) for reproducible builds.
RUN huggingface-cli download ibm-granite/granite-embedding-125m-english

# Exec-form ENTRYPOINT does not expand variables, so the path is hardcoded here;
# it must match ${APP_ROOT} (/opt/app-root) used in the COPY above.
ENTRYPOINT [ "/opt/app-root/entrypoint.sh" ]

konflux/cpu-ubi9.conf

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
BASE_IMAGE=quay.io/aipcc/base-images/cpu:3.0-1757673655
LLAMA_STACK_VERSION=0.0
WHEEL_RELEASE_PROJECT_ID=71275045
WHEEL_RELEASE_PACKAGE=llama-stack-wheels
WHEEL_RELEASE_AARCH64=0.0.272+llama-stack-cpu-ubi9-aarch64
WHEEL_RELEASE_X86_64=0.0.272+llama-stack-cpu-ubi9-x86_64

0 commit comments

Comments (0)