forked from opendatahub-io/llama-stack-distribution
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathContainerfile
More file actions
52 lines (49 loc) · 1.41 KB
/
Containerfile
File metadata and controls
52 lines (49 loc) · 1.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# WARNING: This file is auto-generated. Do not modify it manually.
# Generated by: distribution/build.py
# NOTE(review): the fixes below must also be made in distribution/build.py,
# or they will be lost on the next regeneration.

# Base image pinned by digest for reproducibility.
# NOTE(review): assumes the ubi9/python-312 s2i image's defaults
# (APP_ROOT=/opt/app-root, non-root default user) — confirm against the image.
FROM registry.access.redhat.com/ubi9/python-312@sha256:95ec8d3ee9f875da011639213fd254256c29bc58861ac0b11f290a291fa04435
WORKDIR /opt/app-root

# Workaround kept from the generator: somehow sqlalchemy[asyncio] is not
# sufficient on its own, so install plain sqlalchemy first.
RUN pip install --no-cache-dir sqlalchemy

# Runtime dependencies (alphabetical, one per line for diffability).
# Version specifiers containing '>' MUST be quoted: unquoted, the shell parses
# 'datasets>=4.0.0' as 'datasets' plus a stdout redirection to a file named
# '=4.0.0', so the version constraint is silently dropped.
RUN pip install --no-cache-dir \
    aiosqlite \
    asyncpg \
    autoevals \
    boto3 \
    chardet \
    "datasets>=4.0.0" \
    fastapi \
    fire \
    httpx \
    ibm_watsonx_ai \
    matplotlib \
    "mcp>=1.8.1" \
    nltk \
    numpy \
    openai \
    opentelemetry-exporter-otlp-proto-http \
    opentelemetry-sdk \
    pandas \
    pillow \
    psycopg2-binary \
    "pymilvus>=2.4.10" \
    pymongo \
    pypdf \
    redis \
    requests \
    scikit-learn \
    scipy \
    sentencepiece \
    sqlalchemy[asyncio] \
    tqdm \
    transformers \
    uvicorn

# Out-of-tree llama-stack providers, pinned.
RUN pip install --no-cache-dir \
    llama_stack_provider_lmeval==0.2.4
RUN pip install --no-cache-dir \
    llama_stack_provider_trustyai_fms==0.2.1

# CPU-only PyTorch wheels; the torchao constraint is quoted for the same
# shell-redirection reason as above.
RUN pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu \
    torch \
    "torchao>=0.12.0" \
    torchvision

# --no-deps: its transitive dependencies (torch, transformers, ...) are
# already installed above; avoid pulling a second torch build.
RUN pip install --no-cache-dir --no-deps sentence-transformers

# Full flag name used: '--no-cache' only worked as an option abbreviation
# of pip's --no-cache-dir.
RUN pip install --no-cache-dir llama-stack==0.2.21

# HOME/APP_ROOT come from the base image environment.
RUN mkdir -p ${HOME}/.llama/providers.d ${HOME}/.cache
COPY distribution/run.yaml ${APP_ROOT}/run.yaml
COPY distribution/providers.d/ ${HOME}/.llama/providers.d/

# Exec-form entrypoint; the config path matches the run.yaml COPY above
# (assumes APP_ROOT=/opt/app-root — TODO confirm against the base image).
ENTRYPOINT ["python", "-m", "llama_stack.core.server.server", "/opt/app-root/run.yaml"]