Skip to content

Commit be62d8d

Browse files
committed
add openEuler support for llm-docsum
Signed-off-by: zhihang <[email protected]>
1 parent caed793 commit be62d8d

File tree

4 files changed

+93
-14
lines changed

4 files changed

+93
-14
lines changed

.github/workflows/docker/compose/llms-compose.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,10 @@ services:
2323
build:
2424
dockerfile: comps/llms/src/doc-summarization/Dockerfile
2525
image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}
26+
llm-docsum-openeuler:
27+
build:
28+
dockerfile: comps/llms/src/doc-summarization/Dockerfile.openEuler
29+
image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}-openeuler
2630
llm-faqgen:
2731
build:
2832
dockerfile: comps/llms/src/faq-generation/Dockerfile
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
# Copyright (C) 2025 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: Apache-2.0

FROM openeuler/python:3.11.13-oe2403lts

# "cpu" (default) installs CPU-only torch wheels; any other value uses the GPU requirements file.
ARG ARCH="cpu"

# shadow provides useradd; jemalloc-devel is required by the service's allocator.
# Clean the yum cache in the same layer so it never persists in the image.
RUN yum update -y && \
    yum install -y \
    jemalloc-devel \
    shadow && \
    yum clean all && rm -rf /var/cache/yum

# Non-root runtime user. useradd -m already creates /home/user, so no extra mkdir is needed.
RUN useradd -m -s /bin/bash user

# --chown makes the app tree owned by the runtime user in the copy layer itself;
# the original chown-before-COPY left /home/user/comps root-owned.
COPY --chown=user comps /home/user/comps

ARG uvpip='uv pip install --system --no-cache-dir'
# ${ARCH} is quoted so an empty/whitespace override cannot break the test expression.
RUN pip install --no-cache-dir --upgrade pip setuptools uv && \
    if [ "${ARCH}" = "cpu" ]; then \
      $uvpip torch torchvision --index-url https://download.pytorch.org/whl/cpu; \
      $uvpip -r /home/user/comps/llms/src/doc-summarization/requirements-cpu.txt; \
    else \
      $uvpip -r /home/user/comps/llms/src/doc-summarization/requirements-gpu.txt; \
    fi

ENV PYTHONPATH=$PYTHONPATH:/home/user

# air gapped support: set model cache dir
RUN mkdir -p /data && chown -R user /data
ENV HF_HUB_CACHE=/data

# air gapped support: pre-download tiktoken bpe files.
# ENV is set before the download so tiktoken writes into the cache dir; mkdir,
# download and chown happen in ONE layer so the chown does not duplicate the data.
ENV TIKTOKEN_CACHE_DIR=/opt/tiktoken_cache
RUN mkdir -p /opt/tiktoken_cache && \
    python -c 'import tiktoken; tiktoken.get_encoding("cl100k_base");tiktoken.get_encoding("o200k_base");tiktoken.get_encoding("gpt2");tiktoken.get_encoding("r50k_base");tiktoken.get_encoding("p50k_base");tiktoken.get_encoding("p50k_edit");' && \
    chown -R user /opt/tiktoken_cache

USER user

WORKDIR /home/user/comps/llms/src/doc-summarization

ENTRYPOINT ["python", "opea_docsum_microservice.py"]

tests/llms/test_llms_doc-summarization_tgi.sh

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,8 @@ service_name="docsum-tgi"
2121

2222
function build_docker_images() {
2323
cd $WORKPATH
24-
docker build --no-cache -t ${REGISTRY:-opea}/llm-docsum:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/doc-summarization/Dockerfile .
24+
dockerfile_name="comps/llms/src/doc-summarization/$1"
25+
docker build --no-cache -t ${REGISTRY:-opea}/llm-docsum:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f "${dockerfile_name}" .
2526
if [ $? -ne 0 ]; then
2627
echo "opea/llm-docsum built fail"
2728
exit 1
@@ -148,29 +149,43 @@ function validate_microservices() {
148149
'{"messages":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.", "max_tokens":32, "language":"en", "summary_type": "refine", "chunk_size": 2000, "timeout":200}'
149150
}
150151

151-
function stop_docker() {
152+
function stop_service() {
    # Tear down the docsum compose deployment. Guard the cd (SC2164): if it
    # fails we must not run `docker compose down` from an arbitrary directory.
    cd "$WORKPATH/comps/llms/deployment/docker_compose" || exit 1
    docker compose -f compose_doc-summarization.yaml down --remove-orphans
}
155156

156157
function main() {
157158

158-
stop_docker
159+
stop_service
159160

160-
build_docker_images
161+
build_docker_images "Dockerfile"
161162

162-
trap stop_docker EXIT
163+
trap stop_service EXIT
163164

164165
echo "Test normal env ..."
165166
start_service
166167
validate_microservices
167-
stop_docker
168+
stop_service
168169

169170
if [[ -n "${DATA_PATH}" ]]; then
170171
echo "Test air gapped env ..."
171172
start_service true
172173
validate_microservices
173-
stop_docker
174+
stop_service
175+
fi
176+
177+
build_docker_images "Dockerfile.openEuler"
178+
179+
echo "Test with openEuler OS ..."
180+
start_service
181+
validate_microservices
182+
stop_service
183+
184+
if [[ -n "${DATA_PATH}" ]]; then
185+
echo "Test air gapped env ..."
186+
start_service true
187+
validate_microservices
188+
stop_service
174189
fi
175190

176191
echo y | docker system prune

tests/llms/test_llms_doc-summarization_vllm.sh

Lines changed: 23 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,8 @@ function build_docker_images() {
3232
fi
3333

3434
cd $WORKPATH
35-
docker build --no-cache -t ${REGISTRY:-opea}/llm-docsum:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/llms/src/doc-summarization/Dockerfile .
35+
dockerfile_name="comps/llms/src/doc-summarization/$1"
36+
docker build --no-cache -t ${REGISTRY:-opea}/llm-docsum:${TAG:-latest} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f "${dockerfile_name}" .
3637
if [ $? -ne 0 ]; then
3738
echo "opea/llm-docsum built fail"
3839
exit 1
@@ -159,30 +160,45 @@ function validate_microservices() {
159160
'{"messages":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.", "max_tokens":32, "language":"en", "summary_type": "refine", "chunk_size": 2000, "timeout":200}'
160161
}
161162

162-
function stop_docker() {
163+
function stop_service() {
    # Tear down the docsum compose deployment. Guard the cd (SC2164): if it
    # fails we must not run `docker compose down` from an arbitrary directory.
    cd "$WORKPATH/comps/llms/deployment/docker_compose" || exit 1
    docker compose -f compose_doc-summarization.yaml down --remove-orphans
}
166167

167168
function main() {
    # End-to-end test driver: runs the full validation suite twice, once for
    # the default image and once for the openEuler-based image. Both builds
    # intentionally use the same tag so compose picks up whichever was built last.

    stop_service

    build_docker_images "Dockerfile"

    # Ensure containers are torn down even if a validation step aborts the script.
    trap stop_service EXIT

    echo "Test normal env ..."
    start_service
    validate_microservices
    stop_service

    # Air-gapped mode is only exercised when a local model cache is provided.
    if [[ -n "${DATA_PATH}" ]]; then
        echo "Test air gapped env ..."
        start_service true
        validate_microservices
        stop_service
    fi

    build_docker_images "Dockerfile.openEuler"

    echo "Test with openEuler OS ..."
    start_service
    validate_microservices
    stop_service

    if [[ -n "${DATA_PATH}" ]]; then
        # Distinct message: the original duplicated "Test air gapped env ..."
        # verbatim, making the two air-gapped phases indistinguishable in CI logs.
        echo "Test air gapped env with openEuler OS ..."
        start_service true
        validate_microservices
        stop_service
    fi

    echo y | docker system prune

}

0 commit comments

Comments
 (0)