Skip to content

Commit c9f3882

Browse files
committed
add openEuler support for DocSum
Signed-off-by: zhihang <[email protected]>
1 parent c11981f commit c9f3882

File tree

9 files changed

+1143
-0
lines changed

9 files changed

+1143
-0
lines changed

DocSum/Dockerfile.openEuler

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
# Copyright (C) 2025 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: Apache-2.0

ARG IMAGE_REPO=opea
ARG BASE_TAG=latest
FROM $IMAGE_REPO/comps-base:$BASE_TAG-openeuler

# Package installation requires root; drop back to the unprivileged
# user afterwards (base image convention).
USER root
# FFmpeg needed for media processing
RUN yum update -y && \
    yum install -y ffmpeg && \
    yum clean all && rm -rf /var/cache/yum
USER user

COPY ./docsum.py $HOME/docsum.py

ENTRYPOINT ["python", "docsum.py"]
Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
# Copyright (C) 2025 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: Apache-2.0

services:
  # LLM serving backend (vLLM on CPU, openEuler image).
  vllm-service:
    image: openeuler/vllm-cpu:0.10.1-oe2403lts
    container_name: docsum-xeon-vllm-service
    ports:
      # Quoted to avoid YAML sexagesimal/number mis-parsing of port mappings.
      - "${LLM_ENDPOINT_PORT:-8008}:80"
    volumes:
      - "${MODEL_CACHE:-./data}:/root/.cache/huggingface/hub"
    shm_size: 1g
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      HF_TOKEN: ${HF_TOKEN}
      LLM_MODEL_ID: ${LLM_MODEL_ID}
      VLLM_TORCH_PROFILER_DIR: "/mnt"
      # Env values are strings to the container; quote numeric-looking values.
      VLLM_CPU_KVCACHE_SPACE: "40"
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"]
      interval: 10s
      timeout: 10s
      retries: 100
    command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80

  # DocSum LLM microservice; waits for the vLLM backend to be healthy.
  llm-docsum-vllm:
    image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}-openeuler
    container_name: docsum-xeon-llm-server
    depends_on:
      vllm-service:
        condition: service_healthy
    ports:
      - "${LLM_PORT:-9000}:9000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LLM_ENDPOINT: ${LLM_ENDPOINT}
      LLM_MODEL_ID: ${LLM_MODEL_ID}
      HF_TOKEN: ${HF_TOKEN}
      MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS}
      MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS}
      DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME}
      LOGFLAG: ${LOGFLAG:-False}
    restart: unless-stopped

  # Whisper ASR service for audio/video document input.
  whisper:
    image: ${REGISTRY:-opea}/whisper:${TAG:-latest}-openeuler
    container_name: docsum-xeon-whisper-server
    ports:
      - "${ASR_SERVICE_PORT:-7066}:7066"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
    restart: unless-stopped

  # DocSum megaservice gateway (backend API).
  docsum-xeon-backend-server:
    image: ${REGISTRY:-opea}/docsum:${TAG:-latest}-openeuler
    container_name: docsum-xeon-backend-server
    depends_on:
      - vllm-service
      - llm-docsum-vllm
    ports:
      - "${BACKEND_SERVICE_PORT:-8888}:8888"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
      - LLM_SERVICE_HOST_IP=${LLM_SERVICE_HOST_IP}
      - LLM_SERVICE_PORT=${LLM_PORT}
      - ASR_SERVICE_HOST_IP=${ASR_SERVICE_HOST_IP}
      - ASR_SERVICE_PORT=${ASR_SERVICE_PORT}
    ipc: host
    restart: always

  # Gradio web UI for DocSum.
  docsum-gradio-ui:
    image: ${REGISTRY:-opea}/docsum-gradio-ui:${TAG:-latest}-openeuler
    container_name: docsum-xeon-ui-server
    depends_on:
      - docsum-xeon-backend-server
    ports:
      - "${FRONTEND_SERVICE_PORT:-5173}:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - BACKEND_SERVICE_ENDPOINT=${BACKEND_SERVICE_ENDPOINT}
      - DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always

networks:
  default:
    driver: bridge
Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
# Copyright (C) 2025 Huawei Technologies Co., Ltd.
# SPDX-License-Identifier: Apache-2.0

services:
  # LLM serving backend (TGI on CPU, openEuler image).
  tgi-server:
    image: openeuler/text-generation-inference-cpu:2.4.0-oe2403lts
    container_name: docsum-xeon-tgi-server
    ports:
      # Quoted to avoid YAML sexagesimal/number mis-parsing of port mappings.
      - "${LLM_ENDPOINT_PORT:-8008}:80"
    volumes:
      - "${MODEL_CACHE:-./data}:/data"
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
      HF_TOKEN: ${HF_TOKEN}
      host_ip: ${host_ip}
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"]
      interval: 10s
      timeout: 10s
      retries: 100
    shm_size: 1g
    command: --model-id ${LLM_MODEL_ID} --cuda-graphs 0 --max-input-length ${MAX_INPUT_TOKENS} --max-total-tokens ${MAX_TOTAL_TOKENS}

  # DocSum LLM microservice; waits for the TGI backend to be healthy.
  llm-docsum-tgi:
    image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}-openeuler
    container_name: docsum-xeon-llm-server
    depends_on:
      tgi-server:
        condition: service_healthy
    ports:
      - "${LLM_PORT:-9000}:9000"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      LLM_ENDPOINT: ${LLM_ENDPOINT}
      LLM_MODEL_ID: ${LLM_MODEL_ID}
      HF_TOKEN: ${HF_TOKEN}
      MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS}
      MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS}
      DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME}
      LOGFLAG: ${LOGFLAG:-False}
    restart: unless-stopped

  # Whisper ASR service for audio/video document input.
  whisper:
    image: ${REGISTRY:-opea}/whisper:${TAG:-latest}-openeuler
    container_name: docsum-xeon-whisper-server
    ports:
      - "${ASR_SERVICE_PORT:-7066}:7066"
    ipc: host
    environment:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
    restart: unless-stopped

  # DocSum megaservice gateway (backend API).
  docsum-xeon-backend-server:
    image: ${REGISTRY:-opea}/docsum:${TAG:-latest}-openeuler
    container_name: docsum-xeon-backend-server
    depends_on:
      - tgi-server
      - llm-docsum-tgi
    ports:
      - "${BACKEND_SERVICE_PORT:-8888}:8888"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
      - LLM_SERVICE_HOST_IP=${LLM_SERVICE_HOST_IP}
      - LLM_SERVICE_PORT=${LLM_PORT}
      - ASR_SERVICE_HOST_IP=${ASR_SERVICE_HOST_IP}
      - ASR_SERVICE_PORT=${ASR_SERVICE_PORT}
    ipc: host
    restart: always

  # Gradio web UI for DocSum.
  docsum-gradio-ui:
    image: ${REGISTRY:-opea}/docsum-gradio-ui:${TAG:-latest}-openeuler
    container_name: docsum-xeon-ui-server
    depends_on:
      - docsum-xeon-backend-server
    ports:
      - "${FRONTEND_SERVICE_PORT:-5173}:5173"
    environment:
      - no_proxy=${no_proxy}
      - https_proxy=${https_proxy}
      - http_proxy=${http_proxy}
      - BACKEND_SERVICE_ENDPOINT=${BACKEND_SERVICE_ENDPOINT}
      - DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
    ipc: host
    restart: always

networks:
  default:
    driver: bridge

DocSum/docker_image_build/build.yaml

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,17 @@ services:
1313
context: ../
1414
dockerfile: ./Dockerfile
1515
image: ${REGISTRY:-opea}/docsum:${TAG:-latest}
16+
docsum-openeuler:
17+
build:
18+
args:
19+
IMAGE_REPO: ${REGISTRY}
20+
BASE_TAG: ${TAG}
21+
http_proxy: ${http_proxy}
22+
https_proxy: ${https_proxy}
23+
no_proxy: ${no_proxy}
24+
context: ../
25+
dockerfile: ./Dockerfile.openEuler
26+
image: ${REGISTRY:-opea}/docsum:${TAG:-latest}-openeuler
1627
docsum-gradio-ui:
1728
build:
1829
args:
@@ -22,18 +33,39 @@ services:
2233
dockerfile: ./docker/Dockerfile.gradio
2334
extends: docsum
2435
image: ${REGISTRY:-opea}/docsum-gradio-ui:${TAG:-latest}
36+
docsum-gradio-ui-openeuler:
37+
build:
38+
args:
39+
http_proxy: ${http_proxy}
40+
https_proxy: ${https_proxy}
41+
context: ../ui
42+
dockerfile: ./docker/Dockerfile.gradio.openEuler
43+
extends: docsum
44+
image: ${REGISTRY:-opea}/docsum-gradio-ui:${TAG:-latest}-openeuler
2545
docsum-ui:
2646
build:
2747
context: ../ui
2848
dockerfile: ./docker/Dockerfile
2949
extends: docsum
3050
image: ${REGISTRY:-opea}/docsum-ui:${TAG:-latest}
51+
docsum-ui-openeuler:
52+
build:
53+
context: ../ui
54+
dockerfile: ./docker/Dockerfile.openEuler
55+
extends: docsum
56+
image: ${REGISTRY:-opea}/docsum-ui:${TAG:-latest}-openeuler
3157
docsum-react-ui:
3258
build:
3359
context: ../ui
3460
dockerfile: ./docker/Dockerfile.react
3561
extends: docsum
3662
image: ${REGISTRY:-opea}/docsum-react-ui:${TAG:-latest}
63+
docsum-react-ui-openeuler:
64+
build:
65+
context: ../ui
66+
dockerfile: ./docker/Dockerfile.react.openEuler
67+
extends: docsum
68+
image: ${REGISTRY:-opea}/docsum-react-ui:${TAG:-latest}-openeuler
3769
whisper:
3870
build:
3971
args:
@@ -43,12 +75,27 @@ services:
4375
dockerfile: comps/third_parties/whisper/src/Dockerfile
4476
extends: docsum
4577
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
78+
whisper-openeuler:
79+
build:
80+
args:
81+
http_proxy: ${http_proxy}
82+
https_proxy: ${https_proxy}
83+
context: GenAIComps
84+
dockerfile: comps/third_parties/whisper/src/Dockerfile.openEuler
85+
extends: docsum
86+
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}-openeuler
4687
llm-docsum:
4788
build:
4889
context: GenAIComps
4990
dockerfile: comps/llms/src/doc-summarization/Dockerfile
5091
extends: docsum
5192
image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}
93+
llm-docsum-openeuler:
94+
build:
95+
context: GenAIComps
96+
dockerfile: comps/llms/src/doc-summarization/Dockerfile.openEuler
97+
extends: docsum
98+
image: ${REGISTRY:-opea}/llm-docsum:${TAG:-latest}-openeuler
5299
vllm-rocm:
53100
build:
54101
context: GenAIComps

0 commit comments

Comments
 (0)