
Commit 6640548 ("update")
2 parents: 588197b + a782260

File tree: 65 files changed (+5458 / -1982 lines)

Lines changed: 53 additions & 0 deletions (new file)

account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
region: &REGION <set-$REGION-in-environment>
framework: &FRAMEWORK autogluon
version: &VERSION 1.1.1
short_version: &SHORT_VERSION 1.1
arch_type: x86

repository_info:
  inference_repository: &INFERENCE_REPOSITORY
    image_type: &INFERENCE_IMAGE_TYPE inference
    root: !join [ *FRAMEWORK, "/", *INFERENCE_IMAGE_TYPE ]
    repository_name: &REPOSITORY_NAME !join [pr, "-", *FRAMEWORK, "-", *INFERENCE_IMAGE_TYPE]
    repository: &REPOSITORY !join [ *ACCOUNT_ID, .dkr.ecr., *REGION, .amazonaws.com/, *REPOSITORY_NAME ]

context:
  inference_context: &INFERENCE_CONTEXT
    torchserve-entrypoint:
      source: ../build_artifacts/inference/torchserve-entrypoint.py
      target: torchserve-entrypoint.py
    config:
      source: ../build_artifacts/inference/config.properties
      target: config.properties
    deep_learning_container:
      source: ../../src/deep_learning_container.py
      target: deep_learning_container.py

images:
  BuildAutogluonCPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_CPU_INFERENCE_PY3 false
    image_size_baseline: 6399
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT

  BuildAutogluonGPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_GPU_INFERENCE_PY3 false
    image_size_baseline: 19456
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    cuda_version: &CUDA_VERSION cu121
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT
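The buildspec above relies on a custom !join YAML tag to concatenate anchored values into strings such as the ECR repository URI, the image tag, and the Dockerfile path. As a rough illustration of how such a tag can be resolved, here is a minimal PyYAML sketch; the constructor and loader choice are assumptions for illustration only, not necessarily the loader the DLC build tooling actually uses.

# Minimal sketch: resolving a "!join" sequence tag with PyYAML.
# Assumption: illustrative only; the real build tooling defines its own loader.
import yaml

def join_constructor(loader, node):
    # Construct each sequence item (aliases resolve to their anchored values),
    # then concatenate everything as one string.
    parts = loader.construct_sequence(node)
    return "".join(str(p) for p in parts)

yaml.SafeLoader.add_constructor("!join", join_constructor)

doc = (
    "framework: &FRAMEWORK autogluon\n"
    "image_type: &IMAGE_TYPE inference\n"
    'root: !join [ *FRAMEWORK, "/", *IMAGE_TYPE ]\n'
)
print(yaml.safe_load(doc)["root"])  # autogluon/inference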

autogluon/inference/buildspec.yml

Lines changed: 5 additions & 5 deletions

@@ -1,8 +1,8 @@
 account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
 region: &REGION <set-$REGION-in-environment>
 framework: &FRAMEWORK autogluon
-version: &VERSION 1.1.1
-short_version: &SHORT_VERSION 1.1
+version: &VERSION 1.2.0
+short_version: &SHORT_VERSION 1.2
 arch_type: x86

 repository_info:
@@ -32,7 +32,7 @@ images:
     device_type: &DEVICE_TYPE cpu
     python_version: &DOCKER_PYTHON_VERSION py3
     tag_python_version: &TAG_PYTHON_VERSION py311
-    os_version: &OS_VERSION ubuntu20.04
+    os_version: &OS_VERSION ubuntu22.04
     tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
     docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
     context:
@@ -45,8 +45,8 @@ images:
     device_type: &DEVICE_TYPE gpu
     python_version: &DOCKER_PYTHON_VERSION py3
     tag_python_version: &TAG_PYTHON_VERSION py311
-    cuda_version: &CUDA_VERSION cu121
-    os_version: &OS_VERSION ubuntu20.04
+    cuda_version: &CUDA_VERSION cu124
+    os_version: &OS_VERSION ubuntu22.04
     tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
     docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
     context:
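With the bumped anchors, the GPU image tag joins to 1.2.0-gpu-py311-cu124-ubuntu22.04 and the GPU Dockerfile path to docker/1.2/py3/cu124/Dockerfile.gpu. The short check below mirrors the !join expressions with plain string concatenation; it is a worked example, not part of the build tooling.

# Worked example: what the updated GPU anchors should join to.
version, short_version = "1.2.0", "1.2"
device, tag_py, docker_py = "gpu", "py311", "py3"
cuda, os_ver = "cu124", "ubuntu22.04"

tag = "-".join([version, device, tag_py, cuda, os_ver])
docker_file = f"docker/{short_version}/{docker_py}/{cuda}/Dockerfile.{device}"

assert tag == "1.2.0-gpu-py311-cu124-ubuntu22.04"
assert docker_file == "docker/1.2/py3/cu124/Dockerfile.gpu"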
Lines changed: 61 additions & 0 deletions (new file)

ARG PYTHON_VERSION=3.11.9

FROM 763104351884.dkr.ecr.us-west-2.amazonaws.com/pytorch-inference:2.5.1-cpu-py311-ubuntu22.04-sagemaker

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

RUN apt-get update \
 && apt-get -y upgrade \
 && apt-get autoremove -y \
 && apt-get install tesseract-ocr -y \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

ARG AUTOGLUON_VERSION=1.2.0

# Upgrading pip and installing/updating Python dependencies
# Comments are added to explain the reason behind each update
RUN pip install --no-cache-dir -U --trusted-host pypi.org --trusted-host files.pythonhosted.org pip \
 && pip install --no-cache-dir -U wheel \
 && pip uninstall -y dataclasses \
 && pip install --no-cache-dir -U numpy numba \
 # Install AutoGluon, ensuring no vulnerable dependencies are left behind
 && pip install --no-cache-dir -U autogluon==${AUTOGLUON_VERSION} \
 # Capping setuptools to 69.5.1 to fix AutoMM tests
 && pip install --no-cache-dir setuptools==69.5.1 \
 # Update urllib3 to fix vulnerability id 71608
 && pip install --no-cache-dir -U urllib3 \
 # Cap pillow & ninja to fix sanity test
 && pip install --no-cache-dir "pillow<11.0.0" \
 && pip install --no-cache-dir "ninja<1.11.1.1"

# add TS entrypoint
COPY config.properties /home/model-server

COPY torchserve-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip -o ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
 && rm -rf ${HOME_DIR}/oss_compliance*

RUN curl -o /licenses-autogluon.txt https://autogluon.s3.us-west-2.amazonaws.com/licenses/THIRD-PARTY-LICENSES.txt

EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]
