Skip to content

Commit b096a73

Browse files
AutoGluon 1.4.0 DLC Release (#5096)
* AutoGluon 1.4.0: Update TOML for AutoGluon-only build * Add Autogluon v1.4.0 * AutoGluon 1.4.0: Container-specific Dockerfile security fixes * AutoGluon 1.4.0: Revert TOML to full framework build for dlc-pr-quick-checks * resolve comments
1 parent eecd71d commit b096a73

17 files changed

+2139
-4
lines changed
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
---
# Buildspec for the AutoGluon 1.3 inference DLC images (CPU and GPU).
# NOTE(review): version is pinned to 1.3.0 here — this file is the archived
# snapshot of the previous release; the live buildspec.yml is bumped to 1.4.0
# separately. Confirm the intended filename (e.g. buildspec-1-3.yml).
account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
region: &REGION <set-$REGION-in-environment>
framework: &FRAMEWORK autogluon
version: &VERSION 1.3.0
short_version: &SHORT_VERSION 1.3
arch_type: x86

repository_info:
  inference_repository: &INFERENCE_REPOSITORY
    image_type: &INFERENCE_IMAGE_TYPE inference
    # ECR repository path pieces are assembled with the project-defined
    # !join constructor (string concatenation of scalars and aliases).
    root: !join [ *FRAMEWORK, "/", *INFERENCE_IMAGE_TYPE ]
    repository_name: &REPOSITORY_NAME !join [pr, "-", *FRAMEWORK, "-", *INFERENCE_IMAGE_TYPE]
    repository: &REPOSITORY !join [ *ACCOUNT_ID, .dkr.ecr., *REGION, .amazonaws.com/, *REPOSITORY_NAME ]

context:
  # Build-context artifacts copied into every inference image.
  inference_context: &INFERENCE_CONTEXT
    torchserve-entrypoint:
      source: ../build_artifacts/inference/torchserve-entrypoint.py
      target: torchserve-entrypoint.py
    config:
      source: ../build_artifacts/inference/config.properties
      target: config.properties
    deep_learning_container:
      source: ../../src/deep_learning_container.py
      target: deep_learning_container.py

images:
  BuildAutogluonCPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_CPU_INFERENCE_PY3 false
    image_size_baseline: 6399
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT

  BuildAutogluonGPUInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &AUTOGLUON_GPU_INFERENCE_PY3 false
    image_size_baseline: 19456
    # NOTE: the anchors below intentionally shadow the CPU image's anchors of
    # the same name; YAML resolves each alias against the most recent anchor.
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py311
    cuda_version: &CUDA_VERSION cu124
    os_version: &OS_VERSION ubuntu22.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    context:
      <<: *INFERENCE_CONTEXT

autogluon/inference/buildspec.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
account_id: &ACCOUNT_ID <set-$ACCOUNT_ID-in-environment>
22
region: &REGION <set-$REGION-in-environment>
33
framework: &FRAMEWORK autogluon
4-
version: &VERSION 1.3.0
5-
short_version: &SHORT_VERSION 1.3
4+
version: &VERSION 1.4.0
5+
short_version: &SHORT_VERSION 1.4
66
arch_type: x86
77

88
repository_info:
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
# Dockerfile for the AutoGluon 1.4.0 CPU inference DLC, layered on the
# SageMaker PyTorch 2.5.1 CPU inference image.
# NOTE(review): ARG PYTHON_VERSION is declared before FROM but never referenced
# (the FROM tag hard-codes py311) — confirm whether it should be consumed or removed.
ARG PYTHON_VERSION=3.11.9

FROM 763104351884.dkr.ecr.us-west-2.amazonaws.com/pytorch-inference:2.5.1-cpu-py311-ubuntu22.04-sagemaker-v1.38-2025-07-30-17-53-15

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

# OS-level deps: upgrade base packages and add tesseract-ocr (used by
# AutoGluon multimodal OCR features); clean apt caches to keep the layer small.
RUN apt-get update \
 && apt-get -y upgrade \
 && apt-get autoremove -y \
 && apt-get install tesseract-ocr -y \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

ARG AUTOGLUON_VERSION=1.4.0

# Upgrading pip and installing/updating Python dependencies
# Comments are added to explain the reason behind each update
RUN pip install --no-cache-dir -U --trusted-host pypi.org --trusted-host files.pythonhosted.org pip \
 && pip install --no-cache-dir -U wheel \
 && pip uninstall -y dataclasses \
 # Install AutoGluon, ensuring no vulnerable dependencies are left behind
 && pip install --no-cache-dir -U autogluon==${AUTOGLUON_VERSION} \
 && pip install --no-cache-dir "ninja<1.11.1.1"

# add TS entrypoint
COPY config.properties /home/model-server

COPY torchserve-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

# Run the OSS compliance tooling to collect license/attribution artifacts,
# then remove the downloaded bundle from the image.
RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip -o ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
 && rm -rf ${HOME_DIR}/oss_compliance*

# Third-party license file shipped inside the image for compliance.
RUN curl -o /licenses-autogluon.txt https://autogluon.s3.us-west-2.amazonaws.com/licenses/THIRD-PARTY-LICENSES.txt

# TorchServe inference (8080) and management (8081) ports.
EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["torchserve", "--start", "--ts-config", "/home/model-server/config.properties", "--model-store", "/home/model-server/"]

0 commit comments

Comments
 (0)