This repository was archived by the owner on Oct 19, 2025. It is now read-only.

Commit 0f40a66

Add support for IPEX 2.7
1 parent 0e47b81 commit 0f40a66

3 files changed: +53 -134 lines changed

Dockerfile

Lines changed: 46 additions & 128 deletions
@@ -1,57 +1,10 @@
 # SPDX-License-Identifier: Apache-2.0
-ARG UBUNTU_VERSION=22.04
-FROM ubuntu:${UBUNTU_VERSION} AS oneapi-lib-installer
+ARG UBUNTU_VERSION=24.04
+FROM ubuntu:${UBUNTU_VERSION}
 
 # Make sure Dockerfile doesn't succeed if there are errors.
 RUN ["/bin/sh", "-c", "/bin/bash", "-o", "pipefail", "-c"]
 
-# Install prerequisites to install oneAPI runtime libraries.
-# hadolint ignore=DL3008
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends --fix-missing \
-    ca-certificates \
-    gnupg2 \
-    gpg-agent \
-    unzip \
-    wget
-
-# hadolint ignore=DL4006
-RUN wget --progress=dot:giga -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \
-    | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \
-    echo 'deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main' \
-    | tee /etc/apt/sources.list.d/oneAPI.list
-
-# Define and install oneAPI runtime libraries for less space.
-ARG DPCPP_VER=2024.2.1-1079
-ARG MKL_VER=2024.2.2-15
-ARG CMPLR_COMMON_VER=2024.2
-# intel-oneapi-compiler-shared-common provides `sycl-ls` and other utilities like the compiler
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends --fix-missing \
-    intel-oneapi-dpcpp-cpp-${CMPLR_COMMON_VER}=${DPCPP_VER} \
-    intel-oneapi-compiler-shared-common-${CMPLR_COMMON_VER}=${DPCPP_VER} \
-    intel-oneapi-runtime-dpcpp-cpp=${DPCPP_VER} \
-    intel-oneapi-runtime-mkl=${MKL_VER} && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*
-
-# Add and prepare Intel Graphics driver index. This is dependent on being able to pass your GPU with a working driver on the host side where the image will run.
-# hadolint ignore=DL4006
-RUN wget --progress=dot:giga -qO - https://repositories.intel.com/graphics/intel-graphics.key | \
-    gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg
-# hadolint ignore=DL4006
-RUN echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy unified" | \
-    tee /etc/apt/sources.list.d/intel.gpu.jammy.list
-
-ARG UBUNTU_VERSION=22.04
-FROM ubuntu:${UBUNTU_VERSION}
-
-# Copy all the files from the oneAPI runtime libraries image into the actual final image.
-RUN mkdir -p /opt/intel
-COPY --from=oneapi-lib-installer /opt/intel/ /opt/intel/
-COPY --from=oneapi-lib-installer /usr/share/keyrings/intel-graphics.gpg /usr/share/keyrings/intel-graphics.gpg
-COPY --from=oneapi-lib-installer /etc/apt/sources.list.d/intel.gpu.jammy.list /etc/apt/sources.list.d/intel.gpu.jammy.list
-
 # Set apt install to not be interactive for some packages that require it.
 ENV DEBIAN_FRONTEND=noninteractive
 
@@ -62,42 +15,62 @@ ENV LD_LIBRARY_PATH=/opt/intel/oneapi/redist/lib:/opt/intel/oneapi/redist/lib/in
 # hadolint ignore=DL3008
 RUN apt-get update && \
     apt-get install -y --no-install-recommends --fix-missing \
-    build-essential \
     ca-certificates \
     fonts-noto \
     git \
     gnupg2 \
     gpg-agent \
     software-properties-common \
+    unzip \
     wget && \
     apt-get clean && \
    rm -rf /var/lib/apt/lists/*
 
+# Add and prepare Intel Graphics driver index. This is dependent on being able to pass your GPU with a working driver on the host side where the image will run.
+# hadolint ignore=DL4006
+# Install the Intel graphics GPG public key
+RUN wget --progress=dot:giga -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \
+    | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null && \
+    echo 'deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main' \
+    | tee /etc/apt/sources.list.d/oneAPI.list
+RUN wget --progress=dot:giga -qO - https://repositories.intel.com/gpu/intel-graphics.key | \
+    gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
+RUN echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu noble unified" | \
+    tee /etc/apt/sources.list.d/intel-gpu-noble.list
+
 # Sets versions of Level-Zero, OpenCL and memory allocator chosen.
-ARG ICD_VER=23.17.26241.33-647~22.04
-ARG LEVEL_ZERO_GPU_VER=1.3.26241.33-647~22.04
-ARG ALLOCATOR=tcmalloc
-ENV ALLOCATOR=${ALLOCATOR}
-ARG ALLOCATOR_PACKAGE=libgoogle-perftools-dev
-ARG ALLOCATOR_LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libtcmalloc.so
-RUN if [ "${ALLOCATOR}" = "jemalloc" ] ; then \
-    ALLOCATOR_PACKAGE=libjemalloc-dev; \
-    ALLOCATOR_LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so; \
-    fi
+ARG CMPLR_COMMON_VER=2025.0
+ARG DPCPP_VER=2025.0.4-1519
+ARG LIBZE_VER=1.20.2.0-1098~24.04
+ARG OCLOC_VER=25.05.32567.18-1099~24.04
 
 # Install Level-Zero and OpenCL backends.
 RUN apt-get update && \
     apt-get install -y --no-install-recommends --fix-missing \
-    intel-opencl-icd=${ICD_VER} \
-    intel-level-zero-gpu=${LEVEL_ZERO_GPU_VER} && \
+    clinfo \
+    intel-gsc \
+    intel-oneapi-common-vars \
+    intel-oneapi-dpcpp-cpp-${CMPLR_COMMON_VER}=${DPCPP_VER} \
+    intel-oneapi-compiler-shared-common-${CMPLR_COMMON_VER}=${DPCPP_VER} \
+    intel-oneapi-runtime-dpcpp-cpp=${DPCPP_VER} \
+    intel-opencl-icd=${OCLOC_VER} \
+    libze-intel-gpu1=${OCLOC_VER} \
+    libze-dev=${LIBZE_VER} && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
 
+# Make sure everything is up to date.
+# hadolint ignore=DL3008
+RUN apt-get update && \
+    apt-get upgrade -y --no-install-recommends --fix-missing && \
+    apt-get autoremove -y && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
 # Install Python and other associated packages from PPA since default is 3.10
-ARG PYTHON=python3.11
+ARG PYTHON=python3.12
 # hadolint ignore=DL3008
-RUN add-apt-repository ppa:deadsnakes/ppa && \
-    apt-get update && \
+RUN apt-get update && \
     apt-get install -y --no-install-recommends --fix-missing \
     ${PYTHON} \
     ${PYTHON}-dev \
@@ -107,67 +80,18 @@ RUN add-apt-repository ppa:deadsnakes/ppa && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
 
-# Update pip
-# hadolint ignore=DL3013
-RUN python3 -m pip install -U \
-    pip \
-    setuptools
-
-# Softlink Python to make it default.
-RUN ln -sf "$(which ${PYTHON})" /usr/local/bin/python && \
-    ln -sf "$(which ${PYTHON})" /usr/local/bin/python3 && \
-    ln -sf "$(which ${PYTHON})" /usr/bin/python && \
-    ln -sf "$(which ${PYTHON})" /usr/bin/python3
-
-# Install Comfy UI/Pytorch dependencies.
-# hadolint ignore=DL3008
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends --fix-missing \
-    ${ALLOCATOR_PACKAGE} \
-    libgl1 \
-    libglib2.0-0 \
-    libgomp1 \
-    numactl && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*
-
-# Getting the latest versions of Intel's Compute Runtime and associated packages on Github and installing it will update everything we installed before.
-RUN mkdir neo
-WORKDIR /neo
-RUN wget --progress=dot:giga https://github.com/intel/intel-graphics-compiler/releases/download/v2.5.6/intel-igc-core-2_2.5.6+18417_amd64.deb && \
-    wget --progress=dot:giga https://github.com/intel/intel-graphics-compiler/releases/download/v2.5.6/intel-igc-opencl-2_2.5.6+18417_amd64.deb && \
-    wget --progress=dot:giga https://github.com/intel/compute-runtime/releases/download/24.52.32224.5/intel-level-zero-gpu_1.6.32224.5_amd64.deb && \
-    wget --progress=dot:giga https://github.com/intel/compute-runtime/releases/download/24.52.32224.5/intel-opencl-icd_24.52.32224.5_amd64.deb && \
-    wget --progress=dot:giga https://github.com/intel/compute-runtime/releases/download/24.52.32224.5/libigdgmm12_22.5.5_amd64.deb && \
-    wget --progress=dot:giga https://github.com/oneapi-src/level-zero/releases/download/v1.19.2/level-zero_1.19.2+u22.04_amd64.deb && \
-    wget --progress=dot:giga https://github.com/oneapi-src/level-zero/releases/download/v1.19.2/level-zero-devel_1.19.2+u22.04_amd64.deb && \
-    dpkg -i -- *.deb
-WORKDIR /
-
-# Make sure everything is up to date.
-# hadolint ignore=DL3008
-RUN apt-get update && \
-    apt-get upgrade -y --no-install-recommends --fix-missing && \
-    apt-get autoremove -y && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*
-
-# Remove linux-libc-dev for security reasons without disturbing anything else.
-RUN dpkg -r --force-depends linux-libc-dev
-
 # Copy the startup script to the /bin/ folder and make executable.
 COPY startup.sh /bin/
 RUN chmod 755 /bin/startup.sh
 
 # Volumes that can be used by the image when making containers.
-VOLUME [ "/deps" ]
+VOLUME [ "/deps2" ]
 VOLUME [ "/ComfyUI" ]
 VOLUME [ "/models" ]
 VOLUME [ "/root/.cache/huggingface" ]
 
-# Setup location of Python virtual environment and make sure LD_PRELOAD contains the path of the allocator chosen.
-ENV VENVDir=/deps/venv
-ENV LD_PRELOAD=${ALLOCATOR_LD_PRELOAD}
+# Setup location of Python virtual environment
+ENV VENVDir=/deps2/venv
 
 # Enable Level Zero system management
 # See https://spec.oneapi.io/level-zero/latest/sysman/PROG.html
@@ -178,11 +102,10 @@ ENV ZES_ENABLE_SYSMAN=1
 ENV NEOReadDebugKeys=1
 ENV ClDeviceGlobalMemSizeAvailablePercent=100
 
-# Enable double precision emulation. Turned on by default to enable torch.compile to work for various kernels. Turn this off if you need to enable attention
-# slicing to address the 4GB single allocation limit with Intel Xe GPUs and lower and don't use torch.compile.
+# Enable double precision emulation. Turned off by default to enable attention slicing to address the 4GB single allocation limit with Intel Xe GPUs and lower.
 # See https://github.com/intel/compute-runtime/blob/master/opencl/doc/FAQ.md#feature-double-precision-emulation-fp64
-ENV OverrideDefaultFP64Settings=1
-ENV IGC_EnableDPEmulation=1
+#ENV OverrideDefaultFP64Settings=1
+#ENV IGC_EnableDPEmulation=1
 
 # Enable SYCL variables for cache reuse and single threaded mode.
 # See https://github.com/intel/llvm/blob/sycl/sycl/doc/EnvironmentVariables.md
@@ -191,17 +114,12 @@ ENV SYCL_PI_LEVEL_ZERO_SINGLE_THREAD_MODE=1
 
 # Setting to turn on for Intel Xe GPUs that do not have XMX cores which include any iGPUs from Intel Ice Lake to Meteor Lake.
 #ENV BIGDL_LLM_XMX_DISABLED=1
+
 # Linux only setting that speeds up compute workload submissions allowing them to run concurrently on a single hardware queue. Turned off by default since
 # this was only introduced recently with the Xe graphics driver you need to turn on by default manually and development focus has been on using this feature
 # with Data Center GPU Max Series. Only also seems to benefit LLMs mostly when Intel encourages turning it on on Intel Arc cards. Also need kernel 6.2 or up.
 # See https://www.intel.com/content/www/us/en/developer/articles/guide/level-zero-immediate-command-lists.html
 #ENV SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1
-# Only use if something with Intel's low level libraries aren't working, see https://github.com/intel/xetla/tree/main for more details on what this affects.
-#ENV USE_XETLA=OFF
-
-# Set variable for better training performance in case.
-# See https://github.com/intel/intel-extension-for-pytorch/issues/296#issuecomment-1461118993
-ENV IPEX_XPU_ONEDNN_LAYOUT=1
 
 # Set to false if CPU is to be used to launch ComfyUI. XPU is default.
 ARG UseXPU=true
@@ -217,7 +135,7 @@ ENV UseIPEXRUN=${UseIPEXRUN}
 ARG IPEXRUNArgs=""
 ENV IPEXRUNArgs=${IPEXRUNArgs}
 
-# Pass in ComfyUI arguments as an environment variable so it can be used in startup.sh which passes it on.
+# Pass in ComfyUI arguments as an environment variable so it can be used in startup_nightly.sh which passes it on.
 ARG ComfyArgs=""
 ENV ComfyArgs=${ComfyArgs}
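For orientation, here is a minimal sketch of building and running the image this Dockerfile now produces. The image tag, host paths, and published port are illustrative assumptions rather than part of the commit, and, as the Dockerfile comment notes, GPU access still depends on a working Intel driver on the host being passed through (here via /dev/dri):

# Build with the defaults the Dockerfile declares (Ubuntu 24.04, python3.12, XPU enabled).
docker build -t comfyui-ipex:2.7 .
# Pass the Intel GPU through and mount the volumes the Dockerfile declares; 8188 is
# ComfyUI's default port. FP64 emulation is now commented out in the image, so the two
# -e flags re-enable it only for containers whose workloads need it.
docker run -it --device /dev/dri \
    -e OverrideDefaultFP64Settings=1 -e IGC_EnableDPEmulation=1 \
    -v "$HOME/comfy/deps2:/deps2" \
    -v "$HOME/comfy/ComfyUI:/ComfyUI" \
    -v "$HOME/comfy/models:/models" \
    -v "$HOME/.cache/huggingface:/root/.cache/huggingface" \
    -p 8188:8188 \
    comfyui-ipex:2.7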

LICENSE

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright [2023-2024] [Simon Lui]
+   Copyright [2023-2025] [Simon Lui]
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

startup.sh

Lines changed: 6 additions & 5 deletions
@@ -28,12 +28,13 @@ fi
 if [ "$FirstLaunch" = "true" ]
 then
     echo "Installing ComfyUI Python dependencies."
-    python -m pip install torch==2.3.1+cxx11.abi torchvision==0.18.1+cxx11.abi torchaudio==2.3.1+cxx11.abi intel-extension-for-pytorch==2.3.110+xpu oneccl_bind_pt==2.3.100+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
-    # Install dependency to make torch.compile work.
-    pip install --pre pytorch-triton-xpu==3.1.0+91b14bf559 --index-url https://download.pytorch.org/whl/nightly/xpu
+    python -m pip install uv
+    uv pip install dpcpp-cpp-rt==2025.0.4 mkl-dpcpp==2025.0.1 oneccl-devel==2021.14.1 impi-devel==2021.14.1 ruamel-yaml
+    uv pip install torch==2.7.0 torchvision==0.22.0 torchaudio==2.7.0 --index-url https://download.pytorch.org/whl/xpu
+    uv pip install intel-extension-for-pytorch==2.7.10+xpu oneccl_bind_pt==2.7.0+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
     # Comment out the above command and uncomment the following one instead if you are a user from the PRC.
-    #python -m pip install torch==2.3.1+cxx11.abi torchvision==0.18.1+cxx11.abi torchaudio==2.3.1+cxx11.abi intel-extension-for-pytorch==2.3.110+xpu oneccl_bind_pt==2.3.100+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
-    pip install -r requirements.txt
+    #uv pip install intel-extension-for-pytorch==2.7.10+xpu oneccl_bind_pt==2.7.0+xpu --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
+    uv pip install -r requirements.txt
 fi
 
 # Launch ComfyUI based on whether ipexrun is set to be used or not. Explicit string splitting is done by the shell here so shellcheck warning is ignored.
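After the first launch has pulled these wheels into the virtual environment under /deps2/venv (the VENVDir the Dockerfile sets), a quick sanity check inside the container might look like the following sketch; it only confirms that the OpenCL runtime sees a device and that the PyTorch 2.7 XPU build and IPEX 2.7.10 import and detect the GPU:

# clinfo is installed by the Dockerfile and lists the OpenCL platforms/devices visible in the container.
clinfo -l
# Confirm the Python stack can see the XPU device.
python -c "import torch, intel_extension_for_pytorch as ipex; print(torch.__version__, ipex.__version__, torch.xpu.is_available())"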
