2 changes: 1 addition & 1 deletion .github/pins/pti.txt
@@ -1 +1 @@
15a201d25e5659692613b98ee33513263b689101
7286b06950ea82edd6dd07367072df65f1a24fc4
19 changes: 4 additions & 15 deletions .github/workflows/build-test-reusable.yml
@@ -283,21 +283,10 @@ jobs:
- name: Build PTI && Run Proton tests
if: matrix.suite == 'rest'
run: |
# `intel-pti` can be installed in "Setup PyTorch" step with `pytorch_mode==wheels`
pip uninstall intel-pti -y
PTI_COMMIT_ID="$(<.github/pins/pti.txt)"
git clone https://github.com/intel/pti-gpu.git
cd pti-gpu
git checkout $PTI_COMMIT_ID
cd sdk
cmake --preset linux-icpx-release
BUILD_TESTING=1 PTI_BUILD_SAMPLES=1 cmake --build --preset linux-icpx-release

PTI_LIBS_DIR="$(pwd)/build-linux-icpx-release/lib/"
cd ../..

export LD_LIBRARY_PATH=$PTI_LIBS_DIR:$LD_LIBRARY_PATH
export TRITON_XPUPTI_LIB_PATH=$PTI_LIBS_DIR
./scripts/install-pti.sh
LEVEL_ZERO_LIB=$(pwd)/.scripts_cache/level_zero_for_pti/level-zero-1.24.2/build/lib/
ls $LEVEL_ZERO_LIB
export LD_LIBRARY_PATH=$LEVEL_ZERO_LIB:$LD_LIBRARY_PATH
cd third_party/proton/test
pytest test_api.py test_cmd.py test_lib.py test_profile.py test_viewer.py --device xpu -s -v
cd ..
49 changes: 49 additions & 0 deletions scripts/build_pti_data/CMakeLists.txt
@@ -0,0 +1,49 @@
cmake_minimum_required(VERSION 3.20)

project(
pti-python-binaries
VERSION 0.0.1
LANGUAGES C CXX)

include(FetchContent)

if(DEFINED ENV{PTI_PINNED_COMMIT})
set(PTI_PINNED_COMMIT $ENV{PTI_PINNED_COMMIT})
else()
message(FATAL_ERROR "PTI_PINNED_COMMIT env var not defined")
endif()

if(DEFINED ENV{LEVELZERO_INCLUDE_DIR})
set(LevelZero_INCLUDE_DIR "$ENV{LEVELZERO_INCLUDE_DIR}")
else()
message(STATUS "LEVELZERO_INCLUDE_DIR env var not defined; try to use system version")
endif()

if(DEFINED ENV{LEVELZERO_LIBRARY})
set(LevelZero_LIBRARY "$ENV{LEVELZERO_LIBRARY}")
else()
message(STATUS "LEVELZERO_LIBRARY env var not defined; try to use system version")
endif()

FetchContent_Declare(
pti-lib
GIT_REPOSITORY https://github.com/intel/pti-gpu.git
GIT_TAG ${PTI_PINNED_COMMIT}
SOURCE_SUBDIR sdk
)

if(NOT APPLE)
list(APPEND CMAKE_INSTALL_RPATH $ORIGIN)
endif()

# Sets the installation directories to be inside the root of the virtual
# environment, e.g., .venv/lib/libpti_view.so (note: this is non-standard).
# However, this is what other oneAPI components and PyTorch XPU (Intel backend)
# expect.
set(CMAKE_INSTALL_LIBDIR "${SKBUILD_DATA_DIR}/lib")
set(CMAKE_INSTALL_INCLUDEDIR "${SKBUILD_DATA_DIR}/include")
set(CMAKE_INSTALL_BINDIR "${SKBUILD_DATA_DIR}/bin")
set(CMAKE_INSTALL_DOCDIR "${SKBUILD_DATA_DIR}/share")


FetchContent_MakeAvailable(pti-lib)
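
Note: this CMake file can also be configured by hand, outside scikit-build-core. A minimal sketch, assuming Ninja and a sourced oneAPI environment are available; SKBUILD_DATA_DIR is normally injected by the build backend, so the value passed below is purely illustrative:

# Illustrative standalone configure; in practice scikit-build-core drives this file.
export PTI_PINNED_COMMIT="$(< .github/pins/pti.txt)"
export LEVELZERO_INCLUDE_DIR=/path/to/level-zero      # optional; otherwise the system copy is used
export LEVELZERO_LIBRARY=/path/to/libze_loader.so     # optional; otherwise the system copy is used
cmake -S scripts/build_pti_data -B build -GNinja -DSKBUILD_DATA_DIR="$PWD/.venv"
cmake --build build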
Empty file.
54 changes: 54 additions & 0 deletions scripts/build_pti_data/pyproject.toml
@@ -0,0 +1,54 @@
[project]
name = "intel-pti"
version = "0.14.0.dev1"
description = "Add your description here"
readme = "README.md"
authors = [
{ name = "Intel Corporation", email = "[email protected]" }
]
license = "MIT"

requires-python = ">=3.9"
dependencies = []
classifiers = [
"Development Status :: 5 - Production/Stable",

# Indicate who your project is intended for
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"Intended Audience :: Other Audience",

"Topic :: Software Development :: Libraries",

"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",

"Environment :: GPU",
]

[tool.scikit-build]
build.verbose = true
logging.level = "INFO"
cmake.version = ">=3.20.0"
cmake.build-type = "Release"
cmake.args = ["-GNinja"]
ninja.version = ">=1.13"
minimum-version = "build-system.requires"
sdist.exclude = ["levelzero", "level-zero", "src"]
#experimental = true
#wheel.install-dir = "/data"

[tool.uv]
cache-keys = [{ file = "pyproject.toml" }, { file = "CMakeLists.txt" }]

[build-system]
requires = ["scikit-build-core>=0.10"]
build-backend = "scikit_build_core.build"

[project.urls]
Homepage = "https://github.com/intel/pti-gpu/tree/master/sdk"
Documentation = "https://github.com/intel/pti-gpu/blob/master/sdk/README.md"
Repository = "https://github.com/intel/pti-gpu"
Issues = "https://github.com/intel/pti-gpu/issues"
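
For local experimentation, the wheel described by this pyproject can also be built directly; a rough sketch, assuming a sourced oneAPI environment and either a system Level Zero or the LEVELZERO_* variables exported the way scripts/install-pti.sh (below) does:

# Hypothetical manual build of the intel-pti wheel; install-pti.sh is the supported path.
export PTI_PINNED_COMMIT="$(< .github/pins/pti.txt)"
cd scripts/build_pti_data
uv build                    # scikit-build-core + CMake produce dist/intel_pti-0.14.0.dev1-*.whl
pip install dist/*.whl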
65 changes: 65 additions & 0 deletions scripts/install-pti.sh
@@ -0,0 +1,65 @@
#!/usr/bin/env bash

set -euo pipefail


# Configure, build and install PTI (Intel Profiling Tools Interfaces) from source,
# together with the Level Zero loader it links against.

# intel-xpu-backend-for-triton project root
ROOT=$(cd "$(dirname "$0")/.." && pwd)

SCRIPTS_DIR=$ROOT/scripts
PTI_PROJ=$ROOT/.scripts_cache/pti
LEVEL_ZERO_PROJ=$ROOT/.scripts_cache/level_zero_for_pti
BASE=$(dirname "$PTI_PROJ")

echo "**** BASE is set to $BASE ****"
echo "**** PTI_PROJ is set to $PTI_PROJ ****"
mkdir -p "$BASE"

function build_level_zero {
    rm -rf "$LEVEL_ZERO_PROJ"
    mkdir -p "$LEVEL_ZERO_PROJ"
    cd "$LEVEL_ZERO_PROJ"
    LEVEL_ZERO_VERSION=1.24.2
    LEVEL_ZERO_SHA256=b77e6e28623134ee4e99e2321c127b554bdd5bfa3e80064922eba293041c6c52

    wget --progress=dot:giga -e use_proxy=yes "https://github.com/oneapi-src/level-zero/archive/refs/tags/v${LEVEL_ZERO_VERSION}.tar.gz"
    echo "${LEVEL_ZERO_SHA256}  v${LEVEL_ZERO_VERSION}.tar.gz" > "v${LEVEL_ZERO_VERSION}.tar.gz.sha256"
    sha256sum -c "v${LEVEL_ZERO_VERSION}.tar.gz.sha256"
    tar -xf "v${LEVEL_ZERO_VERSION}.tar.gz"
    cd "level-zero-${LEVEL_ZERO_VERSION}"
    echo "${LEVEL_ZERO_VERSION}" | awk -F. '{print $3}' > VERSION_PATCH
    mkdir build
    cd build
    cmake .. -DCMAKE_BUILD_TYPE=Release
    cmake --build . --config Release --parallel "$(nproc)"
    # cmake --build . --config Release --target install
    export LEVELZERO_INCLUDE_DIR="$LEVEL_ZERO_PROJ/level-zero-${LEVEL_ZERO_VERSION}"
    export LEVELZERO_LIBRARY="$LEVEL_ZERO_PROJ/level-zero-${LEVEL_ZERO_VERSION}/build/lib/libze_loader.so"
}

function build_pti {
    rm -rf "$PTI_PROJ"
    mkdir -p "$PTI_PROJ"

    echo "****** Building $PTI_PROJ ******"
    cd "$PTI_PROJ"
    cp "$SCRIPTS_DIR"/build_pti_data/* .
    pip install uv

    export PTI_PINNED_COMMIT="$(<$ROOT/.github/pins/pti.txt)"

    uv version 0.14.0.dev1
    uv build
}

function install_pti {
    echo "****** Installing PTI ******"
    cd "$PTI_PROJ"
    pip install dist/*.whl
}

build_level_zero
build_pti
install_pti
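
A quick sanity check after the script finishes, assuming the wheel installed cleanly: the intel-pti distribution should ship libpti_view.so, which proton resolves through importlib.metadata (see the profile.py change below).

# Illustrative verification; data files from the wheel land under the environment root, e.g. .venv/lib/.
pip show -f intel-pti | grep libpti_view.so
ls "$(python -c 'import sys; print(sys.prefix)')/lib" | grep pti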
11 changes: 10 additions & 1 deletion third_party/proton/proton/profile.py
@@ -1,3 +1,7 @@
from typing import Optional, Union
import importlib.metadata
import pathlib
import os
import functools
import triton

@@ -6,7 +10,6 @@
from .flags import flags
from .hooks import HookManager, LaunchHook, InstrumentationHook
from .mode import BaseMode
from typing import Optional, Union

DEFAULT_PROFILE_NAME = "proton"
UTILS_CACHE_PATH = None
@@ -21,6 +24,12 @@ def _select_backend() -> str:
    elif backend == "xpu":
        global UTILS_CACHE_PATH
        UTILS_CACHE_PATH = triton.runtime.driver.active.build_proton_help_lib()
        files = importlib.metadata.files('intel-pti')
        if files is not None:
            for f in files:
                if f.name == 'libpti_view.so':
                    os.environ["TRITON_XPUPTI_LIB_PATH"] = str(pathlib.Path(f.locate()).parent.resolve())
                    break
        return "xpupti"
    else:
        raise ValueError("No backend is available for the current target.")