[mlir-tensorrt] Add additional missing StableHLO patch
Added a patch that was missing from the last StableHLO upgrade. The patch adds a
couple of missing checks for static shapes, addressing issues reported in
openxla/stablehlo#2634.

A regression test is also added to mlir-tensorrt.

This change also makes minor cleanups to the CI/CD configuration to fix the caching
mechanism, using this PR as a test case.
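
The caching fix can be seen in the workflow diff below: the fixed cache key
`${{ runner.os }}-mlir-tensorrt-core-build` is replaced with a key derived from
`hashFiles(...)` over the sources plus a `restore-keys` prefix, so a new cache entry is
saved whenever the hashed files change while older entries still seed fresh builds. To
check locally that the compiler cache is actually being reused, here is a minimal sketch
using the same ccache commands the workflow runs (the build step itself is elided):

#!/bin/bash
# Minimal sketch: measure ccache effectiveness around a build.
# CCACHE_DIR and CCACHE_MAXSIZE mirror the values exported in the workflow below.
export CCACHE_DIR="$PWD/ccache"
export CCACHE_MAXSIZE=10G

ccache --zero-stats    # reset counters so the stats reflect only this build
# ... run the usual CMake/Ninja build here (see the workflow steps below) ...
ccache --show-stats    # a non-zero hit count confirms the cache is being reused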
christopherbate committed Dec 5, 2024
1 parent b71a868 commit b918ad2
Showing 7 changed files with 167 additions and 298 deletions.
289 changes: 12 additions & 277 deletions .github/workflows/mlir-tensorrt-ci.yml
@@ -17,7 +17,7 @@ env:

jobs:
mlir-tensorrt-test-pr:
if: github.event_name == 'pull_request' && github.event.pull_request.draft == false
if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
# `ubuntu-latest` is a CPU runner.
# If selected, tests requiring GPU are not run.
runs-on: ubuntu-latest
@@ -110,320 +110,55 @@ jobs:
- name: Create cache folder
run: |
mkdir -p ${{ github.workspace }}/ccache
mkdir -p ${{ github.workspace }}/.ccache.cpm
mkdir -p ${{ github.workspace }}/.cache.cpm
# Create cache action
- name: Create cache action
id: core-build-cache
uses: actions/cache@v4
with:
key: ${{ runner.os }}-mlir-tensorrt-core-build
key: ${{ runner.os }}-mlir-tensorrt-cache-${{ hashFiles('mlir-tensorrt/**/*.cpp', 'mlir-tensorrt/**/*.h', 'mlir-tensorrt/build_tools/**/*') }}
restore-keys: |
${{ runner.os }}-mlir-tensorrt-cache-
path: |
${{ github.workspace }}/ccache
${{ github.workspace }}/.ccache.cpm
${{ github.workspace }}/.cache.cpm
# Run LIT tests with TensorRT 10
- name: Run MLIR-TensorRT lit tests with TensorRT 10
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.cache.cpm:/.cache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=10.5 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
# Run LIT tests with TensorRT 10 & ASAN
- name: Run MLIR-TensorRT lit tests with TensorRT 10, ASAN enabled
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=10.5 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF \
-DENABLE_ASAN=ON
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
# Run LIT tests with TensorRT 8
- name: Run MLIR-TensorRT lit tests with TensorRT 8
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=8.6.1.6 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
mlir-tensorrt-test-main:
if: github.event_name == 'push'
# `ubuntu-latest` is a CPU runner.
# If selected, tests requiring GPU are not run.
runs-on: ubuntu-latest

steps:
# Free some disk space, otherwise we get OOM error.
- name: Free disk space
run: |
sudo rm -rf \
/usr/share/dotnet "$AGENT_TOOLSDIRECTORY" /usr/local/lib/android /opt/ghc \
/usr/local/share/powershell /usr/share/swift /usr/local/.ghcup \
/usr/lib/jvm
sudo apt-get purge microsoft-edge-stable || true
sudo apt-get purge google-cloud-cli || true
sudo apt-get purge dotnet-sdk-* || true
sudo apt-get purge google-chrome-stable || true
sudo apt-get autoremove -y
sudo apt-get autoclean -y
# Value of `github.workspace` is /home/runner/work/{repo_name}/{repo-name}
# i.e. /home/runner/work/TensorRT-Incubator/TensorRT-Incubator in our case.
# After this action, repo is cloned inside above path.
- uses: actions/checkout@v4
with:
fetch-depth: 5

# Create cache folders
- name: Create cache folder
run: |
mkdir -p ${{ github.workspace }}/ccache
mkdir -p ${{ github.workspace }}/.ccache.cpm
# Create cache action
- name: Create cache action
id: core-build-cache
uses: actions/cache@v4
with:
key: ${{ runner.os }}-mlir-tensorrt-core-build
path: |
${{ github.workspace }}/ccache
${{ github.workspace }}/.ccache.cpm
# Run LIT tests with TensorRT 10
- name: Run MLIR-TensorRT lit tests with TensorRT 10
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=10.5 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
./build_tools/scripts/cicd_build.sh
# Run LIT tests with TensorRT 10 & ASAN
- name: Run MLIR-TensorRT lit tests with TensorRT 10, ASAN enabled
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.cache.cpm:/.cache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=10.5 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF \
-DENABLE_ASAN=ON
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
ENABLE_ASAN=ON ./build_tools/scripts/cicd_build.sh
# Run LIT tests with TensorRT 8
- name: Run MLIR-TensorRT lit tests with TensorRT 8
uses: addnab/docker-run-action@v3
with:
image: ${{ env.DEFAULT_IMAGE }}
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.ccache.cpm:/.ccache.cpm
options: -v ${{ github.workspace }}/mlir-tensorrt:/mlir-tensorrt -v ${{ github.workspace }}/ccache:/ccache -v ${{ github.workspace }}/.cache.cpm:/.cache.cpm
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
run: |
export CCACHE_DIR="/ccache"
export CCACHE_MAXSIZE=10G
ccache --zero-stats || true
ccache --show-stats || true
cd mlir-tensorrt
cat > build_and_test.sh <<EOF
#!/bin/bash
set -e
python3 -m pip install -r python/requirements-dev.txt
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=8.6.1.6 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld \
-DMLIR_EXECUTOR_ENABLE_GPU_INTEGRATION_TESTS=OFF
ninja -C build all
ninja -C build check-mlir-executor
ninja -C build check-mlir-tensorrt-dialect
ninja -C build check-mlir-tensorrt
cd ..
ccache --show-stats || true
EOF
bash build_and_test.sh
DOWNLOAD_TENSORRT_VERSION="8.6.1.6" ./build_tools/scripts/cicd_build.sh
1 change: 1 addition & 0 deletions mlir-tensorrt/CMakeLists.txt
@@ -186,6 +186,7 @@ if(MLIR_TRT_ENABLE_HLO AND NOT TARGET StablehloOps)
GIT_REPOSITORY "https://github.com/openxla/stablehlo.git"
PATCHES
"${CMAKE_CURRENT_LIST_DIR}/build_tools/patches/stablehlo/0001-transforms-Fix-simplification-patterns-for-stablehlo.patch"
"${CMAKE_CURRENT_LIST_DIR}/build_tools/patches/stablehlo/0002-Fix-a-couple-missing-checks-for-static-shapes-in-sta.patch"
OPTIONS
"STABLEHLO_ENABLE_BINDINGS_PYTHON ${MLIR_TRT_ENABLE_PYTHON}"
"STABLEHLO_BUILD_EMBEDDED ON"