Merge branch 'main' into fiber-dist-tweak
monorimet authored Jan 3, 2025
2 parents 59cef3a + e98e458 commit 2dc5dc0
Showing 113 changed files with 5,867 additions and 1,335 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/build_packages.yml
@@ -67,9 +67,9 @@ jobs:
id: version_local
run: |
echo "version_suffix=${version_suffix}" >> $GITHUB_OUTPUT
python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} sharktank
python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} shortfin
python3 build_tools/python_deploy/compute_common_version.py -rc --version-suffix=${version_suffix} --write-json
python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} --write-json sharktank
python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} --write-json shortfin
python3 build_tools/python_deploy/compute_common_version.py --version-suffix=${version_suffix} --write-json
- name: Upload version_local.json files
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
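The updated step passes --write-json to compute_local_version.py for both packages and drops the -rc flag from compute_common_version.py. A rough local sketch of the same invocations follows; the suffix value is illustrative, and the assumption that --write-json emits the version_local.json files collected by the "Upload version_local.json files" step above is inferred from that step's name, not verified here.

    version_suffix=".dev0+illustrative"   # illustrative value; CI derives its own suffix
    python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} --write-json sharktank
    python3 build_tools/python_deploy/compute_local_version.py --version-suffix=${version_suffix} --write-json shortfin
    python3 build_tools/python_deploy/compute_common_version.py --version-suffix=${version_suffix} --write-json
    # Expected (not verified): each --write-json call writes a version_local.json
    # that the "Upload version_local.json files" step then uploads.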
44 changes: 31 additions & 13 deletions .github/workflows/ci-libshortfin.yml
@@ -9,15 +9,9 @@ name: CI - shortfin
on:
workflow_dispatch:
pull_request:
paths:
- '.github/workflows/ci-libshortfin.yml'
- 'shortfin/**'
push:
branches:
- main
paths:
- '.github/workflows/ci-libshortfin.yml'
- 'shortfin/**'

permissions:
contents: read
@@ -44,7 +38,7 @@ jobs:
strategy:
fail-fast: false
matrix:
name: ["Ubuntu (Clang)(full)", "Ubuntu (Clang)(host-only)", "Ubuntu (GCC)", "Windows (MSVC)"]
name: ["Ubuntu (Clang)(full)", "Ubuntu (Clang)(host-only)", "Windows (MSVC)"]
python-version: ["3.10", "3.11", "3.12"]
include:
- name: Ubuntu (Clang)(full)
@@ -59,16 +53,21 @@
cmake-options:
-DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 -DCMAKE_LINKER_TYPE=LLD -DSHORTFIN_HAVE_AMDGPU=OFF -DSHORTFIN_BUILD_STATIC=ON -DSHORTFIN_BUILD_DYNAMIC=ON
additional-packages: clang lld
- name: Ubuntu (GCC)
- name: Ubuntu (GCC 13)
runs-on: ubuntu-24.04
# Only test with GCC 13 and Python 3.12
python-version: "3.12"
cmake-options:
-DCMAKE_C_COMPILER=gcc-13 -DCMAKE_CXX_COMPILER=g++-13
- name: Ubuntu (GCC 14)
runs-on: ubuntu-24.04
# Only test with GCC 14 and Python 3.12
python-version: "3.12"
cmake-options:
-DCMAKE_C_COMPILER=gcc-14 -DCMAKE_CXX_COMPILER=g++-14
- name: Windows (MSVC)
runs-on: windows-2022
exclude:
# Only test Python 3.12 with GCC
- name: Ubuntu (GCC)
python-version: "3.10"
- name: Ubuntu (GCC)
python-version: "3.11"
# TODO: Include additional Python versions for Windows after the build is fixed
- name: Windows (MSVC)
python-version: "3.10"
@@ -152,3 +151,22 @@ jobs:
run: |
ctest --timeout 30 --output-on-failure --test-dir build
pytest -s --durations=10
# Depends on all other jobs to provide an aggregate job status.
ci_libshortfin_summary:
if: always()
runs-on: ubuntu-24.04
needs:
- build-and-test
steps:
- name: Getting failed jobs
run: |
echo '${{ toJson(needs) }}'
FAILED_JOBS="$(echo '${{ toJson(needs) }}' \
| jq --raw-output \
'map_values(select(.result!="success" and .result!="skipped")) | keys | join(",")' \
)"
if [[ "${FAILED_JOBS}" != "" ]]; then
echo "The following jobs failed: ${FAILED_JOBS}"
exit 1
fi
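Per the comment above, the new ci_libshortfin_summary job provides a single aggregate status: it inspects every job listed under needs and fails if any result is neither "success" nor "skipped". A minimal standalone sketch of the jq filter, run against a hypothetical needs payload, shows the behavior:

    # Hypothetical needs payload; in the workflow this comes from ${{ toJson(needs) }}.
    needs_json='{"build-and-test":{"result":"failure"},"other-job":{"result":"success"}}'
    echo "${needs_json}" | jq --raw-output \
      'map_values(select(.result!="success" and .result!="skipped")) | keys | join(",")'
    # Prints: build-and-test
    # An empty string would mean no job failed, so the summary step exits 0.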
8 changes: 5 additions & 3 deletions .github/workflows/ci-llama-large-tests.yaml
@@ -9,8 +9,8 @@ name: Llama Benchmarking Tests
on:
workflow_dispatch:
schedule:
# Weekdays at 4:00 AM UTC = 9:00 PM PST.
- cron: "0 4 * * 1-5"
# Weekdays at 11:00 AM UTC = 03:00 AM PST / 04:00 AM PDT
- cron: "0 11 * * 1-5"

concurrency:
# A PR number if a pull request and otherwise the commit hash. This cancels
@@ -53,11 +53,11 @@ jobs:
run: |
source ${VENV_DIR}/bin/activate
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
@@ -66,6 +66,8 @@
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
pip freeze
- name: Run llama tests
3 changes: 2 additions & 1 deletion .github/workflows/ci-llama-quick-tests.yaml
@@ -53,11 +53,11 @@ jobs:
run: |
source ${VENV_DIR}/bin/activate
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
@@ -66,6 +66,7 @@
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
pip freeze
- name: Run llama 8b f16 decomposed test
4 changes: 2 additions & 2 deletions .github/workflows/ci-sglang-benchmark.yml
@@ -21,8 +21,8 @@ name: SGLang Llama Benchmarking Tests
on:
workflow_dispatch:
schedule:
# Weekdays at 4:00 AM UTC = 9:00 PM PST.
- cron: "0 4 * * 1-5"
# Weekdays at 11:00 AM UTC = 03:00 AM PST / 04:00 AM PDT
- cron: "0 11 * * 1-5"

concurrency:
# A PR number if a pull request and otherwise the commit hash. This cancels
4 changes: 3 additions & 1 deletion .github/workflows/ci-sglang-integration-tests.yml
@@ -50,11 +50,11 @@ jobs:
run: |
source ${VENV_DIR}/bin/activate
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
# Use newest possible releases to be able to track commits that may
# cause errors.
@@ -64,6 +64,8 @@
iree-turbine \
"numpy<2.0"
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
# Install SGLang and sentence_transformers
pip install "git+https://github.com/nod-ai/sglang.git#subdirectory=python"
pip install sentence_transformers
4 changes: 3 additions & 1 deletion .github/workflows/ci-shark-ai.yml
@@ -49,11 +49,11 @@ jobs:
run: |
source ${VENV_DIR}/bin/activate
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
@@ -62,6 +62,8 @@
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -e sharktank/ shortfin/
pip freeze
- name: Run LLM Integration Tests
76 changes: 62 additions & 14 deletions .github/workflows/ci-sharktank.yml
@@ -23,11 +23,23 @@ concurrency:

jobs:
test:
name: "Unit Tests and Type Checking"
name: "Unit Tests (${{ matrix.os }}, ${{ matrix.python-version }}, ${{ matrix.torch-version }})"
strategy:
matrix:
version: [3.11]
os: [ubuntu-24.04, windows-2022]
python-version: ["3.11", "3.12"]
torch-version: ["2.3.0", "2.4.1", "2.5.1"]
os: [ubuntu-24.04]
include:
- os: windows-2022
python-version: "3.11"
torch-version: "2.3.0"
- os: windows-2022
python-version: "3.12"
torch-version: "2.4.1"
exclude:
- python-version: "3.12"
# `torch.compile` requires torch>=2.4.0 for Python 3.12+
torch-version: "2.3.0"
fail-fast: false
runs-on: ${{matrix.os}}
defaults:
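For orientation, the expanded strategy above resolves to five Ubuntu jobs — the (Python 3.12, torch 2.3.0) pair is excluded because, per the comment in the matrix, torch.compile requires torch >= 2.4.0 on Python 3.12 — plus the two Windows combinations added via include. A throwaway shell sketch, assuming nothing beyond the matrix shown here, enumerates them:

    # Enumerate the Ubuntu combinations generated by the matrix above.
    for py in 3.11 3.12; do
      for torch in 2.3.0 2.4.1 2.5.1; do
        if [[ "${py}" == "3.12" && "${torch}" == "2.3.0" ]]; then
          continue  # excluded: torch.compile needs torch >= 2.4.0 on Python 3.12
        fi
        echo "ubuntu-24.04  python=${py}  torch=${torch}"
      done
    done
    # The include entries add: windows-2022 python=3.11 torch=2.3.0
    #                          windows-2022 python=3.12 torch=2.4.1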
@@ -42,7 +54,7 @@
id: setup_python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: ${{matrix.version}}
python-version: ${{matrix.python-version}}

- name: Cache Pip Packages
uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
@@ -51,14 +63,18 @@
path: ${{ env.PIP_CACHE_DIR }}
key: pip-${{ steps.setup_python.outputs.python-version }}-${{ hashFiles('*requirements*.txt','sharktank/requirements*.txt') }}

- name: Install numpy
if: ${{ matrix.os == 'windows-2022' && matrix.torch-version == '2.3.0' }}
run: pip install "numpy<2.0"

- name: Install pip deps
run: |
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
pip install --no-compile --index-url https://download.pytorch.org/whl/cpu torch==${{matrix.torch-version}}+cpu
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
@@ -67,6 +83,8 @@
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
- name: Run sharktank tests
if: ${{ !cancelled() }}
run: |
@@ -77,7 +95,7 @@
name: "Data-dependent Tests"
strategy:
matrix:
version: [3.11]
python-version: [3.11]
runs-on: [llama-mi300x-3]
fail-fast: false
runs-on: ${{matrix.runs-on}}
@@ -94,7 +112,7 @@
id: setup_python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: ${{matrix.version}}
python-version: ${{matrix.python-version}}

- name: Create Python venv
run: python -m venv ${VENV_DIR}
@@ -107,7 +125,6 @@
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
@@ -116,16 +133,24 @@
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
- name: Run tests
# TODO: unify with-t5-data and with-clip-data flags into a single flag
# and make it possible to run only tests that require data.
# TODO: unify with-*-data flags into a single flag and make it possible to run
# only tests that require data.
# We would still want the separate flags as we may end up with data being
# scattered on different CI machines.
run: |
source ${VENV_DIR}/bin/activate
pytest \
--with-clip-data \
--with-clip-data \
--with-flux-data \
--with-t5-data \
--with-vae-data \
sharktank/tests/models/clip/clip_test.py \
sharktank/tests/models/t5/t5_test.py \
sharktank/tests/models/flux/flux_test.py \
sharktank/tests/models/vae/vae_test.py \
--durations=0
@@ -153,19 +178,42 @@
- name: Install pip deps
run: |
python -m pip install --no-compile --upgrade pip
# Note: We install in three steps in order to satisfy requirements
# from non default locations first. Installing the PyTorch CPU
# wheels saves multiple minutes and a lot of bandwidth on runner setup.
pip install --no-compile -r pytorch-cpu-requirements.txt
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
# Install nightly IREE packages.
# We could also pin to a known working or stable version.
pip install -f https://iree.dev/pip-release-links.html --pre --upgrade \
pip install -f https://iree.dev/pip-release-links.html --pre \
iree-base-compiler \
iree-base-runtime \
iree-turbine
pip install --no-compile -r requirements.txt -r sharktank/requirements-tests.txt -e sharktank/
- name: Run punet tests
run: |
pytest -v sharktank/ -m punet_quick \
--durations=0
# Depends on other jobs to provide an aggregate job status.
# TODO(#584): move test_with_data and test_integration to a pkgci integration test workflow?
ci_sharktank_summary:
if: always()
runs-on: ubuntu-24.04
needs:
- test
steps:
- name: Getting failed jobs
run: |
echo '${{ toJson(needs) }}'
FAILED_JOBS="$(echo '${{ toJson(needs) }}' \
| jq --raw-output \
'map_values(select(.result!="success" and .result!="skipped")) | keys | join(",")' \
)"
if [[ "${FAILED_JOBS}" != "" ]]; then
echo "The following jobs failed: ${FAILED_JOBS}"
exit 1
fi
(Diffs for the remaining changed files are not shown here.)
