install_and_test.yml
name: Install and test
on:
pull_request: {}
push:
branches:
- main
tags: [ "v*" ]
schedule:
- cron: "0 5 * * *"
# 05:00 UTC is 9 PM PST
jobs:
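# Style and hook checks via pre-commit (the eslint hook is skipped in CI).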
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/[email protected]
- name: Setup python
uses: actions/[email protected]
with:
python-version: '3.9.16'
- name: install dependencies
run: |-
pip install -U pip setuptools pre-commit
# Install the hooks now so that they'll be cached
pre-commit install-hooks
- name: Check Code Style using pre-commit
run: |-
SKIP=eslint pre-commit run --show-diff-on-failure --all-files
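# Python formatting and import-order checks with black and isort.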
python_lint:
runs-on: ubuntu-latest
steps:
- uses: actions/[email protected]
- name: Setup python
uses: actions/[email protected]
with:
python-version: '3.9.16'
- name: setup
run: |-
pip install black==23.1.0 --progress-bar off
pip install "isort[pyproject]" numpy --progress-bar off
pip install mypy==0.991 types-mock types-Pillow types-tqdm types-PyYAML --progress-bar off
pip install -r requirements.txt --progress-bar off
- name: run black
run: |-
black --version
ls -la
black --exclude '/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist)' habitat_llm/ dataset_generation/ --diff
black --exclude '/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist)' habitat_llm/ dataset_generation/ --check
- name: run isort
run: |-
isort --version
isort habitat_llm/. dataset_generation/. --diff
isort habitat_llm/. dataset_generation/. --check-only
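# Full install and test run on a GPU (T4) Ubuntu runner.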
install_and_test_ubuntu:
runs-on: 4-core-ubuntu-gpu-t4
defaults:
run:
shell: bash -el {0}
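# bash -e aborts on the first failing command; -l runs a login shell so profile initialization (e.g. conda) is loaded.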
steps:
- uses: actions/[email protected]
with:
path: "./partnr-planner"
- uses: "./partnr-planner/.github/actions/install_ubuntu_deps"
- uses: "./partnr-planner/.github/actions/install_ubuntu_gpu_deps"
- name: Install pytorch
run: |-
export PATH=$HOME/miniconda/bin:/usr/local/cuda/bin:$PATH
conda activate partnr
conda install -y pytorch==2.4.1 torchvision==0.19.1 torchaudio==2.4.1 pytorch-cuda=12.4 -c pytorch -c nvidia
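# Note: pytorch-cuda=12.4 assumes the runner image ships a compatible NVIDIA driver.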
echo "Validating Pytorch Installation"
# Check that pytorch is installed with CUDA.
python -c 'import torch; torch.cuda.set_device(0)'
- name: Install habitat-sim version tag
run: |-
# Give the runner ownership of the cmake directory for installation
sudo chown runner -R /opt/cmake312/
# Activate the conda env
export PATH=$HOME/miniconda/bin:/usr/local/cuda/bin:$PATH
conda activate partnr
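# 'withbullet' selects the Bullet-physics build and 'headless' the display-free build of habitat-sim.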
conda install habitat-sim=0.3.2 withbullet headless -c conda-forge -c aihabitat -y
- name: Download test data
run: |-
# Disable clone protection for git lfs
export GIT_CLONE_PROTECTION_ACTIVE=false
git --version
git-lfs --version
export PATH=$HOME/miniconda/bin:/usr/local/cuda/bin:$PATH
conda init
source ~/.bashrc
conda activate partnr
conda install -y gitpython git-lfs
cd partnr-planner
git lfs install
# get the standard test assets from the downloader
python -m habitat_sim.utils.datasets_download --uids ci_test_assets hab_spot_arm rearrange_task_assets hab3_bench_assets --data-path data/ --no-replace --no-prune
ls -la data/scene_datasets/habitat-test-scenes/
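# Symlink the benchmark humanoid assets into data/humanoids.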
ln -s versioned_data/hab3_bench_assets/humanoids/ data/humanoids
# TODO: replace these specific downloads with dataset downloader calls in the next version update
# Get HSSD mini dataset and OVMM_objects for testing
git clone https://huggingface.co/datasets/ai-habitat/OVMM_objects data/objects_ovmm --recursive
cd data/objects_ovmm
git lfs pull
cd ../..
git clone https://huggingface.co/datasets/ai-habitat/hssd-partnr-ci data/versioned_data/hssd-partnr-ci
cd data/versioned_data/hssd-partnr-ci
git lfs pull
cd ../../..
ln -s versioned_data/hssd-partnr-ci data/hssd-partnr-ci
# Get skills and the episode dataset for testing
git clone --single-branch --branch ci https://huggingface.co/datasets/ai-habitat/partnr_episodes data/versioned_data/partnr_episodes
cd data/versioned_data/partnr_episodes
git lfs pull
cd ../../..
# Post-process: create symlinks for convenience
cd data
# Symlink the skill checkpoints to models
ln -s versioned_data/partnr_episodes/checkpoints models
# Symlink the test dataset used to set up RAG
ln -s versioned_data/partnr_episodes/test_rag test_rag
# Create a folder for episode datasets
mkdir -p datasets
ln -s ../versioned_data/partnr_episodes datasets/partnr_episodes
cd ..
- name: Install submodules and partnr
run: |-
# Give the runner ownership of the cmake directory for installation
sudo chown runner -R /opt/cmake312/
# Activate the conda env
export PATH=$HOME/miniconda/bin:/usr/local/cuda/bin:$PATH
conda activate partnr
cd partnr-planner
# Sync and update submodules
git submodule sync
git submodule update --init --recursive
# Install submodules
pip install -e third_party/habitat-lab/habitat-lab
pip install -e third_party/habitat-lab/habitat-baselines
pip install -e third_party/transformers-CFG
# Install requirements
pip install -r requirements.txt
# install the library
pip install -e .
- name: run tests
run: |-
export PATH=$HOME/miniconda/bin:/usr/local/cuda/bin:$PATH
conda activate partnr
cd partnr-planner
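# Run the test suites for the planner library and the dataset generation tools.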
python -m pytest habitat_llm/tests
python -m pytest dataset_generation/tests
# NOTE: to debug over SSH, uncomment the step below and move it just before the failing step to intercept the workflow
#- name: Debugging with tmate
# uses: mxschmitt/[email protected]