Run Scheduled Events Action #36

name: Run Scheduled Events Docker

permissions:
  actions: write
  contents: write
  issues: write
  pull-requests: write
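# Write access is granted so the maintenance action can (presumably) push sync
# branches, open pull requests, and file issues for failed merges or test runs.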

on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'
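  # '0 0 * * *' runs the sync once a day at 00:00 UTC; workflow_dispatch also
  # allows it to be triggered manually from the Actions tab.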

jobs:
  run-scheduled-events:
    runs-on: self-hosted
    steps:
      - name: Fork Maintenance System
        uses: Cemberk/Fork-Maintenance-System@dockerV2
        with:
          github_token: ${{ secrets.CRED_TOKEN }}
          upstream_repo: "https://github.com/huggingface/transformers"
          schedule_json: |
            {
              "upstream_main_branch": "main",
              "upstream_release_branch": "v4.43-release",
              "downstream_main_branch": "upstream_sync",
              "downstream_testing_branch": "rocm6.3_testing_rel4.43_testing",
              "downstream_develop_branch": "develop",
              "commits": [
                "731f0308bb0a2796e28b68664f4ed050eab6dbfd",
                "0c1b311bff616a4faf214461584a758fbab14a6f",
                "5b2d4fec5abb8ffe755fe66ce0856fc066c561a6"
              ]
            }
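          # Assumption based on the key names (the exact semantics are defined by the
          # Cemberk/Fork-Maintenance-System action): schedule_json names the upstream
          # and downstream branches to keep in sync and lists commits to carry over.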
          pr_branch_prefix: "scheduled-merge"
          requirements_command: >
            sudo sed -i 's/torchaudio//g' examples/pytorch/_tests_requirements.txt &&
            pip install -r examples/pytorch/_tests_requirements.txt &&
            pip install --no-cache-dir GPUtil azureml azureml-core tokenizers ninja cerberus sympy
            sacremoses sacrebleu==1.5.1 sentencepiece scipy scikit-learn urllib3 &&
            pip install huggingface_hub datasets &&
            pip install parameterized &&
            pip install -e .
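          # requirements_command drops torchaudio from the example test requirements
          # (presumably because a CUDA torchaudio wheel would clash with the ROCm
          # PyTorch build already in the image) and installs the test dependencies
          # plus transformers itself in editable mode.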
          unit_test_command: >
            cd tests;
            folders=($(python3 -c 'import os; tests = os.getcwd();
            model_tests = os.listdir(os.path.join(tests, "models"));
            d1 = sorted(list(filter(os.path.isdir, os.listdir(tests))));
            d2 = sorted(list(filter(os.path.isdir, [f"models/{x}" for x in model_tests])));
            d1.remove("models"); d = d2 + d1; print("\n".join(d))'));
            cd ..;
            for folder in ${folders[@]}; do
            pytest tests/${folder} -v
            --make-reports=huggingface_unit_tests_${machine_type}_run_models_gpu_${folder}
            -rfEs --continue-on-collection-errors -m "not not_device_test" -p no:cacheprovider;
            done;
            allstats=($(find reports -name stats.txt));
            for stat in ${allstats[@]}; do echo $stat; cat $stat; done
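          # unit_test_command enumerates the test folders (per-model suites first, then
          # the remaining top-level suites), runs pytest on each folder with its own
          # report, and finally prints every generated stats.txt summary.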
          performance_test_command: >
            echo "python examples/pytorch/language-modeling/run_mlm.py
            --model_name_or_path bert-base-uncased --dataset_name wikitext
            --dataset_config_name wikitext-2-raw-v1 --do_train --do_eval
            --output_dir /tmp/test-mlm --per_device_train_batch_size 8
            --per_device_eval_batch_size 8 --max_steps 500"
          docker_image: rocm/pytorch:latest
          docker_options: --device=/dev/kfd --device=/dev/dri --group-add video --shm-size 16G --network=host
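          # --device=/dev/kfd and --device=/dev/dri expose the AMD GPUs inside the
          # container, --group-add video grants the runner user access to them, and
          # the larger shared-memory size plus host networking follow the usual ROCm
          # PyTorch container setup.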