[zero] revert PR #3166, it disabled grad clip for bf16 #1740

Workflow file for this run

name: nv-torch19-p40

on:
  pull_request:
    paths-ignore:
      - 'docs/**'
      - 'blogs/**'
  schedule:
    - cron: "0 0 * * *"
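
# Cancel an in-progress run when a newer run starts for the same workflow and ref.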
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
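
# Single job, run on a self-hosted runner whose labels indicate CUDA 11.1 and NVIDIA P40 GPUs.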
jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, cu111, p40]
    steps:
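      # Check out DeepSpeed and create the Python virtual environment via the
      # repository-local setup-venv action.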
      - uses: actions/checkout@v3

      - id: setup-venv
        uses: ./.github/workflows/setup-venv
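
      # Pin PyTorch 1.9.0 / torchvision 0.10.0 built for CUDA 11.1, then confirm
      # the install and that a CUDA device is visible to torch.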
      - name: Install pytorch
        run: |
          pip install --no-cache-dir torch==1.9.0+cu111 torchvision==0.10.0+cu111 -f https://download.pytorch.org/whl/torch_stable.html
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
      - name: Install transformers
        run: |
          git clone https://github.com/huggingface/transformers
          cd transformers
          # if needed, switch to the last known good SHA until transformers@master is fixed
          # git checkout 1cc453d33
          git rev-parse --short HEAD
          pip install .
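
      # Install DeepSpeed from the checked-out source with the dev, 1bit, and
      # autotuning extras; ds_report summarizes which ops/extensions are available.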
      - name: Install deepspeed
        run: |
          pip install .[dev,1bit,autotuning] --no-build-isolation
          ds_report
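
      # Record the installed package versions for later debugging.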
      - name: Python environment
        run: |
          pip list
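
      # Clear any stale prebuilt extensions, then run the unit suite with each test
      # forked into its own process across 4 workers, targeting torch 1.9 / CUDA 11.1.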
      - name: Unit tests
        run: |
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          cd tests
          TORCH_EXTENSIONS_DIR=./torch-extensions pytest --forked -n 4 unit/ --torch_ver="1.9" --cuda_ver="11.1"