# CI workflow: build + unit-test against the latest PyTorch release on CUDA 11.6 / V100.
name: nv-torch-latest-v100

on:
  push:
    branches:
      - 'master'
      - 'staging**'
    paths-ignore:
      - 'docs/**'
  pull_request:
    paths-ignore:
      - 'docs/**'

# Cancel any in-flight run for the same ref so only the newest commit is tested.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, cu116, v100]

    steps:
      # NOTE(review): actions/checkout@v2 runs on a deprecated Node runtime;
      # consider upgrading to @v4 once the self-hosted runners support it.
      - uses: actions/checkout@v2

      - name: environment
        run: |
          echo "JobID: $AISC_NODE_INSTANCE_ID"
          nvidia-smi
          which python
          python --version
          which nvcc
          nvcc --version
          pip install --upgrade pip
          # Remove any preinstalled torch stack, then install the latest
          # release wheels built for CUDA 11.6.
          pip uninstall --yes torch torchvision triton
          pip install torch torchvision --extra-index-url https://download.pytorch.org/whl/cu116
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"

      - name: Install transformers
        run: |
          git clone https://github.com/huggingface/transformers
          cd transformers
          # if needed switch to the last known good SHA until transformers@master is fixed
          # git checkout 1cc453d33
          git rev-parse --short HEAD
          pip uninstall --yes transformers
          pip install .

      - name: Install deepspeed
        run: |
          pip uninstall --yes deepspeed
          pip install .[dev,1bit,autotuning]
          ds_report

      - name: Python environment
        run: |
          pip list

      - name: Unit tests
        run: |
          unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          cd tests
          TORCH_EXTENSIONS_DIR=./torch-extensions pytest --color=yes --durations=0 --verbose --forked -n 4 unit/ --torch_ver="1.13" --cuda_ver="11.6"
          TORCH_EXTENSIONS_DIR=./torch-extensions pytest --color=yes --durations=0 --verbose --forked -m 'sequential' unit/ --torch_ver="1.13" --cuda_ver="11.6"