nv-nightly.yml

name: nv-nightly

on:
  schedule:
    - cron: "0 0 * * *"  # run once a day at midnight UTC

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true  # cancel any in-flight run for the same ref

jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, cu116, v100]

    steps:
      - uses: actions/checkout@v2

      - id: setup-venv
        uses: ./.github/workflows/setup-venv

      - name: Install pytorch
        run: |
          pip install torch==1.13.1 torchvision --extra-index-url https://download.pytorch.org/whl/cu116
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"

      - name: Install transformers
        run: |
          git clone https://github.com/huggingface/transformers
          cd transformers
          # if needed, switch to the last known good SHA until transformers@master is fixed
          # git checkout 1cc453d33
          git rev-parse --short HEAD
          pip install .

      - name: Install deepspeed
        run: |
          pip install .[dev,1bit,autotuning,inf]
          ds_report

      - name: Python environment
        run: |
          pip list

      - name: Unit tests
        run: |
          unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          cd tests
          TRANSFORMERS_CACHE=/blob/transformers_cache/ TORCH_EXTENSIONS_DIR=./torch-extensions pytest --forked -m 'nightly' unit/ --torch_ver="1.13" --cuda_ver="11.6"
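
The nightly suite can be reproduced locally with roughly the same commands the workflow runs. The sketch below assumes a CUDA 11.6 machine with torch 1.13.1 already installed and a checkout of the repository root; the /tmp/transformers_cache path is an assumption standing in for the /blob/transformers_cache/ mount that only exists on the self-hosted runners.

  # install deepspeed with the extras the nightly job uses, then verify the build
  pip install .[dev,1bit,autotuning,inf]
  ds_report

  # only JIT-compile ops for the current GPU architecture
  unset TORCH_CUDA_ARCH_LIST

  cd tests
  TRANSFORMERS_CACHE=/tmp/transformers_cache \
  TORCH_EXTENSIONS_DIR=./torch-extensions \
    pytest --forked -m 'nightly' unit/ --torch_ver="1.13" --cuda_ver="11.6"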