nv-mii.yml

name: nv-mii

on:
  push:
    branches:
      - 'staging**'
    paths-ignore:
      - 'docs/**'
  pull_request:
    paths-ignore:
      - 'docs/**'
  schedule:
    - cron: "0 0 * * *"
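
# Cancel any in-progress run of this workflow on the same ref when a new one is queued.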
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, cu116, v100]

    steps:
      - uses: actions/checkout@v2
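
      # Set up the job's Python virtual environment via the repo-local setup-venv action.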
      - id: setup-venv
        uses: ./.github/workflows/setup-venv
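
      # Install a CUDA 11.6 build of torch to match the cu116 runner label, then sanity-check it.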
      - name: Install pytorch
        run: |
          pip install torch==1.13.1 torchvision --extra-index-url https://download.pytorch.org/whl/cu116
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
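
      # Build transformers from the current master branch rather than a released wheel.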
      - name: Install transformers
        run: |
          git clone https://github.com/huggingface/transformers
          cd transformers
          # if needed switch to the last known good SHA until transformers@master is fixed
          # git checkout 1cc453d33
          git rev-parse --short HEAD
          pip install .
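
      # Install DeepSpeed from the checked-out source tree and print its environment report.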
      - name: Install deepspeed
        run: |
          pip install .[dev]
          ds_report

      - name: Python environment
        run: |
          pip list
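
      # Clone DeepSpeed-MII, JIT-compile extensions fresh for the current GPU arch, and run tests marked "CPU or local".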
      - name: MII unit tests
        run: |
          git clone https://github.com/microsoft/DeepSpeed-MII.git
          cd DeepSpeed-MII
          pip install .[dev]
          unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          cd tests
          TRANSFORMERS_CACHE=/blob/transformers_cache/ TORCH_EXTENSIONS_DIR=./torch-extensions pytest --color=yes --durations=0 --forked --verbose -m "CPU or local" ./