# cpu-inference.yml — CI workflow: build oneCCL / IPEX and run DeepSpeed CPU inference unit tests
---
  1. name: cpu-inference
  2. on:
  3. pull_request:
  4. paths-ignore:
  5. - 'docs/**'
  6. - 'blogs/**'
  7. merge_group:
  8. branches: [ master ]
  9. concurrency:
  10. group: ${{ github.workflow }}-${{ github.ref }}
  11. cancel-in-progress: true
  12. jobs:
  13. unit-tests:
  14. runs-on: ubuntu-20.04
  15. steps:
  16. - uses: actions/checkout@v2
  17. - id: setup-venv
  18. uses: ./.github/workflows/setup-venv
  19. - name: Detect instruction sets on instance
  20. run: |
  21. lscpu
  22. pip install cmake
  23. git clone https://github.com/intel/intel-extension-for-pytorch
  24. cd intel-extension-for-pytorch/tests/cpu/isa
  25. cmake .
  26. make
  27. ./cpu_features
  28. - name: Install numactl
  29. run: |
  30. sudo apt-get install -y numactl
  31. - name: Install oneCCL Bindings for PyTorch
  32. run: |
  33. python -m pip install intel_extension_for_pytorch
  34. python -m pip install oneccl_bind_pt==2.0 -f https://developer.intel.com/ipex-whl-stable-cpu
  35. - name: Install oneCCL
  36. run: |
  37. git clone https://github.com/oneapi-src/oneCCL
  38. cd oneCCL
  39. mkdir build
  40. cd build
  41. cmake ..
  42. make
  43. make install
  44. #source ./_install/env/setvars.sh
  45. # test whether oneCCL is correctly installed
  46. #mpirun -n 2 ./examples/benchmark/benchmark
  47. - name: Install transformers
  48. run: |
  49. git clone https://github.com/huggingface/transformers
  50. cd transformers
  51. git rev-parse --short HEAD
  52. pip install .
  53. - name: Install deepspeed
  54. run: |
  55. # check why the host does not have AVX2 support
  56. pip install .[dev,1bit,autotuning,inf]
  57. ds_report
  58. - name: Python environment
  59. run: |
  60. pip list
  61. - name: Unit tests
  62. run: |
  63. source oneCCL/build/_install/env/setvars.sh
  64. unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
  65. cd tests
  66. TRANSFORMERS_CACHE=~/tmp/transformers_cache/ TORCH_EXTENSIONS_DIR=./torch-extensions pytest -m 'seq_inference' -m 'inference_ops' -m 'inference' unit/