faiss/.circleci/config.yml
Chengqi Deng fed61e6d95 Add ARM to CI (#1914)
Summary:
This PR added a CI job for ARM.

Pull Request resolved: https://github.com/facebookresearch/faiss/pull/1914

Reviewed By: beauby

Differential Revision: D28833987

Pulled By: mdouze

fbshipit-source-id: 4977585ea1a0715547ed34f400cf0a3646fca667
2021-06-08 14:53:02 -07:00

version: 2.1
orbs:
  win: circleci/windows@2.4.0
jobs:
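  # Checks C++/CUDA source formatting with clang-format-11 and fails on any diff.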
  format:
    docker:
      - image: ubuntu:20.04
    steps:
      - checkout
      - run:
          name: Install clang-format
          command: |
            apt-get update
            apt-get install -y git-core clang-format-11
      - run:
          name: Verify clang-format
          command: |
            git ls-files | grep -E '\.(cpp|h|cu|cuh)$' | xargs clang-format-11 -i
            if git diff --quiet; then
              echo "Formatting OK!"
            else
              echo "Formatting not OK!"
              echo "------------------"
              git --no-pager diff --color
              exit 1
            fi
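
  # Builds and tests the CPU library, Python bindings, and C API on Linux;
  # opt_level selects the SIMD build variant (e.g. avx2) and resource_class the executor size.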
  build_linux:
    parameters:
      opt_level:
        type: string
        default: generic
      resource_class:
        type: string
        default: medium
    docker:
      - image: beauby/faiss-circleci:cpu
    resource_class: << parameters.resource_class >>
    environment:
      OMP_NUM_THREADS: 10
      MKL_THREADING_LAYER: GNU
    steps:
      - checkout
      - run:
          name: Build faiss library
          command: |
            cmake -B build -DBUILD_TESTING=ON -DFAISS_ENABLE_GPU=OFF \
                  -DFAISS_OPT_LEVEL=<< parameters.opt_level >> \
                  -DFAISS_ENABLE_C_API=ON \
                  -DCMAKE_BUILD_TYPE=Release -DBLA_VENDOR=Intel10_64_dyn .
            make -C build -j3 faiss
      - when:
          condition:
            equal: [ "avx2", << parameters.opt_level >> ]
          steps:
            - run:
                name: Build faiss_avx2 library
                command: make -C build -j3 faiss_avx2 swigfaiss_avx2
      - run:
          name: Test faiss library
          command: |
            make -C build -j3 faiss_test
            export GTEST_OUTPUT="xml:$(realpath .)/test-results/googletest/"
            make -C build test
      - run:
          name: Build python extension
          command: |
            make -C build -j3 swigfaiss
            cd build/faiss/python
            python3 setup.py build
      - run:
          name: Test python extension
          command: |
            pip3 install pytest
            export PYTHONPATH="$(ls -d ./build/faiss/python/build/lib*/)"
            pytest --junitxml=test-results/pytest/results.xml tests/test_*.py
            pytest --junitxml=test-results/pytest/results-torch.xml tests/torch_*.py
      - store_test_results:
          path: test-results
      - run:
          name: Build C API
          command: |
            make -C build -j faiss_c
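
  # Builds the CPU conda package for Python 3.7 as a packaging check (no upload).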
  build_linux_conda:
    docker:
      - image: continuumio/miniconda3
    steps:
      - checkout
      - run:
          name: Conda build
          command: |
            conda install -y -q conda-build
            cd conda
            conda build faiss --python 3.7 -c pytorch
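
  # Builds and tests the CPU library, Python bindings, and C API on macOS.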
  build_osx:
    macos:
      xcode: 12.4.0
    environment:
      OMP_NUM_THREADS: 10
    steps:
      - checkout
      - run:
          name: Install Homebrew packages
          command: |
            brew install cmake swig libomp
      - run:
          name: Install numpy/scipy
          command: |
            pip3 install numpy scipy
      - run:
          name: Generate Makefiles
          command: |
            cmake -B build -DBUILD_TESTING=ON -DFAISS_ENABLE_GPU=OFF \
                  -DCMAKE_BUILD_TYPE=Release -DFAISS_ENABLE_C_API=ON \
                  -DPython_EXECUTABLE=/usr/local/bin/python3 .
      - run:
          name: Build faiss library
          command: |
            make -C build -j faiss
      - run:
          name: Test faiss library
          command: |
            make -C build -j faiss_test
            export GTEST_OUTPUT="xml:$PWD/test-results/googletest/"
            make -C build test
      - run:
          name: Build python extension
          command: |
            make -C build -j swigfaiss
            cd build/faiss/python
            python3 setup.py build
      - run:
          name: Test python extension
          command: |
            pip3 install pytest torch
            export PYTHONPATH="$(ls -d ./build/faiss/python/build/lib*/)"
            pytest --junitxml=test-results/pytest/results.xml tests/test_*.py
            pytest --junitxml=test-results/pytest/results-torch.xml tests/torch_*.py
      - store_test_results:
          path: test-results
      - run:
          name: Build C API
          command: |
            make -C build -j faiss_c
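
  # Builds and tests the Windows conda package for Python 3.7 (no upload).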
  build_windows:
    executor:
      name: win/default
      shell: bash.exe
    steps:
      - checkout
      - run:
          name: Build/test
          command: |
            conda install conda-build
            cd conda
            conda build faiss --python 3.7 -c pytorch
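
  # Builds and tests the CPU library, Python bindings, and C API on an aarch64 (ARM64)
  # machine, and verifies that NEON support is reported by faiss.get_compile_options().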
  build_arm:
    machine:
      image: ubuntu-2004:202101-01
    resource_class: arm.medium
    parameters:
      opt_level:
        type: string
        default: generic
    environment:
      OMP_NUM_THREADS: 10
      CONDA_HOME: /home/circleci/miniconda3
      PYTHON: /home/circleci/miniconda3/bin/python
    steps:
      - checkout
      - run:
          name: Install dependencies
          command: |
            sudo apt-get update && sudo apt-get install -y swig
            wget https://repo.anaconda.com/miniconda/Miniconda3-py39_4.9.2-Linux-aarch64.sh
            bash Miniconda3-py39_4.9.2-Linux-aarch64.sh -b -p $CONDA_HOME
            pip3 install cmake
            $CONDA_HOME/bin/conda install -y numpy scipy
            $CONDA_HOME/bin/pip install pytest torch
      - run:
          name: Build faiss library
          command: |
            cmake -B build -DBUILD_TESTING=ON -DFAISS_ENABLE_GPU=OFF \
                  -DFAISS_OPT_LEVEL=<< parameters.opt_level >> \
                  -DFAISS_ENABLE_C_API=ON \
                  -DCMAKE_BUILD_TYPE=Release \
                  -DPython_EXECUTABLE=$PYTHON .
            make -C build -j3 faiss
      - run:
          name: Test faiss library
          command: |
            make -C build -j3 faiss_test
            export GTEST_OUTPUT="xml:$(realpath .)/test-results/googletest/"
            make -C build test
      - run:
          name: Build python extension
          command: |
            make -C build -j3 swigfaiss
            cd build/faiss/python
            $PYTHON setup.py build
      - run:
          name: Test python extension
          command: |
            export PYTHONPATH="$(ls -d ./build/faiss/python/build/lib*/)"
            $PYTHON -c "import faiss; assert 'NEON' in faiss.get_compile_options()"
            $PYTHON -m pytest --junitxml=test-results/pytest/results.xml tests/test_*.py
            $PYTHON -m pytest --junitxml=test-results/pytest/results-torch.xml tests/torch_*.py
      - store_test_results:
          path: test-results
      - run:
          name: Build C API
          command: |
            make -C build -j faiss_c
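
  # Builds the GPU-enabled library in Docker on a CUDA machine and runs the
  # C++ and Python GPU tests inside the container.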
  build_linux_gpu:
    machine:
      resource_class: gpu.nvidia.medium
      image: ubuntu-1604-cuda-10.1:201909-23
      docker_layer_caching: true
    steps:
      - checkout
      - run:
          name: Build/test
          command: |
            docker build -t faiss -f .circleci/Dockerfile.faiss_gpu .
            docker run --gpus all faiss make -C build test
            docker run --gpus all faiss sh -c '(pwd; find)'
            docker run --gpus all faiss sh -c '(cd build/faiss/python; python3 setup.py install) && cp tests/common_faiss_tests.py faiss/gpu/test && python3 -m unittest discover -s faiss/gpu/test -p "test_*"'
            docker run --gpus all faiss sh -c '(cd build/faiss/python; python3 setup.py install) && cp tests/common_faiss_tests.py faiss/gpu/test && python3 -m unittest discover -s faiss/gpu/test -p "torch_*.py"'
          no_output_timeout: 60m
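
  # Builds Linux CPU conda packages and uploads them to the pytorch channel
  # under the given label (main or nightly).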
  deploy_linux:
    parameters:
      label:
        type: string
        default: main
    docker:
      - image: continuumio/miniconda3
    steps:
      - checkout
      - run:
          name: Install conda-build/anaconda-client
          command: |
            conda install -y -q conda-build anaconda-client
            conda config --set anaconda_upload yes
      - run:
          name: Build packages
          environment:
            PACKAGE_TYPE: <<parameters.label>>
          command: |
            cd conda
            conda build faiss --user pytorch --label <<parameters.label>>
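
  # Builds Linux GPU conda packages in Docker for the given CUDA version and
  # compute architectures, and uploads them to the pytorch channel.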
  deploy_linux_gpu:
    parameters:
      label:
        type: string
        default: main
      cuda:
        type: string
      cuda_archs:
        type: string
    machine:
      resource_class: gpu.nvidia.medium
      image: ubuntu-1604-cuda-10.1:201909-23
      docker_layer_caching: true
    steps:
      - checkout
      - run:
          name: Build packages
          command: |
            docker build -t faiss -f conda/Dockerfile.cuda<<parameters.cuda>> .
            docker run --gpus all \
                -e PACKAGE_TYPE="<<parameters.label>>" \
                -e CUDA_ARCHS="<<parameters.cuda_archs>>" \
                -e ANACONDA_API_TOKEN=$ANACONDA_API_TOKEN \
                faiss \
                conda build faiss-gpu --variants '{ "cudatoolkit": "<<parameters.cuda>>" }' \
                --user pytorch --label <<parameters.label>>
          no_output_timeout: 60m
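
  # Builds macOS conda packages against the MacOSX10.9 SDK and uploads them
  # to the pytorch channel.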
  deploy_osx:
    parameters:
      label:
        type: string
        default: main
    macos:
      xcode: 12.4.0
    steps:
      - checkout
      - run:
          name: Install conda
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            $HOME/miniconda/bin/conda install -y -q conda-build anaconda-client
            $HOME/miniconda/bin/conda config --set anaconda_upload yes
      - run:
          name: Install MacOSX10.9 SDK
          command: |
            curl -L -o - https://github.com/phracker/MacOSX-SDKs/releases/download/10.15/MacOSX10.9.sdk.tar.xz | sudo tar xJf - -C /opt
      - run:
          name: Build packages
          environment:
            PACKAGE_TYPE: <<parameters.label>>
          command: |
            export PATH=~/miniconda/bin:$PATH
            cd conda
            conda build faiss --user pytorch --label <<parameters.label>>
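
  # Builds Windows conda packages and uploads them to the pytorch channel.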
  deploy_windows:
    parameters:
      label:
        type: string
        default: main
    executor:
      name: win/default
      shell: bash.exe
    steps:
      - checkout
      - run:
          name: Install conda-build/anaconda-client
          command: |
            conda install -y -q conda-build anaconda-client
            conda config --set anaconda_upload yes
      - run:
          name: Build packages
          environment:
            PACKAGE_TYPE: <<parameters.label>>
          command: |
            cd conda
            conda build faiss --user pytorch --label <<parameters.label>>
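
# The build workflow runs on every commit; its deploy jobs run only for version tags (v*).
# The nightly workflow rebuilds and uploads packages with the "nightly" label from the
# master branch every day at midnight (UTC).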
workflows:
  version: 2
  build:
    jobs:
      - format:
          name: Format
      - build_linux:
          name: Linux
      - build_linux:
          name: Linux (AVX2)
          opt_level: "avx2"
          resource_class: "medium+"
      - build_linux_conda:
          name: Linux (conda)
      - build_linux_gpu:
          name: Linux GPU
          requires:
            - Linux
      - build_osx:
          name: OSX
      - build_windows:
          name: Windows
      - build_arm:
          name: ARM64
      - deploy_linux:
          name: Linux packages
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_linux_gpu:
          name: Linux GPU packages (CUDA 10.1)
          cuda: "10.1"
          cuda_archs: "35;52;60;61;70;72;75"
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_linux_gpu:
          name: Linux GPU packages (CUDA 10.2)
          cuda: "10.2"
          cuda_archs: "35;52;60;61;70;72;75"
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_linux_gpu:
          name: Linux GPU packages (CUDA 11.0)
          cuda: "11.0"
          cuda_archs: "60;61;70;72;75;80"
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_linux_gpu:
          name: Linux GPU packages (CUDA 11.3)
          cuda: "11.3"
          cuda_archs: "60;61;70;72;75;80;86"
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_windows:
          name: Windows packages
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
      - deploy_osx:
          name: OSX packages
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/

  nightly:
    triggers:
      - schedule:
          cron: "0 0 * * *"
          filters:
            branches:
              only:
                - master
    jobs:
      - deploy_linux:
          name: Linux nightlies
          label: nightly
      - deploy_linux_gpu:
          name: Linux GPU nightlies (CUDA 10.1)
          cuda: "10.1"
          cuda_archs: "35;52;60;61;70;72;75"
          label: nightly
      - deploy_linux_gpu:
          name: Linux GPU nightlies (CUDA 10.2)
          cuda: "10.2"
          cuda_archs: "35;52;60;61;70;72;75"
          label: nightly
      - deploy_linux_gpu:
          name: Linux GPU nightlies (CUDA 11.0)
          cuda: "11.0"
          cuda_archs: "60;61;70;72;75;80"
          label: nightly
      - deploy_linux_gpu:
          name: Linux GPU nightlies (CUDA 11.3)
          cuda: "11.3"
          cuda_archs: "60;61;70;72;75;80;86"
          label: nightly
      - deploy_windows:
          name: Windows nightlies
          label: nightly
      - deploy_osx:
          name: OSX nightlies
          label: nightly