Skip to content

Commit

Permalink
Merge 66b0ebb into 80fd3b3
Browse files Browse the repository at this point in the history
  • Loading branch information
PaliC authored Jul 7, 2022
2 parents 80fd3b3 + 66b0ebb commit 8a40a97
Show file tree
Hide file tree
Showing 6 changed files with 294 additions and 12 deletions.
146 changes: 146 additions & 0 deletions .github/workflows/runtime_nightly_cuda.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
# Nightly build of the multipy runtime with CUDA support; publishes the built
# tarball to a per-ABI GitHub release (nightly-runtime-cuda-abi-{0,1}).
name: Multipy runtime nightly release with cuda

on:
  schedule:
    - cron: '0 2 * * *' # run at 2 AM UTC
  # pull_request/push keep the workflow exercised by CI; the release step at
  # the bottom is gated so PR runs cannot clobber the published nightly.
  pull_request:
  push:
    branches:
      - main

jobs:
  unittest:
    strategy:
      matrix:
        python-version: [3.8]
        platform: [linux.4xlarge.nvidia.gpu]
        abi: [0, 1]
      fail-fast: false
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Update pip
        run: |
          sudo yum update -y
          sudo yum -y install git python3-pip
          sudo pip3 install --upgrade pip

      - name: Setup conda
        run: |
          wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh
          bash ~/miniconda.sh -b -p $HOME/miniconda

      - name: setup Path
        run: |
          echo /usr/local/cuda-11.3/bin >> $GITHUB_PATH
          echo "/home/ec2-user/miniconda/bin" >> $GITHUB_PATH
          # BUG FIX: KEY=value pairs must go to GITHUB_ENV, not GITHUB_PATH.
          # GITHUB_PATH only accepts directory entries; the original line put
          # a bogus "CONDA=..." entry on PATH and never defined $CONDA.
          echo "CONDA=/home/ec2-user/miniconda" >> $GITHUB_ENV

      - name: Checkout MultiPy
        uses: actions/checkout@v2
        with:
          submodules: true

      - name: Setup SSH (Click me for login details)
        uses: ./.github/actions/setup-ssh
        with:
          github-secret: ${{ secrets.GITHUB_TOKEN }}

      - name: Install C++ toolchain
        run: |
          sudo yum -y install clang llvm
          export CC=clang
          export CXX=clang++
          sudo yum -y install xz-devel bzip2-devel libnsl2-devel readline-devel expat-devel gdbm-devel glibc-devel gmp-devel libffi-devel libGL-devel libX11-devel ncurses-devel openssl-devel sqlite-devel tcl-devel tix-devel tk-devel
          sudo yum -y install lzma
          sudo yum -y install uuid
          sudo yum -y install openmpi-devel
          sudo yum -y install zlib-devel

      - name: create conda env
        run: |
          conda create --name multipy_runtime_env python=${{ matrix.python-version }}
          conda info

      - name: Install python/pytorch dependencies
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          conda run -n multipy_runtime_env python -m pip install astunparse numpy ninja pyyaml mkl mkl-include setuptools cmake cffi typing_extensions future six requests dataclasses pytest

      - name: gen examples
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          # A minor hack to get the CI working as conda doesn't have torch,
          # fortunately we can remove this once we have a dynamically linked torch
          cd multipy/runtime/example
          conda create --name example_env python=${{ matrix.python-version }}
          conda run -n example_env python -m pip install torch torchvision torchaudio pathlib
          conda run -n example_env python generate_examples.py

      - name: Build pytorch with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          export GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}
          export CXXFLAGS="-D_GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}"
          export TORCH_CXX_FLAGS="-D_GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}"
          cd multipy/runtime/third-party/pytorch
          export USE_DEPLOY=1
          export USE_CUDA=1
          conda run -n multipy_runtime_env python setup.py develop

      - name: Build multipy runtime with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime
          mkdir build
          cd build
          conda run -n multipy_runtime_env cmake -DABI_EQUALS_1=${{ matrix.abi }} ..
          conda run -n multipy_runtime_env cmake --build . --config Release

      - name: install files
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          conda run -n multipy_runtime_env cmake --install . --prefix "."

      - name: Run unit tests with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          ./test_deploy

      - name: Run unit tests with ABI=${{ matrix.abi }} with gpu
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          ./test_deploy_gpu

      - name: create tarball [click me to get a list of files for the nightly release]
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build/dist
          tar -czvf multipy_runtime.tar.gz multipy/

      - name: Update nightly release
        # BUG FIX: without this guard, every pull_request run (with rm: true)
        # would delete and re-publish the nightly release tag. Only scheduled
        # runs and pushes to main may update it.
        if: ${{ github.event_name != 'pull_request' }}
        uses: pyTooling/Actions/releaser@main
        with:
          tag: nightly-runtime-cuda-abi-${{ matrix.abi }}
          rm: true
          token: ${{ secrets.GITHUB_TOKEN }}
          files: |
            multipy/runtime/build/dist/multipy_runtime.tar.gz
1 change: 0 additions & 1 deletion .github/workflows/runtime_tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,6 @@ jobs:
conda run -n multipy_runtime_env cmake --install . --prefix "."
- name: Run unit tests with ABI=${{ matrix.abi }}
if: ${{ matrix.abi }} == 1
shell: bash -l {0}
env:
PYTHON_VERSION: ${{ matrix.python-version }}
Expand Down
138 changes: 138 additions & 0 deletions .github/workflows/runtime_tests_cuda.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# CI: build the multipy runtime with CUDA and run the CPU + GPU unit tests
# across a {python-version} x {ABI} matrix on GPU runners.
name: Multipy runtime tests with cuda

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  unittest:
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9]
        platform: [linux.4xlarge.nvidia.gpu]
        abi: [0, 1]
      fail-fast: false
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Update pip
        run: |
          sudo yum update -y
          sudo yum -y install git python3-pip
          sudo pip3 install --upgrade pip

      - name: Setup conda
        run: |
          wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh
          bash ~/miniconda.sh -b -p $HOME/miniconda

      - name: Install CUDA 11.3
        shell: bash
        run: |
          sudo yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
          sudo yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel7/x86_64/cuda-rhel7.repo
          sudo yum clean expire-cache
          sudo yum install -y nvidia-driver-latest-dkms
          sudo yum install -y cuda-11-3
          sudo yum install -y cuda-drivers
          sudo yum install -y libcudnn8-devel

      - name: setup Path
        run: |
          echo /usr/local/cuda-11.3/bin >> $GITHUB_PATH
          echo "/home/ec2-user/miniconda/bin" >> $GITHUB_PATH
          # BUG FIX: KEY=value pairs must go to GITHUB_ENV, not GITHUB_PATH.
          # GITHUB_PATH only accepts directory entries; the original line put
          # a bogus "CONDA=..." entry on PATH and never defined $CONDA.
          echo "CONDA=/home/ec2-user/miniconda" >> $GITHUB_ENV

      - name: Checkout MultiPy
        uses: actions/checkout@v2
        with:
          submodules: true

      - name: Setup SSH (Click me for login details)
        uses: ./.github/actions/setup-ssh
        with:
          github-secret: ${{ secrets.GITHUB_TOKEN }}

      - name: Install C++ toolchain
        run: |
          sudo yum -y install clang llvm
          export CC=clang
          export CXX=clang++
          sudo yum -y install xz-devel bzip2-devel libnsl2-devel readline-devel expat-devel gdbm-devel glibc-devel gmp-devel libffi-devel libGL-devel libX11-devel ncurses-devel openssl-devel sqlite-devel tcl-devel tix-devel tk-devel
          sudo yum -y install lzma
          sudo yum -y install uuid
          sudo yum -y install openmpi-devel
          sudo yum -y install zlib-devel

      - name: create conda env
        run: |
          conda create --name multipy_runtime_env python=${{ matrix.python-version }}
          conda info

      - name: Install python/pytorch dependencies
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          conda run -n multipy_runtime_env python -m pip install astunparse numpy ninja pyyaml mkl mkl-include setuptools cmake cffi typing_extensions future six requests dataclasses pytest

      - name: gen examples
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          # A minor hack to get the CI working as conda doesn't have torch,
          # fortunately we can remove this once we have a dynamically linked torch
          cd multipy/runtime/example
          conda create --name example_env python=${{ matrix.python-version }}
          conda run -n example_env python -m pip install torch torchvision torchaudio pathlib
          conda run -n example_env python generate_examples.py

      - name: Build pytorch with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          export GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}
          export CXXFLAGS="-D_GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}"
          export TORCH_CXX_FLAGS="-D_GLIBCXX_USE_CXX11_ABI=${{ matrix.abi }}"
          cd multipy/runtime/third-party/pytorch
          export USE_DEPLOY=1
          export USE_CUDA=1
          conda run -n multipy_runtime_env python setup.py develop

      - name: Build multipy runtime with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime
          mkdir build
          cd build
          conda run -n multipy_runtime_env cmake -DABI_EQUALS_1=${{ matrix.abi }} ..
          conda run -n multipy_runtime_env cmake --build . --config Release

      - name: install files
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          conda run -n multipy_runtime_env cmake --install . --prefix "."

      - name: Run unit tests with ABI=${{ matrix.abi }}
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          ./test_deploy

      - name: Run unit tests with ABI=${{ matrix.abi }} with gpu
        shell: bash -l {0}
        env:
          PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
          cd multipy/runtime/build
          ./test_deploy_gpu
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ internally, please see the related [arXiv paper](https://arxiv.org/pdf/2104.0025

The C++ binaries (`libtorch_interpreter.so`,`libtorch_deploy.a`, `utils.cmake`), and the header files of `multipy::runtime` can be installed from our [nightly release](https://github.com/pytorch/multipy/releases/tag/nightly-runtime-abi-0). The ABI for the nightly release is 0. You can find a version of the release with ABI=1 [here](https://github.com/pytorch/multipy/releases/tag/nightly-runtime-abi-1).

C++ binaries with CUDA (11.3) support can also be found for [ABI=0](https://github.com/pytorch/multipy/releases/tag/nightly-runtime-cuda-abi-0) and [ABI=1](https://github.com/pytorch/multipy/releases/tag/nightly-runtime-cuda-abi-1).

```
wget https://github.com/pytorch/multipy/releases/download/nightly-runtime-abi-0/multipy_runtime.tar.gz
tar -xvzf multipy_runtime.tar.gz
Expand Down
16 changes: 7 additions & 9 deletions multipy/runtime/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -86,14 +86,12 @@ target_link_libraries(test_deploy
)
target_include_directories(test_deploy PRIVATE ${CMAKE_SOURCE_DIR}/../..)

# LINK_DIRECTORIES("${PYTORCH_ROOT}/torch/lib")
# add_executable(test_deploy_gpu ${INTERPRETER_TEST_SOURCES_GPU})
# target_compile_definitions(test_deploy_gpu PUBLIC TEST_CUSTOM_LIBRARY)
# target_include_directories(test_deploy_gpu PRIVATE ${PYTORCH_ROOT}/torch)
# target_include_directories(test_deploy_gpu PRIVATE ${CMAKE_SOURCE_DIR}/../..)
# target_link_libraries(test_deploy_gpu
# PUBLIC "-Wl,--no-as-needed -rdynamic" gtest dl torch_deploy_interface c10 torch_cpu
# )
# GPU variant of the deploy test binary; links against the torch libraries
# built in the submodule's torch/lib directory.
add_executable(test_deploy_gpu ${INTERPRETER_TEST_SOURCES_GPU})
target_compile_definitions(test_deploy_gpu PUBLIC TEST_CUSTOM_LIBRARY)
target_include_directories(test_deploy_gpu PRIVATE ${PYTORCH_ROOT}/torch)
target_include_directories(test_deploy_gpu PRIVATE ${CMAKE_SOURCE_DIR}/../..)
# Target-scoped replacement (CMake >= 3.13) for the legacy directory-scoped
# LINK_DIRECTORIES call, which would leak into every target defined below.
target_link_directories(test_deploy_gpu PRIVATE "${PYTORCH_ROOT}/torch/lib")
target_link_libraries(test_deploy_gpu
  PUBLIC "-Wl,--no-as-needed -rdynamic" gtest ${CMAKE_DL_LIBS} torch_deploy_interface c10 torch_cpu
)

LINK_DIRECTORIES("${PYTORCH_ROOT}/torch/lib")
add_library(test_deploy_lib SHARED test_deploy_lib.cpp)
Expand All @@ -119,7 +117,7 @@ target_link_libraries(interactive_embedded_interpreter
)

# Install both test binaries into tests/bin.
# BUG FIX: test_deploy was previously listed in two separate install()
# commands; the duplicate is collapsed into a single rule.
install(TARGETS test_deploy test_deploy_gpu DESTINATION tests/bin)

Expand Down
3 changes: 1 addition & 2 deletions multipy/runtime/test_deploy_gpu.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,7 @@ TEST(TorchDeployGPUTest, SimpleModel) {

TEST(TorchDeployGPUTest, UsesDistributed) {
const auto model_filename = path(
"USES_DISTRIBUTED",
"torch/csrc/deploy/example/generated/uses_distributed");
"USES_DISTRIBUTED", "multipy/runtime/example/generated/uses_distributed");
torch::deploy::InterpreterManager m(1);
torch::deploy::Package p = m.loadPackage(model_filename);
{
Expand Down

0 comments on commit 8a40a97

Please sign in to comment.