"""
/*****************************************************************************/
Extension module loader
code referenced from : https://github.com/facebookresearch/maskrcnn-benchmark
/*****************************************************************************/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os.path
import torch
# torch.utils.cpp_extension (JIT compilation of C++/CUDA sources) was
# introduced in PyTorch 0.4; fail fast with a clear message on older builds.
try:
    from torch.utils.cpp_extension import load
    from torch.utils.cpp_extension import CUDA_HOME
except ImportError:
    raise ImportError(
        "The cpp layer extensions requires PyTorch 0.4 or higher")
def _load_C_extensions():
    """JIT-compile and load the C++/CUDA extension sources under ``csrc``.

    Collects every ``*.cpp`` file in ``csrc`` and ``csrc/cpu``. When a CUDA
    runtime is usable (``torch.cuda.is_available()`` and ``CUDA_HOME`` is
    set), the ``csrc/cuda/*.cu`` kernels are compiled in as well, with
    ``-DWITH_CUDA`` defined so the C++ side can enable its CUDA code paths.

    Returns:
        The loaded extension module (named ``ext_lib``) exposing the
        compiled operators.
    """
    this_dir = os.path.dirname(os.path.abspath(__file__))
    this_dir = os.path.join(this_dir, "csrc")

    # this_dir is absolute, so these globs already yield absolute paths;
    # no further path joining is needed before handing them to load().
    main_file = glob.glob(os.path.join(this_dir, "*.cpp"))
    sources_cpu = glob.glob(os.path.join(this_dir, "cpu", "*.cpp"))
    sources_cuda = glob.glob(os.path.join(this_dir, "cuda", "*.cu"))

    sources = main_file + sources_cpu
    extra_cflags = []
    extra_cuda_cflags = []
    if torch.cuda.is_available() and CUDA_HOME is not None:
        sources.extend(sources_cuda)
        extra_cflags = ["-O3", "-DWITH_CUDA"]
        # Needed by CUDA kernels that use extended (device) lambdas.
        extra_cuda_cflags = ["--expt-extended-lambda"]

    return load(
        name="ext_lib",
        sources=sources,
        extra_cflags=extra_cflags,
        extra_include_paths=[this_dir],
        extra_cuda_cflags=extra_cuda_cflags,
    )
# Compile and load the extension eagerly at import time: the first import of
# this module may trigger a slow JIT build of the C++/CUDA sources.
_backend = _load_C_extensions()