ext/torch/extconf.rb in torch-rb-0.2.0 vs ext/torch/extconf.rb in torch-rb-0.2.1

- old
+ new

@@ -8,23 +8,28 @@
 $CXXFLAGS << " -D_GLIBCXX_USE_CXX11_ABI=1"
 
 # TODO check compiler name
 clang = RbConfig::CONFIG["host_os"] =~ /darwin/i
 
+# check omp first
 if have_library("omp") || have_library("gomp")
   $CXXFLAGS << " -DAT_PARALLEL_OPENMP=1"
   $CXXFLAGS << " -Xclang" if clang
   $CXXFLAGS << " -fopenmp"
 end
 
-# silence ruby/intern.h warning
-$CXXFLAGS << " -Wno-deprecated-register"
-
-# silence torch warnings
 if clang
+  # silence ruby/intern.h warning
+  $CXXFLAGS << " -Wno-deprecated-register"
+
+  # silence torch warnings
   $CXXFLAGS << " -Wno-shorten-64-to-32 -Wno-missing-noreturn"
 else
+  # silence rice warnings
+  $CXXFLAGS << " -Wno-noexcept-type"
+
+  # silence torch warnings
   $CXXFLAGS << " -Wno-duplicated-cond -Wno-suggest-attribute=noreturn"
 end
 
 inc, lib = dir_config("torch")
 inc ||= "/usr/local/include"
@@ -32,18 +37,23 @@
 cuda_inc, cuda_lib = dir_config("cuda")
 cuda_inc ||= "/usr/local/cuda/include"
 cuda_lib ||= "/usr/local/cuda/lib64"
 
-with_cuda = Dir["#{lib}/*torch_cuda*"].any? && have_library("cuda") && have_library("cudnn")
+$LDFLAGS << " -L#{lib}" if Dir.exist?(lib)
+abort "LibTorch not found" unless have_library("torch")
 
+with_cuda = false
+if Dir["#{lib}/*torch_cuda*"].any?
+  $LDFLAGS << " -L#{cuda_lib}" if Dir.exist?(cuda_lib)
+  with_cuda = have_library("cuda") && have_library("cudnn")
+end
+
 $INCFLAGS << " -I#{inc}"
 $INCFLAGS << " -I#{inc}/torch/csrc/api/include"
 
 $LDFLAGS << " -Wl,-rpath,#{lib}"
 $LDFLAGS << ":#{cuda_lib}/stubs:#{cuda_lib}" if with_cuda
-$LDFLAGS << " -L#{lib}"
-$LDFLAGS << " -L#{cuda_lib}" if with_cuda
 
 # https://github.com/pytorch/pytorch/blob/v1.5.0/torch/utils/cpp_extension.py#L1232-L1238
 $LDFLAGS << " -lc10 -ltorch_cpu -ltorch"
 if with_cuda
   $LDFLAGS << " -lcuda -lnvrtc -lnvToolsExt -lcudart -lc10_cuda -ltorch_cuda -lcufft -lcurand -lcublas -lcudnn"
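Reading the diff: 0.2.1 splits the warning-silencing flags by compiler (-Wno-deprecated-register stays clang-only, and -Wno-noexcept-type quiets rice under gcc) and reorders LibTorch detection. Appending -L#{lib} to $LDFLAGS before have_library("torch") lets mkmf's link test find a LibTorch installed outside the default search path, and the explicit abort turns a late linker error into an early "LibTorch not found" message; the CUDA probe gets the same treatment, adding -L#{cuda_lib} before checking for cuda and cudnn, and only when a torch_cuda library is present. Below is a minimal sketch of that mkmf probe pattern, using plain mkmf and a hypothetical /opt/libtorch path for illustration:

    require "mkmf"

    # dir_config("torch") picks up --with-torch-dir (or --with-torch-include /
    # --with-torch-lib), e.g.:
    #   gem install torch-rb -- --with-torch-dir=/opt/libtorch   # example path
    _inc, lib = dir_config("torch")
    lib ||= "/usr/local/lib"

    # make the library directory visible to the link test *before* probing;
    # have_library compiles a small test program using the current $LDFLAGS
    $LDFLAGS << " -L#{lib}" if Dir.exist?(lib)

    # fail fast with a readable message instead of a linker error later on
    abort "LibTorch not found" unless have_library("torch")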