Skip to content

Commit

Permalink
Update CMake code (WIP)
Browse files Browse the repository at this point in the history
  • Loading branch information
HydrogenSulfate committed Sep 18, 2024
1 parent 2e79d68 commit 482d588
Show file tree
Hide file tree
Showing 5 changed files with 136 additions and 5 deletions.
119 changes: 119 additions & 0 deletions source/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,125 @@ if(WITH_GPU)
endif()
endif()

# Force MSVC to use the static C runtime: rewrite every /MD (DLL runtime)
# occurrence to /MT across all per-configuration CXX flag variables.
# Implemented as a macro intentionally so the CMAKE_CXX_FLAGS* rewrites land
# in the caller's scope.
macro(safe_set_static_flag)
  foreach(flag_var
          CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
          CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
    if(${flag_var} MATCHES "/MD")
      # Quote the double expansion so an empty flag variable stays valid.
      string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
    endif()
  endforeach()
endmacro()

# --- Paddle inference library location --------------------------------------
# PADDLE_LIB must point at an extracted Paddle inference distribution (the
# directory containing paddle/ and third_party/).
if(NOT DEFINED PADDLE_LIB)
message(
FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
endif()
# Re-declare as a cached PATH so the value persists across re-configures and
# is visible/editable in cmake-gui.
set(PADDLE_LIB
${PADDLE_LIB}
CACHE PATH "/path/paddle/lib")

include_directories("${PADDLE_LIB}/")
# Third-party dependencies bundled inside the Paddle distribution.
set(PADDLE_LIB_THIRD_PARTY_PATH "${PADDLE_LIB}/third_party/install/")

# NOTE(review): directory-scoped include_directories()/link_directories()
# leak into every target defined below this point; prefer
# target_include_directories()/target_link_libraries() on the consuming
# targets once those targets are identifiable.
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/include")
include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/include")

link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/lib")
link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/lib")
link_directories("${PADDLE_LIB}/paddle/lib")

# add custom operators
option(USE_TENSORRT "Compile demo with TensorRT." OFF)

if(WITH_GPU)
if(NOT WIN32)
set(CUDA_LIB
"/usr/local/cuda/lib64/"
CACHE STRING "CUDA Library")
else()
if(CUDA_LIB STREQUAL "")
set(CUDA_LIB
"C:\\Program\ Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v8.0\\lib\\x64"
)
endif()
endif(NOT WIN32)
endif()

# TensorRT headers/libraries (Linux only). TENSORRT_INCLUDE_DIR and
# TENSORRT_LIB_DIR must be supplied by the user, e.g.
# -DTENSORRT_INCLUDE_DIR=... -DTENSORRT_LIB_DIR=...
if(NOT WIN32)
  if(USE_TENSORRT AND WITH_GPU)
    include_directories("${TENSORRT_INCLUDE_DIR}")
    link_directories("${TENSORRT_LIB_DIR}")
  endif()
endif()

# Pick the Paddle inference library to link. The original three-way branch
# used the static suffix both for WITH_STATIC_LIB and for Windows, so the
# two cases collapse into a single condition; only non-Windows dynamic
# builds link the shared library.
if(WIN32 OR WITH_STATIC_LIB)
  set(DEPS
      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX}
  )
else()
  set(DEPS
      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX}
  )
endif()

# Common link dependencies: math/MKLDNN libs plus the bundled third-party
# libraries, with platform-specific library names.
if(NOT WIN32)
  # POSIX system libraries required by the Paddle inference runtime.
  set(EXTERNAL_LIB "-lrt -ldl -lpthread")
  set(DEPS
      ${DEPS}
      ${MATH_LIB}
      ${MKLDNN_LIB}
      glog
      gflags
      protobuf
      xxhash
      ${EXTERNAL_LIB})
else()
  # Windows static archives use different library names (gflags_static,
  # libprotobuf). EXTERNAL_LIB is unset here, so it expands to nothing.
  set(DEPS
      ${DEPS}
      ${MATH_LIB}
      ${MKLDNN_LIB}
      glog
      gflags_static
      libprotobuf
      xxhash
      ${EXTERNAL_LIB})
  # NOTE(review): shlwapi.lib is presumably needed by glog on Windows —
  # confirm before removing.
  set(DEPS ${DEPS} shlwapi.lib)
endif()

# GPU runtime dependencies. Linux links the shared CUDA/TensorRT runtimes
# (lib*.so); Windows links the corresponding .lib files. Consecutive
# appends to DEPS are merged into single set() calls — the resulting list
# is identical to appending one entry at a time.
if(WITH_GPU)
  if(NOT WIN32)
    if(USE_TENSORRT)
      set(DEPS ${DEPS}
          ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX}
          ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
    endif()
    set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
  else()
    if(USE_TENSORRT)
      set(DEPS ${DEPS}
          ${TENSORRT_LIB_DIR}/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX}
          ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
    endif()
    set(DEPS ${DEPS}
        ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX}
        ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX}
        ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
  endif()
endif()

# Typo fix in the user-facing help string: "converage" -> "coverage".
option(BUILD_TESTING "Build test and enable coverage" OFF)
set(DEEPMD_C_ROOT
""
Expand Down
7 changes: 7 additions & 0 deletions source/api_cc/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,13 @@ if(ENABLE_PYTORCH
target_link_libraries(${libname} PRIVATE "${TORCH_LIBRARIES}")
target_compile_definitions(${libname} PRIVATE BUILD_PYTORCH)
endif()
# Link the Paddle inference backend into the C++ API library when enabled.
# NOTE(review): this guard appears copied from the ENABLE_PYTORCH branch
# above — OP_CXX_ABI_PT is the PyTorch op ABI variable, and the LAMMPS/i-PI
# comment also matches the PyTorch block. Confirm whether a Paddle-specific
# ABI variable should be compared here instead.
if(ENABLE_PADDLE
AND "${OP_CXX_ABI_PT}" EQUAL "${OP_CXX_ABI}"
# LAMMPS and i-PI in the Python package are not ready - needs more work
AND NOT BUILD_PY_IF)
target_link_libraries(${libname} PRIVATE "${PADDLE_LIBRARIES}")
target_compile_definitions(${libname} PRIVATE BUILD_PADDLE)
endif()

target_include_directories(
${libname}
Expand Down
3 changes: 2 additions & 1 deletion source/api_cc/src/DeepPot.cc
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ DeepPot::~DeepPot() {}
void DeepPot::init(const std::string& model,
const int& gpu_rank,
const std::string& file_content) {
std::cout << "** access here" << std::endl;
std::cout << "****** access here" << std::endl;
if (inited) {
std::cerr << "WARNING: deepmd-kit should not be initialized twice, do "
"nothing at the second call of initializer"
Expand All @@ -46,6 +46,7 @@ void DeepPot::init(const std::string& model,
backend = deepmd::DPBackend::PyTorch;
} else if (model.length() >= 3 && model.substr(model.length() - 3) == ".pb") {
backend = deepmd::DPBackend::TensorFlow;
// } else if (model.length() >= 3 && (model.substr(model.length() - 5) == ".json" || model.substr(model.length() - 8) == ".pdmodel")) {
} else if (true) {
backend = deepmd::DPBackend::Paddle;
} else {
Expand Down
6 changes: 6 additions & 0 deletions source/config/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,12 @@ else()
set(ENABLE_PYTORCH 0)
endif()

# Normalize ENABLE_PADDLE to a literal 0/1 so the configure_file() call
# below substitutes a clean integer into run_config.ini.
if(NOT ENABLE_PADDLE)
  set(ENABLE_PADDLE 0)
else()
  set(ENABLE_PADDLE 1)
endif()

configure_file("run_config.ini" "${CMAKE_CURRENT_BINARY_DIR}/run_config.ini"
@ONLY)

Expand Down
6 changes: 2 additions & 4 deletions source/lmp/plugin/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,8 @@ if(DEFINED LAMMPS_SOURCE_ROOT OR DEFINED LAMMPS_VERSION)
GIT_REPOSITORY https://github.com/lammps/lammps
GIT_TAG ${LAMMPS_VERSION})
FetchContent_GetProperties(lammps_download)
if(NOT lammps_download_POPULATED)
FetchContent_Populate(lammps_download)
set(LAMMPS_SOURCE_ROOT ${lammps_download_SOURCE_DIR})
endif()
# if(NOT lammps_download_POPULATED) FetchContent_Populate(lammps_download)
# set(LAMMPS_SOURCE_ROOT ${lammps_download_SOURCE_DIR}) endif()
endif()
set(LAMMPS_HEADER_DIR ${LAMMPS_SOURCE_ROOT}/src)
message(STATUS "LAMMPS_HEADER_DIR is ${LAMMPS_HEADER_DIR}")
Expand Down

0 comments on commit 482d588

Please sign in to comment.