From 482d5882189636ec78879be975488132243b1ade Mon Sep 17 00:00:00 2001
From: HydrogenSulfate <490868991@qq.com>
Date: Wed, 18 Sep 2024 13:44:54 +0800
Subject: [PATCH] update CMAKE code(WIP)

---
 source/CMakeLists.txt            | 119 +++++++++++++++++++++++++++++++
 source/api_cc/CMakeLists.txt     |   7 ++
 source/api_cc/src/DeepPot.cc     |   3 +-
 source/config/CMakeLists.txt     |   6 ++
 source/lmp/plugin/CMakeLists.txt |   6 +-
 5 files changed, 136 insertions(+), 5 deletions(-)

diff --git a/source/CMakeLists.txt b/source/CMakeLists.txt
index 1bbab7e398..fb9c778462 100644
--- a/source/CMakeLists.txt
+++ b/source/CMakeLists.txt
@@ -129,6 +129,125 @@ if(WITH_GPU)
   endif()
 endif()
 
+macro(safe_set_static_flag)
+  foreach(flag_var
+          CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
+          CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
+    if(${flag_var} MATCHES "/MD")
+      string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
+    endif(${flag_var} MATCHES "/MD")
+  endforeach(flag_var)
+endmacro()
+
+if(NOT DEFINED PADDLE_LIB)
+  message(
+    FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
+endif()
+set(PADDLE_LIB
+    ${PADDLE_LIB}
+    CACHE PATH "/path/paddle/lib")
+
+include_directories("${PADDLE_LIB}/")
+set(PADDLE_LIB_THIRD_PARTY_PATH "${PADDLE_LIB}/third_party/install/")
+
+include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/include")
+include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/include")
+include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/include")
+include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/include")
+
+link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/lib")
+link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}glog/lib")
+link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}gflags/lib")
+link_directories("${PADDLE_LIB_THIRD_PARTY_PATH}xxhash/lib")
+link_directories("${PADDLE_LIB}/paddle/lib")
+
+# add custom operators
+option(USE_TENSORRT "Compile demo with TensorRT." OFF)
+
+if(WITH_GPU)
+  if(NOT WIN32)
+    set(CUDA_LIB
+        "/usr/local/cuda/lib64/"
+        CACHE STRING "CUDA Library")
+  else()
+    if(CUDA_LIB STREQUAL "")
+      set(CUDA_LIB
+          "C:\\Program\ Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v8.0\\lib\\x64"
+      )
+    endif()
+  endif(NOT WIN32)
+endif()
+
+if(NOT WIN32)
+  if(USE_TENSORRT AND WITH_GPU)
+    include_directories("${TENSORRT_INCLUDE_DIR}")
+    link_directories("${TENSORRT_LIB_DIR}")
+  endif()
+endif(NOT WIN32)
+
+if(WITH_STATIC_LIB)
+  set(DEPS
+      ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX}
+  )
+else()
+  if(WIN32)
+    set(DEPS
+        ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX}
+    )
+  else()
+    set(DEPS
+        ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX}
+    )
+  endif()
+endif()
+
+if(NOT WIN32)
+  set(EXTERNAL_LIB "-lrt -ldl -lpthread")
+  set(DEPS
+      ${DEPS}
+      ${MATH_LIB}
+      ${MKLDNN_LIB}
+      glog
+      gflags
+      protobuf
+      xxhash
+      ${EXTERNAL_LIB})
+else()
+  set(DEPS
+      ${DEPS}
+      ${MATH_LIB}
+      ${MKLDNN_LIB}
+      glog
+      gflags_static
+      libprotobuf
+      xxhash
+      ${EXTERNAL_LIB})
+  set(DEPS ${DEPS} shlwapi.lib)
+endif(NOT WIN32)
+
+if(WITH_GPU)
+  if(NOT WIN32)
+    if(USE_TENSORRT)
+      set(DEPS ${DEPS}
+               ${TENSORRT_LIB_DIR}/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
+      set(DEPS
+          ${DEPS}
+          ${TENSORRT_LIB_DIR}/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
+    endif()
+    set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
+  else()
+    if(USE_TENSORRT)
+      set(DEPS ${DEPS}
+               ${TENSORRT_LIB_DIR}/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
+      set(DEPS ${DEPS}
+               ${TENSORRT_LIB_DIR}/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
+    endif()
+    set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX})
+    set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX})
+    set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
+  endif()
+endif()
+
 option(BUILD_TESTING "Build test and enable converage" OFF)
 set(DEEPMD_C_ROOT
     ""
diff --git a/source/api_cc/CMakeLists.txt b/source/api_cc/CMakeLists.txt
index 228a6657d3..80429f0b1d 100644
--- a/source/api_cc/CMakeLists.txt
+++ b/source/api_cc/CMakeLists.txt
@@ -23,6 +23,13 @@ if(ENABLE_PYTORCH
   target_link_libraries(${libname} PRIVATE "${TORCH_LIBRARIES}")
   target_compile_definitions(${libname} PRIVATE BUILD_PYTORCH)
 endif()
+if(ENABLE_PADDLE
+   AND "${OP_CXX_ABI_PT}" EQUAL "${OP_CXX_ABI}"
+   # LAMMPS and i-PI in the Python package are not ready - needs more work
+   AND NOT BUILD_PY_IF)
+  target_link_libraries(${libname} PRIVATE "${PADDLE_LIBRARIES}")
+  target_compile_definitions(${libname} PRIVATE BUILD_PADDLE)
+endif()
 
 target_include_directories(
   ${libname}
diff --git a/source/api_cc/src/DeepPot.cc b/source/api_cc/src/DeepPot.cc
index 18ddb6ab6c..81fc594813 100644
--- a/source/api_cc/src/DeepPot.cc
+++ b/source/api_cc/src/DeepPot.cc
@@ -34,7 +34,7 @@ DeepPot::~DeepPot() {}
 void DeepPot::init(const std::string& model,
                    const int& gpu_rank,
                    const std::string& file_content) {
-  std::cout << "** access here" << std::endl;
+  std::cout << "****** access here" << std::endl;
   if (inited) {
     std::cerr << "WARNING: deepmd-kit should not be initialized twice, do "
                  "nothing at the second call of initializer"
@@ -46,6 +46,7 @@ void DeepPot::init(const std::string& model,
     backend = deepmd::DPBackend::PyTorch;
   } else if (model.length() >= 3 && model.substr(model.length() - 3) == ".pb") {
     backend = deepmd::DPBackend::TensorFlow;
+    // } else if (model.length() >= 3 && (model.substr(model.length() - 5) == ".json" || model.substr(model.length() - 8) == ".pdmodel")) {
   } else if (true) {
     backend = deepmd::DPBackend::Paddle;
   } else {
diff --git a/source/config/CMakeLists.txt b/source/config/CMakeLists.txt
index b1ce17566f..dd005a327b 100644
--- a/source/config/CMakeLists.txt
+++ b/source/config/CMakeLists.txt
@@ -14,6 +14,12 @@ else()
   set(ENABLE_PYTORCH 0)
 endif()
 
+if(ENABLE_PADDLE)
+  set(ENABLE_PADDLE 1)
+else()
+  set(ENABLE_PADDLE 0)
+endif()
+
 configure_file("run_config.ini" "${CMAKE_CURRENT_BINARY_DIR}/run_config.ini"
                @ONLY)
 
diff --git a/source/lmp/plugin/CMakeLists.txt b/source/lmp/plugin/CMakeLists.txt
index f912059261..efeb9af260 100644
--- a/source/lmp/plugin/CMakeLists.txt
+++ b/source/lmp/plugin/CMakeLists.txt
@@ -9,10 +9,8 @@ if(DEFINED LAMMPS_SOURCE_ROOT OR DEFINED LAMMPS_VERSION)
       GIT_REPOSITORY https://github.com/lammps/lammps
       GIT_TAG ${LAMMPS_VERSION})
     FetchContent_GetProperties(lammps_download)
-    if(NOT lammps_download_POPULATED)
-      FetchContent_Populate(lammps_download)
-      set(LAMMPS_SOURCE_ROOT ${lammps_download_SOURCE_DIR})
-    endif()
+    # if(NOT lammps_download_POPULATED) FetchContent_Populate(lammps_download)
+    # set(LAMMPS_SOURCE_ROOT ${lammps_download_SOURCE_DIR}) endif()
   endif()
   set(LAMMPS_HEADER_DIR ${LAMMPS_SOURCE_ROOT}/src)
   message(STATUS "LAMMPS_HEADER_DIR is ${LAMMPS_HEADER_DIR}")
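
Notes on the diff above (illustrative sketches, not part of the patch):

The new Paddle-inference block in source/CMakeLists.txt stops the configure
step with FATAL_ERROR whenever PADDLE_LIB is undefined, even for builds that
never enable the Paddle backend, while the rest of the patch gates Paddle
support behind ENABLE_PADDLE (source/config/CMakeLists.txt and
source/api_cc/CMakeLists.txt). A minimal sketch of guarding the block with
that same flag is shown below; the option() declaration and the surrounding
if(ENABLE_PADDLE) guard are assumptions for illustration, while everything
inside them is taken verbatim from the hunk above.

  # Sketch: only require and wire up the Paddle inference library when the
  # Paddle backend is actually requested.
  option(ENABLE_PADDLE "Enable the Paddle inference backend" OFF)

  if(ENABLE_PADDLE)
    if(NOT DEFINED PADDLE_LIB)
      message(
        FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
    endif()
    set(PADDLE_LIB
        ${PADDLE_LIB}
        CACHE PATH "/path/paddle/lib")
    set(PADDLE_LIB_THIRD_PARTY_PATH "${PADDLE_LIB}/third_party/install/")
    include_directories("${PADDLE_LIB}/")
    include_directories("${PADDLE_LIB_THIRD_PARTY_PATH}protobuf/include")
    # ... remaining include_directories/link_directories calls and the DEPS
    #     setup from the hunk above, unchanged ...
    link_directories("${PADDLE_LIB}/paddle/lib")
  endif()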
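
The change to source/lmp/plugin/CMakeLists.txt comments out the block that
populated the LAMMPS download and set LAMMPS_SOURCE_ROOT, so a build that only
passes LAMMPS_VERSION now leaves LAMMPS_SOURCE_ROOT empty and LAMMPS_HEADER_DIR
resolves to "/src". If the block was dropped because the single-argument form
of FetchContent_Populate() is deprecated in CMake 3.30+ (an assumption about
the motivation), one possible replacement is sketched below, assuming the
LAMMPS checkout is only needed as a source tree:

  FetchContent_GetProperties(lammps_download)
  if(NOT lammps_download_POPULATED)
    # FetchContent_MakeAvailable downloads the sources and defines
    # lammps_download_SOURCE_DIR; it only calls add_subdirectory() when the
    # fetched tree contains a top-level CMakeLists.txt.
    FetchContent_MakeAvailable(lammps_download)
  endif()
  set(LAMMPS_SOURCE_ROOT ${lammps_download_SOURCE_DIR})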