当前位置:   article > 正文

cmake多文件、多文件夹编译(1)_cmake多目录编译

cmake多目录编译

一、完整工程案例

工程目录结构如下:

manyFolderCompile3:工程的根目录。

bin:会生成的可执行文件的目录,需要把图片放到这个目录。在没有改动“main.cpp”文件中的读取图片的名字情况下,图片的名字只能是“123.jpeg”。

build:会进入这个目录执行编译命令。

include:存放头文件的目录。

lib:生成链接库的目录。

src:存放源文件的目录

相应文件的内容如下:

CMakeLists.txt

  1. cmake_minimum_required(VERSION 2.8)
  2. project(testCmake) # project() must come right after cmake_minimum_required()
  3. # Use the standard-variable mechanism rather than add_definitions(-std=c++11):
  4. # add_definitions() is meant for preprocessor defines, not language flags.
  5. set(CMAKE_CXX_STANDARD 11)
  6. set(CMAKE_CXX_STANDARD_REQUIRED ON)
  7. set(CMAKE_POSITION_INDEPENDENT_CODE ON)
  8. set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
  9. find_package(OpenCV REQUIRED)
  10. set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib) # libraries go to lib/
  11. set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/bin) # executables go to bin/
  12. aux_source_directory(. ALL_SRCS) # collect all sources in the project root (main.cpp)
  13. add_subdirectory(src) # builds the "power" library (comment fixed: the subdirectory is src, not math)
  14. add_executable(testCmake ${ALL_SRCS}) # note: pass ${ALL_SRCS}, not a literal file list
  15. target_link_libraries(testCmake power) # "power" is defined in src/CMakeLists.txt
  16. target_link_libraries(testCmake ${OpenCV_LIBRARIES})
  17. # Removed: set(CUDA_TOOLKIT_ROOT_DIR /usr/local/cuda) — nothing in this project uses CUDA.

power.h

  1. // power.h — public interface of the "power" library built by src/CMakeLists.txt.
  2. #ifndef POWER_H
  3. #define POWER_H
  4. // Returns base raised to the integer exponent.
  5. double power(double base, int exponent);
  6. #endif // POWER_H

main.cpp

  1. #include <iostream>
  2. #include <cstdio>
  3. #include <cstdlib>
  4. #include <opencv2/opencv.hpp>
  5. #include "include/power.h" // note: your own headers are included with quotes, not <>
  6. using namespace std;
  7. using namespace cv;
  8. // Usage: testCmake <base> <exponent>
  9. // Prints base^exponent, then displays 123.jpeg from the working directory (bin/).
  10. int main(int argc, char *argv[])
  11. {
  12. if (argc < 3){
  13. printf("Usage: %s base exponent \n", argv[0]);
  14. return 1;
  15. }
  16. double base = atof(argv[1]);
  17. int exponent = atoi(argv[2]);
  18. double result = power(base, exponent);
  19. cout << base << ' ' << '^' << ' ' << exponent << ' ' << "is" << ' ' << result << endl;
  20. Mat srcImage = imread("123.jpeg");
  21. // imread() returns an empty Mat when the file is missing; imshow() would
  22. // then abort, so check and fail with a clear message instead.
  23. if (srcImage.empty()) {
  24. cerr << "Failed to load 123.jpeg" << endl;
  25. return 1;
  26. }
  27. imshow("Origin", srcImage);
  28. waitKey(0);
  29. return 0;
  30. }

src/CMakeLists.txt

  1. # Build the "power" library from every source file in this directory.
  2. aux_source_directory(. LIB_SRCS)
  3. add_library(power ${LIB_SRCS})
  4. # Attach the include path to the target instead of the whole directory scope.
  5. # PUBLIC makes consumers of "power" (the testCmake executable) inherit it.
  6. target_include_directories(power PUBLIC
  7. ${PROJECT_SOURCE_DIR}/include
  8. )

power.cpp

  1. #include "power.h" // own headers are included with quotes
  2. // Returns base raised to exponent.
  3. // Bug fix: the accumulator was declared "int", which truncated every
  4. // intermediate product for fractional bases (e.g. power(2.5, 2) gave 6, not 6.25).
  5. // Also generalized: negative exponents now return 1 / base^(-exponent)
  6. // instead of silently returning base.
  7. double power(double base, int exponent)
  8. {
  9. if (exponent == 0) {
  10. return 1;
  11. }
  12. if (exponent < 0) {
  13. return 1.0 / power(base, -exponent);
  14. }
  15. double result = base;
  16. for (int i = 1; i < exponent; ++i) {
  17. result *= base;
  18. }
  19. return result;
  20. }

在创建完该工程后,进入build目录,执行如下命令:

  1. cmake ..
  2. make
  3. cd ..
  4. cd bin
  5. ./testCmake 2 2

其结果如下:

二、学习paddleX的CMakeLists.txt

        原地址是这里(原文超链接在转载时已丢失,可参考 PaddleX 官方仓库 deploy/cpp 目录下的 CMakeLists.txt)。

        在外部指令中使用 ${} 进行变量的引用。在 IF 等语句中,是直接使用变量名而不通过 ${} 取值。

工程目录如下(仅仅列出涉及文件):

--cpp:sh scripts/build.sh命令执行路径

        --scripts:

                --build.sh

        --CMakeLists.txt

        --demo:

                --CMakeLists.txt

        --build:

scripts/build.sh 文件内容(该文件与PaddleX的源文件有所不同,修改小部分)

  1. # Whether to build with GPU (CUDA) support
  2. WITH_GPU=ON
  3. # Use MKL or OpenBLAS
  4. WITH_MKL=ON
  5. # Whether to integrate TensorRT (only effective when WITH_GPU=ON)
  6. WITH_PADDLE_TENSORRT=OFF
  7. # TensorRT location; point this at your actual install when enabling it
  8. TENSORRT_DIR=$(pwd)/TensorRT/
  9. # Paddle inference library path (prebuilt from release/2.4)
  10. PADDLE_DIR=/paddle/paddlepaddle/Paddle/build/paddle_inference_install_dir
  11. # Whether to link Paddle's inference library statically
  12. # (when using TensorRT, the Paddle library is usually a shared one)
  13. WITH_STATIC_LIB=OFF
  14. # CUDA lib directory
  15. CUDA_LIB=/usr/local/cuda/lib64
  16. # cuDNN lib directory
  17. CUDNN_LIB=/usr/lib
  18. # Whether to enable encryption
  19. WITH_ENCRYPTION=OFF
  20. # OpenSSL location
  21. OPENSSL_DIR=$(pwd)/deps/openssl-1.1.0k
  22. {
  23. bash $(pwd)/scripts/bootstrap.sh # fetch the prebuilt encryption tool and OpenCV dependencies
  24. } || {
  25. echo "Fail to execute scripts/bootstrap.sh" # message fixed: it said "script/" but the command runs "scripts/"
  26. exit 1 # exit status must be 0-255; "exit -1" is not portable
  27. }
  28. # Nothing below needs to be edited
  29. rm -rf build
  30. mkdir -p build
  31. cd build
  32. cmake .. \
  33. -DWITH_GPU=${WITH_GPU} \
  34. -DWITH_MKL=${WITH_MKL} \
  35. -DWITH_PADDLE_TENSORRT=${WITH_PADDLE_TENSORRT} \
  36. -DTENSORRT_DIR=${TENSORRT_DIR} \
  37. -DPADDLE_DIR=${PADDLE_DIR} \
  38. -DWITH_STATIC_LIB=${WITH_STATIC_LIB} \
  39. -DCUDA_LIB=${CUDA_LIB} \
  40. -DCUDNN_LIB=${CUDNN_LIB} \
  41. -DWITH_ENCRYPTION=${WITH_ENCRYPTION} \
  42. -DOPENSSL_DIR=${OPENSSL_DIR}
  43. make -j16

./CMakeLists.txt

  1. cmake_minimum_required(VERSION 3.0) # minimum required CMake version
  2. project(PaddleDeploy CXX C) # project name and enabled languages
  3. # option(<variable> "<help_text>" [value])
  4. # variable: option name; help_text: description; value: default state (any value other than ON is treated as OFF)
  5. if (WIN32) # predefined when targeting Windows
  6. option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)
  7. else()
  8. option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." OFF)
  9. endif()
  10. # Paddle
  11. option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
  12. option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." OFF)
  13. option(WITH_PADDLE_TENSORRT "Compile demo with TensorRT." OFF)
  14. # other engines (help texts fixed: both previously said "TensorRT")
  15. option(WITH_OPENVINO "Compile demo with OpenVINO." OFF)
  16. option(WITH_ONNX_TENSORRT "Compile demo with ONNX TensorRT." OFF)
  17. # set() has three forms (normal variable, cache entry, environment variable); these create cache entries.
  18. # A cache entry keeps its value unless the user overrides it or FORCE is given.
  19. # Cache entries are visible across directory levels, much like global variables.
  20. # set(<variable> <value>... CACHE <type> <docstring> [FORCE])
  21. set(DEPS "" CACHE PATH "Location of libraries")
  22. # Paddle
  23. set(TENSORRT_DIR "" CACHE PATH "Location of libraries")
  24. set(PADDLE_DIR "" CACHE PATH "Location of libraries")
  25. set(CUDA_LIB "" CACHE PATH "Location of libraries")
  26. # OpenVINO
  27. set(GFLAGS_DIR "" CACHE PATH "Location of libraries")
  28. set(OPENVINO_DIR "" CACHE PATH "Location of libraries")
  29. set(NGRAPH_LIB "" CACHE PATH "Location of libraries")
  30. set(PROJECT_ROOT_DIR "." CACHE PATH "root directory of project.")
  31. if (NOT WIN32)
  32. set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
  33. set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
  34. set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/demo)
  35. else()
  36. set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
  37. set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
  38. set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/paddle_deploy)
  39. add_definitions(-DPADDLEX_DEPLOY) # compile definition for Windows builds
  40. endif()
  41. # project headers
  42. include_directories("${PROJECT_SOURCE_DIR}") # add the project root to the include path
  43. # aux_source_directory(<dir> <variable>)
  44. # collects the names of source files (headers excluded) under dir into variable
  45. # common
  46. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/common/src SRC)
  47. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/utils/src SRC)
  48. # det / seg / clas / pdx sources
  49. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppdet/src DETECTOR_SRC)
  50. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppseg/src DETECTOR_SRC)
  51. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/ppclas/src DETECTOR_SRC)
  52. aux_source_directory(${PROJECT_SOURCE_DIR}/model_deploy/paddlex/src DETECTOR_SRC)
  53. # yaml-cpp
  54. if(WIN32)
  55. set(YAML_BUILD_SHARED_LIBS OFF CACHE BOOL "yaml build shared library.")
  56. else()
  57. set(YAML_BUILD_SHARED_LIBS ON CACHE BOOL "yaml build shared library.")
  58. endif()
  59. include(${PROJECT_SOURCE_DIR}/cmake/yaml-cpp.cmake)
  60. include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include")
  61. link_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/lib")
  62. # OpenCV
  63. find_package(OpenCV REQUIRED)
  64. message(STATUS "OpenCV library status:")
  65. message(STATUS " version: ${OpenCV_VERSION}")
  66. message(STATUS " libraries: ${OpenCV_LIBS}")
  67. message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}")
  68. set(DEPS ${DEPS} ${OpenCV_LIBS})
  69. include_directories(${OpenCV_INCLUDE_DIRS})
  70. # MSVC runtime: replace /MD with /MT in every configuration for static CRT linking
  71. macro(safe_set_static_flag)
  72. foreach(flag_var
  73. CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
  74. CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
  75. if(${flag_var} MATCHES "/MD")
  76. string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
  77. endif()
  78. endforeach()
  79. endmacro()
  80. if (WIN32)
  81. add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
  82. find_package(OpenMP REQUIRED)
  83. if (OPENMP_FOUND)
  84. message("OPENMP FOUND")
  85. set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} ${OpenMP_C_FLAGS}")
  86. set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} ${OpenMP_C_FLAGS}")
  87. set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} ${OpenMP_CXX_FLAGS}")
  88. set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} ${OpenMP_CXX_FLAGS}")
  89. endif()
  90. set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd")
  91. set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT")
  92. set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd")
  93. set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT")
  94. if (WITH_STATIC_LIB)
  95. safe_set_static_flag()
  96. add_definitions(-DSTATIC_LIB)
  97. endif()
  98. else()
  99. set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O3 -fopenmp -std=c++11") # fixed: "-o3" names an output file "3"; the optimization flag is "-O3"
  100. set(CMAKE_STATIC_LIBRARY_PREFIX "")
  101. set(EXTERNAL_LIB "-ldl -lrt -lgomp -lz -lm -lpthread")
  102. set(DEPS ${DEPS} ${EXTERNAL_LIB})
  103. endif()
  104. # encryption
  105. set(ENCRYPTION_SRC "")
  106. if (WITH_ENCRYPTION)
  107. add_definitions(-DPADDLEX_DEPLOY_ENCRYPTION)
  108. set(CMAKE_C_FLAGS "-fPIC ${CMAKE_C_FLAGS}")
  109. set(CMAKE_CXX_FLAGS "-fPIC ${CMAKE_CXX_FLAGS}")
  110. include_directories("${OPENSSL_DIR}/install-${CMAKE_SYSTEM_PROCESSOR}/include")
  111. link_directories("${OPENSSL_DIR}/install-${CMAKE_SYSTEM_PROCESSOR}/lib")
  112. if (WIN32)
  113. set(DEPS ${DEPS} libssl_static${CMAKE_STATIC_LIBRARY_SUFFIX} libcrypto_static${CMAKE_STATIC_LIBRARY_SUFFIX})
  114. else ()
  115. set(DEPS ${DEPS} libssl${CMAKE_STATIC_LIBRARY_SUFFIX} libcrypto${CMAKE_STATIC_LIBRARY_SUFFIX})
  116. endif()
  117. aux_source_directory(${PROJECT_ROOT_DIR}/encryption/src ENCRYPTION_SRC)
  118. aux_source_directory(${PROJECT_ROOT_DIR}/encryption/util/src ENCRYPTION_SRC)
  119. aux_source_directory(${PROJECT_ROOT_DIR}/encryption/util/src/crypto ENCRYPTION_SRC)
  120. endif()
  121. # select engine
  122. if(WITH_OPENVINO)
  123. add_subdirectory(demo/onnx_openvino)
  124. else ()
  125. add_subdirectory(demo)
  126. endif()

./demo/CMakeLists.txt

  1. # paddle inference
  2. if (NOT DEFINED PADDLE_DIR OR "${PADDLE_DIR}" STREQUAL "") # quoted: an empty unquoted expansion would make if() a syntax error
  3. message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_influence_dir")
  4. endif()
  5. # paddle inference third party
  6. include_directories("${PADDLE_DIR}")
  7. include_directories("${PADDLE_DIR}/third_party/install/protobuf/include")
  8. include_directories("${PADDLE_DIR}/third_party/install/glog/include")
  9. include_directories("${PADDLE_DIR}/third_party/install/gflags/include")
  10. include_directories("${PADDLE_DIR}/third_party/install/xxhash/include")
  11. include_directories("${PADDLE_DIR}/third_party/install/cryptopp/include")
  12. link_directories("${PADDLE_DIR}/paddle/lib/")
  13. link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib")
  14. link_directories("${PADDLE_DIR}/third_party/install/glog/lib")
  15. link_directories("${PADDLE_DIR}/third_party/install/gflags/lib")
  16. link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib")
  17. link_directories("${PADDLE_DIR}/third_party/install/cryptopp/lib")
  18. if (WIN32)
  19. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/paddle_inference.lib)
  20. set(DEPS ${DEPS} glog gflags_static libprotobuf xxhash cryptopp-static libyaml-cppmt shlwapi)
  21. else()
  22. if (WITH_STATIC_LIB)
  23. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
  24. else()
  25. set(DEPS ${DEPS} ${PADDLE_DIR}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
  26. endif()
  27. set(DEPS ${DEPS} glog gflags protobuf xxhash cryptopp yaml-cpp)
  28. endif()
  29. # MKL
  30. if(WITH_MKL)
  31. add_definitions(-DUSE_MKL)
  32. set(MKLML_PATH "${PADDLE_DIR}/third_party/install/mklml")
  33. include_directories("${MKLML_PATH}/include")
  34. if (WIN32)
  35. set(MATH_LIB ${MKLML_PATH}/lib/mklml.lib ${MKLML_PATH}/lib/libiomp5md.lib)
  36. else ()
  37. set(MATH_LIB ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} ${MKLML_PATH}/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
  38. execute_process(COMMAND cp -r ${MKLML_PATH}/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} /usr/lib) # NOTE(review): configure-time copy into /usr/lib needs root and mutates the system; prefer RPATH/INSTALL_RPATH
  39. endif ()
  40. set(MKLDNN_PATH "${PADDLE_DIR}/third_party/install/mkldnn")
  41. if(EXISTS ${MKLDNN_PATH})
  42. include_directories("${MKLDNN_PATH}/include")
  43. if (WIN32)
  44. set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
  45. else ()
  46. set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
  47. endif ()
  48. endif()
  49. else()
  50. set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
  51. endif()
  52. set(DEPS ${DEPS} ${MATH_LIB} ${MKLDNN_LIB})
  53. # set GPU
  54. if (WITH_PADDLE_TENSORRT AND WITH_GPU)
  55. include_directories("${TENSORRT_DIR}/include")
  56. link_directories("${TENSORRT_DIR}/lib")
  57. file(READ ${TENSORRT_DIR}/include/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
  58. string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
  59. "${TENSORRT_VERSION_FILE_CONTENTS}")
  60. if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
  61. file(READ ${TENSORRT_DIR}/include/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
  62. string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
  63. "${TENSORRT_VERSION_FILE_CONTENTS}")
  64. endif()
  65. if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
  66. message(SEND_ERROR "Failed to detect TensorRT version.")
  67. endif()
  68. string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
  69. TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")
  70. message(STATUS "Current TensorRT header is ${TENSORRT_DIR}/include/NvInfer.h. " # fixed: TENSORRT_INCLUDE_DIR was never defined
  71. "Current TensorRT version is v${TENSORRT_MAJOR_VERSION}. ")
  72. endif()
  73. if(WITH_GPU)
  74. if (NOT DEFINED CUDA_LIB OR "${CUDA_LIB}" STREQUAL "") # quoted for the same reason as PADDLE_DIR above
  75. message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda/lib64")
  76. endif()
  77. if(NOT WIN32)
  78. if (NOT DEFINED CUDNN_LIB)
  79. message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn/")
  80. endif()
  81. set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
  82. set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})
  83. if (WITH_PADDLE_TENSORRT)
  84. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
  85. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
  86. endif()
  87. else()
  88. set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} )
  89. set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} )
  90. set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
  91. if (WITH_PADDLE_TENSORRT)
  92. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer${CMAKE_STATIC_LIBRARY_SUFFIX})
  93. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/nvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX})
  94. if("${TENSORRT_MAJOR_VERSION}" EQUAL 7)
  95. set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/myelin64_1${CMAKE_STATIC_LIBRARY_SUFFIX})
  96. endif()
  97. endif()
  98. endif()
  99. endif()
  100. message("-----DEPS = ${DEPS}")
  101. # engine src
  102. set(ENGINE_SRC ${PROJECT_SOURCE_DIR}/model_deploy/engine/src/ppinference_engine.cpp)
  103. add_executable(model_infer model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  104. add_dependencies(model_infer ext-yaml-cpp)
  105. target_link_libraries(model_infer ${DEPS})
  106. add_executable(batch_infer batch_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  107. add_dependencies(batch_infer ext-yaml-cpp)
  108. target_link_libraries(batch_infer ${DEPS})
  109. add_executable(multi_gpu_model_infer multi_gpu_model_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  110. add_dependencies(multi_gpu_model_infer ext-yaml-cpp)
  111. target_link_libraries(multi_gpu_model_infer ${DEPS})
  112. if (WITH_PADDLE_TENSORRT)
  113. add_executable(tensorrt_infer tensorrt_infer.cpp ${SRC} ${ENGINE_SRC} ${DETECTOR_SRC} ${ENCRYPTION_SRC})
  114. add_dependencies(tensorrt_infer ext-yaml-cpp)
  115. target_link_libraries(tensorrt_infer ${DEPS})
  116. endif()
  117. if(WIN32)
  118. add_custom_command(TARGET model_infer POST_BUILD
  119. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  120. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  121. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  122. COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE_DIR}/paddle/lib/paddle_inference.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  123. )
  124. if (WITH_PADDLE_TENSORRT)
  125. add_custom_command(TARGET model_infer POST_BUILD
  126. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  127. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/nvinfer_plugin.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  128. )
  129. if("${TENSORRT_MAJOR_VERSION}" EQUAL 7)
  130. add_custom_command(TARGET model_infer POST_BUILD
  131. COMMAND ${CMAKE_COMMAND} -E copy ${TENSORRT_DIR}/lib/myelin64_1.dll ${CMAKE_BINARY_DIR}/paddle_deploy
  132. )
  133. endif()
  134. endif()
  135. endif()

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/小桥流水78/article/detail/839851
推荐阅读
相关标签
  

闽ICP备14008679号