diff --git a/CMakeLists.txt b/CMakeLists.txt
index d919dc5ac05592efa65caab8ab69c47b814a7b1a..487aa200d7fc4df5850d9afd35e35fa119ca0e71 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -326,7 +326,6 @@ set(PADDLE_PYTHON_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/python/build")
 
 set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "-O3 -g -DNDEBUG")
 set(CMAKE_C_FLAGS_RELWITHDEBINFO "-O3 -g -DNDEBUG")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0")
 
 if(ON_INFER)
     # you can trun off the paddle fluid and inference lib by set ON_INFER=OFF
diff --git a/cmake/external/ascend.cmake b/cmake/external/ascend.cmake
index bcf0c0a0646fc386f41c4b1f35ba773d6a1adb6f..e496ff52155436ae858390d791558ef1b7e3266c 100644
--- a/cmake/external/ascend.cmake
+++ b/cmake/external/ascend.cmake
@@ -12,50 +12,38 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-INCLUDE(ExternalProject)
-SET(ASCEND_PROJECT "extern_ascend")
-IF((NOT DEFINED ASCEND_VER) OR (NOT DEFINED ASCEND_URL))
-  MESSAGE(STATUS "use pre defined download url")
-  SET(ASCEND_VER "0.1.1" CACHE STRING "" FORCE)
-  SET(ASCEND_NAME "ascend" CACHE STRING "" FORCE)
-  SET(ASCEND_URL "http://paddle-ascend.bj.bcebos.com/ascend.tar.gz" CACHE STRING "" FORCE)
-ENDIF()
-MESSAGE(STATUS "ASCEND_NAME: ${ASCEND_NAME}, ASCEND_URL: ${ASCEND_URL}")
-SET(ASCEND_SOURCE_DIR "${THIRD_PARTY_PATH}/ascend")
-SET(ASCEND_DOWNLOAD_DIR "${ASCEND_SOURCE_DIR}/src/${ASCEND_PROJECT}")
-SET(ASCEND_DST_DIR "ascend")
-SET(ASCEND_INSTALL_ROOT "${THIRD_PARTY_PATH}/install")
-SET(ASCEND_INSTALL_DIR ${ASCEND_INSTALL_ROOT}/${ASCEND_DST_DIR})
-SET(ASCEND_ROOT ${ASCEND_INSTALL_DIR})
-SET(ASCEND_INC_DIR ${ASCEND_ROOT}/include)
-SET(ASCEND_LIB_DIR ${ASCEND_ROOT}/lib)
-SET(ASCEND_LIB ${ASCEND_LIB_DIR}/libge_runner.so)
-SET(ASCEND_GRAPH_LIB ${ASCEND_LIB_DIR}/libgraph.so)
-SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${ASCEND_ROOT}/lib")
+#NOTE: Logic is from
+# https://github.com/mindspore-ai/graphengine/blob/master/CMakeLists.txt
+if(DEFINED ENV{ASCEND_CUSTOM_PATH})
+  set(ASCEND_DIR $ENV{ASCEND_CUSTOM_PATH})
+else()
+  set(ASCEND_DIR /usr/local/Ascend)
+endif()
 
-INCLUDE_DIRECTORIES(${ASCEND_INC_DIR})
-FILE(WRITE ${ASCEND_DOWNLOAD_DIR}/CMakeLists.txt
-  "PROJECT(ASCEND)\n"
-  "cmake_minimum_required(VERSION 3.0)\n"
-  "install(DIRECTORY ${ASCEND_NAME}/include ${ASCEND_NAME}/lib \n"
-  "        DESTINATION ${ASCEND_DST_DIR})\n")
-ExternalProject_Add(
-  ${ASCEND_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  PREFIX ${ASCEND_SOURCE_DIR}
-  DOWNLOAD_DIR ${ASCEND_DOWNLOAD_DIR}
-  DOWNLOAD_COMMAND wget --no-check-certificate ${ASCEND_URL} -c -q -O ${ASCEND_NAME}.tar.gz
-    && tar zxvf ${ASCEND_NAME}.tar.gz
-  DOWNLOAD_NO_PROGRESS 1
-  UPDATE_COMMAND ""
-  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${ASCEND_INSTALL_ROOT}
-  CMAKE_CACHE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${ASCEND_INSTALL_ROOT}
-)
-ADD_LIBRARY(ascend SHARED IMPORTED GLOBAL)
-SET_PROPERTY(TARGET ascend PROPERTY IMPORTED_LOCATION ${ASCEND_LIB})
+set(ASCEND_DRIVER_DIR ${ASCEND_DIR}/driver/lib64)
+set(ASCEND_DRIVER_COMMON_DIR ${ASCEND_DIR}/driver/lib64/common)
+set(ASCEND_DRIVER_SHARE_DIR ${ASCEND_DIR}/driver/lib64/share)
+set(ASCEND_RUNTIME_DIR ${ASCEND_DIR}/fwkacllib/lib64)
+set(ASCEND_ATC_DIR ${ASCEND_DIR}/atc/lib64)
+set(ASCEND_ACL_DIR ${ASCEND_DIR}/acllib/lib64)
+set(STATIC_ACL_LIB ${ASCEND_ACL_DIR})
 
-ADD_LIBRARY(ascend_graph SHARED IMPORTED GLOBAL)
-SET_PROPERTY(TARGET ascend_graph PROPERTY IMPORTED_LOCATION ${ASCEND_GRAPH_LIB})
-ADD_DEPENDENCIES(ascend ascend_graph ${ASCEND_PROJECT})
+set(ASCEND_MS_RUNTIME_PATH ${ASCEND_RUNTIME_DIR} ${ASCEND_ACL_DIR} ${ASCEND_ATC_DIR})
+set(ASCEND_MS_DRIVER_PATH ${ASCEND_DRIVER_DIR} ${ASCEND_DRIVER_COMMON_DIR})
+set(ATLAS_RUNTIME_DIR ${ASCEND_DIR}/ascend-toolkit/latest/fwkacllib/lib64)
+set(ATLAS_RUNTIME_INC_DIR ${ASCEND_DIR}/ascend-toolkit/latest/fwkacllib/include)
+set(ATLAS_ACL_DIR ${ASCEND_DIR}/ascend-toolkit/latest/acllib/lib64)
+set(ATLAS_ATC_DIR ${ASCEND_DIR}/ascend-toolkit/latest/atc/lib64)
+set(ATLAS_MS_RUNTIME_PATH ${ATLAS_RUNTIME_DIR} ${ATLAS_ACL_DIR} ${ATLAS_ATC_DIR})
+
+set(atlas_graph ${ATLAS_RUNTIME_DIR}/libgraph.so)
+set(atlas_ge_runner ${ATLAS_RUNTIME_DIR}/libge_runner.so)
+INCLUDE_DIRECTORIES(${ATLAS_RUNTIME_INC_DIR})
+
+ADD_LIBRARY(ascend_ge SHARED IMPORTED GLOBAL)
+SET_PROPERTY(TARGET ascend_ge PROPERTY IMPORTED_LOCATION ${atlas_ge_runner})
+
+ADD_LIBRARY(ascend_graph SHARED IMPORTED GLOBAL)
+SET_PROPERTY(TARGET ascend_graph PROPERTY IMPORTED_LOCATION ${atlas_graph})
+
+add_custom_target(extern_ascend DEPENDS ascend_ge ascend_graph)
diff --git a/cmake/external/cryptopp.cmake b/cmake/external/cryptopp.cmake
index a9e1a4d67b1e6a0b7086fe715bd24fc2b9d6ff15..3176e2a665c634d97ee413485003071394c7dedf 100644
--- a/cmake/external/cryptopp.cmake
+++ b/cmake/external/cryptopp.cmake
@@ -17,7 +17,7 @@ INCLUDE(ExternalProject)
 SET(CRYPTOPP_PREFIX_DIR ${THIRD_PARTY_PATH}/cryptopp)
 SET(CRYPTOPP_INSTALL_DIR ${THIRD_PARTY_PATH}/install/cryptopp)
 SET(CRYPTOPP_INCLUDE_DIR "${CRYPTOPP_INSTALL_DIR}/include" CACHE PATH "cryptopp include directory." FORCE)
-SET(CRYPTOPP_REPOSITORY https://gitee.com/tianjianhe/cryptopp.git)
+SET(CRYPTOPP_REPOSITORY ${GIT_URL}/weidai11/cryptopp.git)
 SET(CRYPTOPP_TAG CRYPTOPP_8_2_0)
 
 IF(WIN32)
@@ -33,7 +33,7 @@ set(CRYPTOPP_CMAKE_ARGS ${COMMON_CMAKE_ARGS}
     -DCMAKE_INSTALL_LIBDIR=${CRYPTOPP_INSTALL_DIR}/lib
     -DCMAKE_INSTALL_PREFIX=${CRYPTOPP_INSTALL_DIR}
     -DCMAKE_BUILD_TYPE=${THIRD_PARTY_BUILD_TYPE}
-    "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+    -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
     -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
     -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
     -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
diff --git a/cmake/external/dlpack.cmake b/cmake/external/dlpack.cmake
index fa6f8e8d4cdf08ad34b4bdf5c11d37e0e134eed3..87db181d953afb5bfb17d3167f1e5efac3353b79 100644
--- a/cmake/external/dlpack.cmake
+++ b/cmake/external/dlpack.cmake
@@ -17,7 +17,7 @@ include(ExternalProject)
 
 set(DLPACK_PREFIX_DIR ${THIRD_PARTY_PATH}/dlpack)
 set(DLPACK_SOURCE_DIR ${THIRD_PARTY_PATH}/dlpack/src/extern_dlpack)
-set(DLPACK_REPOSITORY https://gitee.com/tianjianhe/dlpack.git)
+set(DLPACK_REPOSITORY ${GIT_URL}/dmlc/dlpack.git)
 set(DLPACK_TAG v0.2)
 
 cache_third_party(extern_dlpack
diff --git a/cmake/external/gflags.cmake b/cmake/external/gflags.cmake
index 8ee0c4cdcda25790b2c63ceb5b7a01fb0fd3c22a..34f5d7e2befa911585892b44898be4f715cc8656 100644
--- a/cmake/external/gflags.cmake
+++ b/cmake/external/gflags.cmake
@@ -18,8 +18,8 @@
 SET(GFLAGS_PREFIX_DIR ${THIRD_PARTY_PATH}/gflags)
 SET(GFLAGS_SOURCE_DIR ${THIRD_PARTY_PATH}/gflags/src/extern_gflags)
 SET(GFLAGS_INSTALL_DIR ${THIRD_PARTY_PATH}/install/gflags)
 SET(GFLAGS_INCLUDE_DIR "${GFLAGS_INSTALL_DIR}/include" CACHE PATH "gflags include directory." FORCE)
-set(GFLAGS_REPOSITORY https://gitee.com/tianjianhe/gflags.git)
-set(GFLAGS_TAG 77592648e3f3be87d6c7123eb81cbad75f9aef5a)
+set(GFLAGS_REPOSITORY ${GIT_URL}/gflags/gflags.git)
+set(GFLAGS_TAG "v2.2.2")
 IF(WIN32)
   set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/gflags_static.lib" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE)
 ELSE(WIN32)
@@ -48,7 +48,7 @@ ExternalProject_Add(
     INSTALL_COMMAND ${INSTALL_COMMAND}
     CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
                -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-               "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+               -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
               -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
               -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
               -DCMAKE_C_FLAGS=${CMAKE_C_FLAGS}
diff --git a/cmake/external/glog.cmake b/cmake/external/glog.cmake
index 64410e99bddd434bfcdedb155758a0092f37f1de..05b98e2b56a33a65315d1e4fb1c02c738f93b712 100644
--- a/cmake/external/glog.cmake
+++ b/cmake/external/glog.cmake
@@ -18,8 +18,8 @@
 SET(GLOG_PREFIX_DIR ${THIRD_PARTY_PATH}/glog)
 SET(GLOG_SOURCE_DIR ${THIRD_PARTY_PATH}/glog/src/extern_glog)
 SET(GLOG_INSTALL_DIR ${THIRD_PARTY_PATH}/install/glog)
 SET(GLOG_INCLUDE_DIR "${GLOG_INSTALL_DIR}/include" CACHE PATH "glog include directory." FORCE)
-SET(GLOG_REPOSITORY https://gitee.com/tianjianhe/glog.git)
-SET(GLOG_TAG v0.3.5)
+SET(GLOG_REPOSITORY ${GIT_URL}/google/glog.git)
+SET(GLOG_TAG v0.4.0)
 
 IF(WIN32)
   SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/glog.lib" CACHE FILEPATH "glog library." FORCE)
@@ -47,7 +47,7 @@ ExternalProject_Add(
     SOURCE_DIR ${GLOG_SOURCE_DIR}
     CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
                -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-               "-DCMAKE_CXX_FLAGS=${GLOG_CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+               -DCMAKE_CXX_FLAGS=${GLOG_CMAKE_CXX_FLAGS}
               -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
               -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
               -DCMAKE_C_FLAGS=${CMAKE_C_FLAGS}
diff --git a/cmake/external/grpc.cmake b/cmake/external/grpc.cmake
index bd2f4d11ed2cb2cb5b40ea237c0cdcbdcdd2bab7..536e95c1dc2a4fe6545bd5d3147631aa26cdda98 100644
--- a/cmake/external/grpc.cmake
+++ b/cmake/external/grpc.cmake
@@ -28,7 +28,7 @@ IF(APPLE)
     SET(GRPC_INSTALL_CMD make prefix=${GRPC_INSTALL_DIR} install)
 ELSE()
     SET(GRPC_CFLAGS "-Wno-error -std=c11 ${CLFAGS}")
-    SET(GRPC_CXXFLAGS "-Wno-error -std=c++11 ${CXXFLAGS} -D_GLIBCXX_USE_CXX11_ABI=0")
+    SET(GRPC_CXXFLAGS "-Wno-error -std=c++11 ${CXXFLAGS}")
     SET(BUILD_CMD make CFLAGS=${GRPC_CFLAGS} CXXFLAGS=${GRPC_CXXFLAGS} HAS_SYSTEM_PROTOBUF=false -s -j ${NUM_OF_PROCESSOR} static grpc_cpp_plugin)
     SET(GRPC_INSTALL_CMD make prefix=${GRPC_INSTALL_DIR} install CFLAGS=${GRPC_CFLAGS} CXXFLAGS=${GRPC_CXXFLAGS})
 ENDIF()
diff --git a/cmake/external/openblas.cmake b/cmake/external/openblas.cmake
index f459bbfd474bfd0792b2a904704454605f064442..19ba6d15c59ea802cc94ea6138871c15cb49077b 100644
--- a/cmake/external/openblas.cmake
+++ b/cmake/external/openblas.cmake
@@ -17,7 +17,7 @@ INCLUDE(ExternalProject)
 SET(CBLAS_PREFIX_DIR ${THIRD_PARTY_PATH}/openblas)
 SET(CBLAS_SOURCE_DIR ${THIRD_PARTY_PATH}/openblas/src/extern_openblas)
 SET(CBLAS_INSTALL_DIR ${THIRD_PARTY_PATH}/install/openblas)
-SET(CBLAS_REPOSITORY https://gitee.com/tianjianhe/OpenBLAS.git)
+SET(CBLAS_REPOSITORY ${GIT_URL}/xianyi/OpenBLAS.git)
 SET(CBLAS_TAG v0.3.7)
 if(WITH_MIPS)
   SET(CBLAS_TAG v0.3.13)
diff --git a/cmake/external/protobuf.cmake b/cmake/external/protobuf.cmake
index dd0de0d086eea7414525d5214c35556e9337b4f0..905c17b9304ae7d8e2d7bb4b059ff7208511ec16 100644
--- a/cmake/external/protobuf.cmake
+++ b/cmake/external/protobuf.cmake
@@ -183,7 +183,7 @@ FUNCTION(build_protobuf TARGET_NAME BUILD_FOR_HOST)
         "-DCMAKE_C_FLAGS=${CMAKE_C_FLAGS}"
         "-DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG}"
         "-DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE}"
-        "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+        "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}"
         "-DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}"
         "-DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}"
         "-Dprotobuf_WITH_ZLIB=ON"
@@ -198,8 +198,8 @@ FUNCTION(build_protobuf TARGET_NAME BUILD_FOR_HOST)
         "-Dprotobuf_MSVC_STATIC_RUNTIME=${MSVC_STATIC_CRT}")
     ENDIF()
 
-    SET(PROTOBUF_REPOSITORY https://gitee.com/tianjianhe/protobuf.git)
-    SET(PROTOBUF_TAG v3.8.0)
+    SET(PROTOBUF_REPOSITORY ${GIT_URL}/protocolbuffers/protobuf.git)
+    SET(PROTOBUF_TAG 9f75c5aa851cd877fb0d93ccc31b8567a6706546)
 
     cache_third_party(${TARGET_NAME}
         REPOSITORY ${PROTOBUF_REPOSITORY}
@@ -234,7 +234,7 @@ FUNCTION(build_protobuf TARGET_NAME BUILD_FOR_HOST)
     )
 ENDFUNCTION()
 
-# SET(PROTOBUF_VERSION 3.1.0)
+SET(PROTOBUF_VERSION 3.1.0)
 
 IF(NOT PROTOBUF_FOUND)
     build_protobuf(extern_protobuf FALSE)
diff --git a/cmake/external/pybind11.cmake b/cmake/external/pybind11.cmake
index c6be74811d0540712f90f8069e323c1024af1278..69bd68c27784974b68208dbdca8c546b410c7baa 100644
--- a/cmake/external/pybind11.cmake
+++ b/cmake/external/pybind11.cmake
@@ -16,8 +16,8 @@ include(ExternalProject)
 
 set(PYBIND_PREFIX_DIR ${THIRD_PARTY_PATH}/pybind)
 set(PYBIND_SOURCE_DIR ${THIRD_PARTY_PATH}/pybind/src/extern_pybind)
-SET(PYBIND_REPOSITORY https://gitee.com/tianjianhe/pybind11.git)
-SET(PYBIND_TAG v2.6.0)
+SET(PYBIND_REPOSITORY ${GIT_URL}/pybind/pybind11.git)
+SET(PYBIND_TAG v2.4.3)
 
 cache_third_party(extern_pybind
     REPOSITORY ${PYBIND_REPOSITORY}
diff --git a/cmake/external/warpctc.cmake b/cmake/external/warpctc.cmake
index e5d79cf558c1f3fae96d44330c05766f3202e3eb..520e40219061b2c8da469a6ce419d6ee36a8da95 100644
--- a/cmake/external/warpctc.cmake
+++ b/cmake/external/warpctc.cmake
@@ -19,7 +19,7 @@
 SET(WARPCTC_SOURCE_DIR ${THIRD_PARTY_PATH}/warpctc/src/extern_warpctc)
 SET(WARPCTC_INSTALL_DIR ${THIRD_PARTY_PATH}/install/warpctc)
 set(WARPCTC_REPOSITORY https://gitee.com/tianjianhe/warp-ctc.git)
 set(WARPCTC_TAG 95a461eddeabd51099ef059dcfada1117eb1bfb8)
-# set(WARPCTC_TAG bc29dcfff07ced1c7a19a4ecee48e5ad583cef8e)
+set(WARPCTC_REPOSITORY ${GIT_URL}/baidu-research/warp-ctc.git)
 
 SET(WARPCTC_INCLUDE_DIR "${WARPCTC_INSTALL_DIR}/include" CACHE PATH "Warp-ctc Directory" FORCE)
@@ -53,7 +53,7 @@ ExternalProject_Add(
                -DCMAKE_C_FLAGS=${CMAKE_C_FLAGS}
                -DCMAKE_C_FLAGS_DEBUG=${CMAKE_C_FLAGS_DEBUG}
                -DCMAKE_C_FLAGS_RELEASE=${CMAKE_C_FLAGS_RELEASE}
-               "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+               "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}"
               -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE}
               -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG}
               -DCMAKE_INSTALL_PREFIX=${WARPCTC_INSTALL_DIR}
diff --git a/cmake/external/xbyak.cmake b/cmake/external/xbyak.cmake
index c4c04c98bcba037c9dcaf9404a38242252e8104f..6627c4eed112f2eead20fc5831b190488406b8f3 100644
--- a/cmake/external/xbyak.cmake
+++ b/cmake/external/xbyak.cmake
@@ -19,7 +19,7 @@ set(XBYAK_PREFIX_DIR ${THIRD_PARTY_PATH}/xbyak)
 SET(XBYAK_SOURCE_DIR ${THIRD_PARTY_PATH}/xbyak/src/extern_xbyak)
 set(XBYAK_INSTALL_ROOT ${THIRD_PARTY_PATH}/install/xbyak)
 set(XBYAK_INC_DIR ${XBYAK_INSTALL_ROOT}/include)
-set(XBYAK_REPOSITORY https://gitee.com/tianjianhe/xbyak.git)
+set(XBYAK_REPOSITORY ${GIT_URL}/herumi/xbyak.git)
 set(XBYAK_TAG v5.661)  # Jul 26th
 
 include_directories(${XBYAK_INC_DIR})
diff --git a/cmake/external/xxhash.cmake b/cmake/external/xxhash.cmake
index 4033237b9eeebfaff60a7bccfe018ac2da1b6b2e..bdd7df190ff106178266fbd47716e7d70fd229bd 100644
--- a/cmake/external/xxhash.cmake
+++ b/cmake/external/xxhash.cmake
@@ -18,7 +18,7 @@
 set(XXHASH_PREFIX_DIR ${THIRD_PARTY_PATH}/xxhash)
 set(XXHASH_SOURCE_DIR ${THIRD_PARTY_PATH}/xxhash/src/extern_xxhash)
 set(XXHASH_INSTALL_DIR ${THIRD_PARTY_PATH}/install/xxhash)
 set(XXHASH_INCLUDE_DIR "${XXHASH_INSTALL_DIR}/include")
-set(XXHASH_REPOSITORY https://gitee.com/tianjianhe/xxHash.git)
+set(XXHASH_REPOSITORY ${GIT_URL}/Cyan4973/xxHash.git)
 set(XXHASH_TAG v0.6.5)
 
 cache_third_party(extern_xxhash
diff --git a/cmake/external/zlib.cmake b/cmake/external/zlib.cmake
index 334fe5c355af6bd6c85558b1ce0ea97dc0551ffd..4464787a0c2a64066585e8f308c68a62286478e9 100644
--- a/cmake/external/zlib.cmake
+++ b/cmake/external/zlib.cmake
@@ -19,7 +19,7 @@
 SET(ZLIB_SOURCE_DIR ${THIRD_PARTY_PATH}/zlib/src/extern_zlib)
 SET(ZLIB_INSTALL_DIR ${THIRD_PARTY_PATH}/install/zlib)
 SET(ZLIB_ROOT ${ZLIB_INSTALL_DIR} CACHE FILEPATH "zlib root directory." FORCE)
 SET(ZLIB_INCLUDE_DIR "${ZLIB_INSTALL_DIR}/include" CACHE PATH "zlib include directory." FORCE)
-set(ZLIB_REPOSITORY https://gitee.com/tianjianhe/zlib.git)
+set(ZLIB_REPOSITORY ${GIT_URL}/madler/zlib.git)
 set(ZLIB_TAG v1.2.8)
 
 INCLUDE_DIRECTORIES(${ZLIB_INCLUDE_DIR}) # For zlib code to include its own headers.
@@ -41,7 +41,7 @@ ExternalProject_Add(
     CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
                -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
                -DCMAKE_C_FLAGS=${CMAKE_C_FLAGS}
-               "-DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0"
+               -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
               -DCMAKE_INSTALL_PREFIX=${ZLIB_INSTALL_DIR}
               -DBUILD_SHARED_LIBS=OFF
               -DCMAKE_POSITION_INDEPENDENT_CODE=ON
diff --git a/paddle/fluid/framework/fleet/CMakeLists.txt b/paddle/fluid/framework/fleet/CMakeLists.txt
index 4d0cfb629763f72cc5059e37149fd1e676811d42..6052ec4957c523470ba4db2edf2a9b77382c51f0 100644
--- a/paddle/fluid/framework/fleet/CMakeLists.txt
+++ b/paddle/fluid/framework/fleet/CMakeLists.txt
@@ -33,5 +33,5 @@ cc_library(heter_wrapper SRCS heter_wrapper.cc DEPS framework_proto device_context)
 cc_test(test_fleet_cc SRCS test_fleet.cc DEPS fleet_wrapper gloo_wrapper fs shell)
 
 if(WITH_ASCEND)
-  cc_library(ascend_wrapper SRCS ascend_wrapper.cc DEPS framework_proto lod_tensor ascend ascend_graph)
+  cc_library(ascend_wrapper SRCS ascend_wrapper.cc DEPS framework_proto lod_tensor ascend_ge ascend_graph)
 endif(WITH_ASCEND)
diff --git a/paddle/fluid/framework/fleet/ascend_wrapper.h b/paddle/fluid/framework/fleet/ascend_wrapper.h
index da79fccb8ca69fac0f34f8092f296b9923e5f849..a44466ca1057b6447009a0be5e7bb269b6484012 100644
--- a/paddle/fluid/framework/fleet/ascend_wrapper.h
+++ b/paddle/fluid/framework/fleet/ascend_wrapper.h
@@ -37,7 +37,6 @@ limitations under the License. */
 
 namespace paddle {
 namespace framework {
-// typedef std::vector AscendGraphDesc;
 typedef ge::Graph AscendGraphDesc;
 
 class AscendInstance {
@@ -45,17 +44,31 @@ class AscendInstance {
   virtual ~AscendInstance() {}
   AscendInstance() {}
 
-  std::map<std::string, std::string> GetDefaultInitSessionOptions() {
-    std::map<std::string, std::string> init_options;
-    init_options["a"] = "b";
-    init_options["ge.trainFlag"] = "1";
-    return init_options;
+  std::map<ge::AscendString, ge::AscendString> GetDefaultInitOptions() {
+    std::map<ge::AscendString, ge::AscendString> init_options;
+    init_options["ge.exec.deviceId"] = "0";
+    init_options["ge.graphRunMode"] = "1";
+    return init_options;
+  }
+
+  std::map<ge::AscendString, ge::AscendString> GetDefaultInitSessionOptions() {
+    std::map<ge::AscendString, ge::AscendString> init_options;
+    init_options["a"] = "b";
+    init_options["ge.trainFlag"] = "1";
+    return init_options;
+  }
+
+  ge::Status InitGEForUT() {
+    return ge::GEInitialize(GetDefaultInitOptions());
   }
 
-  // add other parameters here to init
   void InitGlobalResouces() {
-    session_.reset(new ge::Session(GetDefaultInitSessionOptions()));
-    VLOG(1) << "InitGlobalResouces Done";
+    LOG(INFO) << "Begin InitGlobalResouces";
+    session_.reset(new ge::Session(GetDefaultInitSessionOptions()));
+    if (session_ == nullptr) {
+      LOG(FATAL) << "new session error:" << session_;
+    }
+    LOG(INFO) << "End InitGlobalResouces";
   }
 
   static std::shared_ptr<AscendInstance> GetInstance() {
diff --git a/paddle/fluid/pybind/ascend_wrapper_py.cc b/paddle/fluid/pybind/ascend_wrapper_py.cc
index 00eca380859527ccf71f03b0e677702750e049b7..26835ac1d70a5cd42b66c124e3a8f7b324b08fc1 100644
--- a/paddle/fluid/pybind/ascend_wrapper_py.cc
+++ b/paddle/fluid/pybind/ascend_wrapper_py.cc
@@ -33,6 +33,7 @@ limitations under the License. */
 #include
 #include "paddle/fluid/framework/fleet/ascend_wrapper.h"
 #include "paddle/fluid/pybind/ascend_wrapper_py.h"
+#include "paddle/fluid/platform/enforce.h"
 
 using namespace ge;  // NOLINT
 namespace py = pybind11;
@@ -51,9 +52,22 @@ void BindAscendWrapper(py::module *m) {
            py::call_guard<py::gil_scoped_release>());
 }  // end AscendWrapper
 
-Status ge_initialize(std::map<std::string, std::string> &options) {  // NOLINT
+std::map<ge::AscendString, ge::AscendString> convert_map(const std::map<std::string, std::string> &options) {
+  std::map<ge::AscendString, ge::AscendString> rets;
+  for (auto &option : options) {
+    ge::AscendString key = option.first.c_str();
+    ge::AscendString val = option.second.c_str();
+    rets[key] = val;
+  }
+  return rets;
+}
+
+ge::Status ge_initialize(std::map<std::string, std::string> &options) {  // NOLINT
   py::gil_scoped_release release;
-  Status res = GEInitialize(options);
+  auto init_options = convert_map(options);
+  ge::Status res = ge::GEInitialize(init_options);
+  PADDLE_ENFORCE_EQ(res,
+                    ge::SUCCESS, platform::errors::Fatal("ge init error:%d", res));
  py::gil_scoped_acquire acquire;
  return res;
 }
@@ -214,36 +228,34 @@ void BindAscendGraph(py::module *m) {
   // class bindings
   py::class_<ge::Session>(*m, "GESession")
-      .def(py::init<const std::map<std::string, std::string> &>())
+      .def(py::init([](const std::map<std::string, std::string> &options) {
+        return std::unique_ptr<ge::Session>(new ge::Session(convert_map(options)));
+      }))
       .def("add_graph",
-           (Status (Session::*)(uint32_t, const Graph &)) & Session::AddGraph)
+           (ge::Status (Session::*)(uint32_t, const Graph &)) & Session::AddGraph)
       .def("add_graph",
-           (Status (Session::*)(uint32_t, const Graph &,
-                                const std::map<std::string, std::string> &)) &
-               Session::AddGraph)
+           [](Session &ss, uint32_t index, const Graph &graph,
+              const std::map<std::string, std::string> &options) {
+             return ss.AddGraph(index, graph, convert_map(options));
+           })
       .def("remove_graph", &Session::RemoveGraph)
      .def("run_graph",
           [](Session &ss, uint32_t graphId,
             const std::vector<Tensor> &inputs) -> py::tuple {
            std::vector<Tensor> outputs;
-            Status res = ss.RunGraph(graphId, inputs, outputs);
+            ge::Status res = ss.RunGraph(graphId, inputs, outputs);
            return py::make_tuple(outputs, res);
          },
          py::call_guard<py::gil_scoped_release>())
.def("build_graph", &Session::BuildGraph) .def("run_graph_async", &Session::RunGraphAsync) - .def("register_call_back_func", - (Status (Session::*)( // NOLINT - const std::string &, - std::function ¶ms_list)>)) & - Session::RegisterCallBackFunc) + .def("register_call_back_func", + static_cast(&ge::Session::RegisterCallBackFunc)) .def("is_graph_need_rebuild", &Session::IsGraphNeedRebuild); py::class_(*m, "GEGraph") .def(py::init<>()) - .def(py::init()) + .def(py::init()) .def("set_inputs", &Graph::SetInputs) .def("set_outputs", (Graph & (Graph::*)(const std::vector &)) & Graph::SetOutputs) @@ -253,110 +265,121 @@ void BindAscendGraph(py::module *m) { Graph::SetOutputs) .def("set_outputs", (Graph & - (Graph::*)(const std::vector> + (Graph::*)(const std::vector> &)) & Graph::SetOutputs) .def("set_targets", &Graph::SetTargets) .def("is_valid", &Graph::IsValid) .def("add_op", &Graph::AddOp) .def("find_op_by_name", - [](Graph &graph, const std::string &name) -> py::tuple { + [](Graph &graph, const char* name) -> py::tuple { ge::Operator op; graphStatus status = graph.FindOpByName(name, op); return py::make_tuple(op, status); }) .def("find_op_by_type", - [](Graph &graph, const std::string &type) -> py::tuple { + [](Graph &graph, const char * type) -> py::tuple { std::vector ops; graphStatus status = graph.FindOpByType(type, ops); return py::make_tuple(ops, status); }) .def("get_all_op_name", [](Graph &graph) -> py::tuple { - std::vector op_name; + std::vector op_name; graphStatus status = graph.GetAllOpName(op_name); return py::make_tuple(op_name, status); }) - .def("save_to_file", &Graph::SaveToFile) - .def("load_from_file", &Graph::LoadFromFile) - .def("get_name", &Graph::GetName) + .def("save_to_file", static_cast(&ge::Graph::SaveToFile)) + .def("load_from_file", static_cast(&Graph::LoadFromFile)) + .def("get_name", static_cast(&Graph::GetName)) .def("set_need_iteration", &Graph::SetNeedIteration); py::class_(*m, "GEOperator") .def(py::init<>()) - .def(py::init()) - .def(py::init()) + .def(py::init()) + .def(py::init()) .def("is_empty", &Operator::IsEmpty) - .def("get_name", &Operator::GetName) - .def("get_op_type", &Operator::GetOpType) + .def("get_name", + static_cast(&Operator::GetName)) + .def("get_op_type", + static_cast(&Operator::GetOpType)) .def("set_input", - (Operator & (Operator::*)(const std::string &, const Operator &)) & + (Operator & (Operator::*)(const char*, const Operator &)) & Operator::SetInput) .def("set_input", - (Operator & (Operator::*)(const std::string &, const Operator &, - const std::string &)) & + (Operator & (Operator::*)(const char *, const Operator &, + const char *)) & Operator::SetInput) - .def("set_input", (Operator & (Operator::*)(const std::string &, + .def("set_input", (Operator & (Operator::*)(const char *, const Operator &, uint32_t)) & Operator::SetInput) .def("add_control_input", &Operator::AddControlInput) .def("get_input_const_data", - [](Operator &op, const std::string &dst_name) -> py::tuple { + [](Operator &op, const char* dst_name) -> py::tuple { Tensor data; graphStatus res = op.GetInputConstData(dst_name, data); return py::make_tuple(data, res); }) .def("get_input_desc", - (TensorDesc (Operator::*)(const std::string &) const) & - Operator::GetInputDesc) + (TensorDesc (Operator::*)(uint32_t) const) & Operator::GetInputDesc) .def("get_input_desc", - (TensorDesc (Operator::*)(uint32_t) const) & Operator::GetInputDesc) - .def("get_dynamic_output_num", &Operator::GetDynamicOutputNum) - .def("get_dynamic_input_num", &Operator::GetDynamicInputNum) + 
[](Operator& op, const std::string& name){ + return op.GetInputDescByName(name.c_str()); + }) + .def("get_dynamic_output_num", static_cast(&Operator::GetDynamicOutputNum)) + .def("get_dynamic_input_num", static_cast(&Operator::GetDynamicInputNum)) .def("try_get_input_desc", - [](Operator &op, const std::string &name) -> py::tuple { + [](Operator &op, const char* name) -> py::tuple { TensorDesc tensor_desc; graphStatus status = op.TryGetInputDesc(name, tensor_desc); return py::make_tuple(tensor_desc, status); }) - .def("update_input_desc", &Operator::UpdateInputDesc) + .def("update_input_desc", + static_cast(&Operator::UpdateInputDesc)) .def("get_output_desc", - (TensorDesc (Operator::*)(const std::string &) const) & - Operator::GetOutputDesc) + [](Operator& op, const std::string& name) { + return op.GetOutputDescByName(name.c_str()); + }) .def("get_output_desc", (TensorDesc (Operator::*)(uint32_t) const) & Operator::GetOutputDesc) - .def("update_output_desc", &Operator::UpdateOutputDesc) - .def("get_dynamic_input_desc", &Operator::GetDynamicInputDesc) - .def("update_dynamic_input_desc", &Operator::UpdateDynamicInputDesc) - .def("get_dynamic_output_desc", &Operator::GetDynamicOutputDesc) - .def("update_dynamic_output_desc", &Operator::UpdateDynamicOutputDesc) + .def("update_output_desc", + static_cast(&Operator::UpdateOutputDesc)) + .def("get_dynamic_input_desc", + static_cast(&Operator::GetDynamicInputDesc)) + .def("update_dynamic_input_desc", + static_cast(&Operator::UpdateDynamicInputDesc)) + .def("get_dynamic_output_desc", + static_cast(&Operator::GetDynamicOutputDesc)) + .def("update_dynamic_output_desc", + static_cast(&Operator::UpdateDynamicOutputDesc)) .def("infer_shape_and_type", &Operator::InferShapeAndType) .def("set_inference_context", &Operator::SetInferenceContext) .def("get_inference_context", &Operator::GetInferenceContext) .def("verify_all_attr", &Operator::VerifyAllAttr) .def("get_inputs_size", &Operator::GetInputsSize) .def("get_outputs_size", &Operator::GetOutputsSize) - .def("get_all_attr_names_and_types", &Operator::GetAllAttrNamesAndTypes) + .def("get_all_attr_names_and_types", + static_cast&) const>(&Operator::GetAllAttrNamesAndTypes)) .def("set_attr_int64", - [](Operator &op, const std::string &name, + [](Operator &op, const char* name, int64_t value) -> Operator & { int64_t tar = (int64_t)value; return op.SetAttr(name, tar); }) .def("set_attr_int32", - [](Operator &op, const std::string &name, + [](Operator &op, const char* name, int32_t value) -> Operator & { int32_t tar = (int32_t)value; return op.SetAttr(name, tar); }) .def("set_attr_uint32", - [](Operator &op, const std::string &name, + [](Operator &op, const char* name, uint32_t value) -> Operator & { uint32_t tar = (uint32_t)value; return op.SetAttr(name, tar); }) .def("set_attr_vec_int64", - [](Operator &op, const std::string &name, + [](Operator &op, const char* name, const std::vector &value) -> Operator & { int len = value.size(); std::vector tar; @@ -368,7 +391,7 @@ void BindAscendGraph(py::module *m) { return op.SetAttr(name, tar); }) .def("set_attr_vec_int32", - [](Operator &op, const std::string &name, + [](Operator &op, const char * name, const std::vector &value) -> Operator & { int len = value.size(); std::vector tar; @@ -380,7 +403,7 @@ void BindAscendGraph(py::module *m) { return op.SetAttr(name, tar); }) .def("set_attr_vec_uint32", - [](Operator &op, const std::string &name, + [](Operator &op, const char* name, const std::vector &value) -> Operator & { int len = value.size(); std::vector tar; @@ 
@@ -392,21 +415,21 @@ void BindAscendGraph(py::module *m) {
            return op.SetAttr(name, tar);
          })
       .def("set_attr_list_int64",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             std::initializer_list<int64_t> &attrValue) -> Operator & {
            return op.SetAttr(name, std::move(attrValue));
          })
       .def("set_attr_attrvalue",
-           [](Operator &op, const std::string &name, AttrValue &attrValue)
+           [](Operator &op, const char *name, AttrValue &attrValue)
              -> Operator & { return op.SetAttr(name, std::move(attrValue)); })
       .def(
          "set_attr_float",
-          [](Operator &op, const std::string &name, float value) -> Operator & {
+          [](Operator &op, const char *name, float value) -> Operator & {
           float tar = static_cast<float>(value);
           return op.SetAttr(name, tar);
         })
       .def("set_attr_vec_float",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             const std::vector<float> &value) -> Operator & {
            int len = value.size();
            std::vector<float> tar;
@@ -417,22 +440,22 @@ void BindAscendGraph(py::module *m) {
            }
            return op.SetAttr(name, tar);
          })
-      .def("set_attr_string", (Operator & (Operator::*)(const std::string &,
-                                                        const std::string &)) &
+      .def("set_attr_string", (Operator & (Operator::*)(const char *,
+                                                        const char *)) &
                                  Operator::SetAttr)
       .def("set_attr_vec_string",
-           (Operator & (Operator::*)(const std::string &,
-                                     const std::vector<std::string> &)) &
+           (Operator & (Operator::*)(const char *,
+                                     const std::vector<AscendString> &)) &
              Operator::SetAttr)
       .def("set_attr_bool",
-           [](Operator &op, const std::string &name, bool value) -> Operator & {
+           [](Operator &op, const char *name, bool value) -> Operator & {
            if (value)
              return op.SetAttr(name, true);
            else
              return op.SetAttr(name, false);
          })
       .def("set_attr_vec_bool",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             const std::vector<bool> &value) -> Operator & {
            int len = value.size();
            std::vector<bool> tar;
@@ -445,14 +468,14 @@ void BindAscendGraph(py::module *m) {
            return op.SetAttr(name, tar);
          })
       .def("set_attr_tensor",
-           (Operator & (Operator::*)(const std::string &, const Tensor &)) &
+           (Operator & (Operator::*)(const char *, const Tensor &)) &
              Operator::SetAttr)
       .def("set_attr_vec_tensor",
          (Operator &
-            (Operator::*)(const std::string &, const std::vector<Tensor> &)) &
+            (Operator::*)(const char *, const std::vector<Tensor> &)) &
              Operator::SetAttr)
       .def("set_attr_vec_uint8",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             const std::vector<uint8_t> &value) -> Operator & {
            int len = value.size();
            std::vector<uint8_t> tar;
@@ -465,11 +488,11 @@ void BindAscendGraph(py::module *m) {
          })
       .def("set_attr_vec_vec_int64",
          (Operator &
-            (Operator::*)(const std::string &,
+            (Operator::*)(const char *,
                          const std::vector<std::vector<int64_t>> &)) &
              Operator::SetAttr)
       .def("set_attr_vec_dtype",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             const std::vector<DataType> &value) -> Operator & {
            int len = value.size();
            std::vector<ge::DataType> tar;
@@ -481,14 +504,14 @@ void BindAscendGraph(py::module *m) {
            return op.SetAttr(name, tar);
          })
       .def("set_attr_dtype",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             const DataType &value) -> Operator & {
            ge::DataType tar = (ge::DataType)value;
            return op.SetAttr(name, tar);
          })
       .def("get_attr",
-           [](Operator &op, const std::string &name,
+           [](Operator &op, const char *name,
             AttrType type) -> py::tuple {
            graphStatus res = -1;
            switch (type) {
@@ -538,12 +561,12 @@ void BindAscendGraph(py::module *m) {
                return py::make_tuple(o_av, res);
              } break;
              case AT_STRING: {
-                std::string s_av;
+                ge::AscendString s_av;
                res = op.GetAttr(name, s_av);
                return py::make_tuple(s_av, res);
              } break;
              case AT_LIST_STRING: {
-                std::vector<std::string> v_s_av;
+                std::vector<ge::AscendString> v_s_av;
                res = op.GetAttr(name, v_s_av);
                return py::make_tuple(v_s_av, res);
              } break;
@@ -594,11 +617,11 @@ void BindAscendGraph(py::module *m) {
          })
       .def("break_connect", &Operator::BreakConnect)
       .def("get_subgraph_names_count", &Operator::GetSubgraphNamesCount)
-      .def("get_subgraph_names", &Operator::GetSubgraphNames)
-      .def("get_subgraph_builder", &Operator::GetSubgraphBuilder)
-      .def("get_subgraph", &Operator::GetSubgraph)
-      .def("get_dynamic_subgraph_builder", &Operator::GetDynamicSubgraphBuilder)
-      .def("get_dynamic_subgraph", &Operator::GetDynamicSubgraph);
+      .def("get_subgraph_names",
+           static_cast<graphStatus (Operator::*)(std::vector<AscendString> &) const>(
+               &Operator::GetSubgraphNames))
+      .def("get_subgraph_builder",
+           static_cast<SubgraphBuilder (Operator::*)(const char *) const>(
+               &Operator::GetSubgraphBuilder))
+      .def("get_subgraph",
+           static_cast<Graph (Operator::*)(const char *) const>(
+               &Operator::GetSubgraph))
+      .def("get_dynamic_subgraph_builder",
+           static_cast<SubgraphBuilder (Operator::*)(const char *, uint32_t) const>(
+               &Operator::GetDynamicSubgraphBuilder))
+      .def("get_dynamic_subgraph",
+           static_cast<Graph (Operator::*)(const char *, uint32_t) const>(
+               &Operator::GetDynamicSubgraph));
 
   py::class_<Tensor>(*m, "GETensor")
       .def(py::init<>())
@@ -614,9 +637,9 @@ void BindAscendGraph(py::module *m) {
       .def("set_data", (graphStatus (Tensor::*)(const uint8_t *, size_t)) &
                           Tensor::SetData)
       .def("set_data",
-           (graphStatus (Tensor::*)(const std::string &)) & Tensor::SetData)
+           (graphStatus (Tensor::*)(const char *)) & Tensor::SetData)
       .def("set_data",
-           (graphStatus (Tensor::*)(const std::vector<std::string> &)) &
+           (graphStatus (Tensor::*)(const std::vector<AscendString> &)) &
              Tensor::SetData)
       .def("get_data",
@@ -639,7 +662,7 @@ void BindAscendGraph(py::module *m) {
          py::arg("format") = FORMAT_ND, py::arg("dt") = DT_FLOAT)
       .def(py::init<const TensorDesc &>())
       .def("update",
-           (void (TensorDesc::*)(Shape, Format, DataType)) & TensorDesc::Update,
+           (void (TensorDesc::*)(const Shape &, Format, DataType)) & TensorDesc::Update,
          py::arg("shape"), py::arg("format") = FORMAT_ND,
          py::arg("dt") = DT_FLOAT)
       .def("set_shape", &TensorDesc::SetShape)
@@ -660,8 +683,8 @@ void BindAscendGraph(py::module *m) {
       .def("get_origin_format", &TensorDesc::GetOriginFormat)
       .def("set_data_type", &TensorDesc::SetDataType)
       .def("get_data_type", &TensorDesc::GetDataType)
-      .def("set_name", &TensorDesc::SetName)
-      .def("get_name", &TensorDesc::GetName)
+      .def("set_name",
+           static_cast<void (TensorDesc::*)(const char *)>(&TensorDesc::SetName))
+      .def("get_name",
+           static_cast<graphStatus (TensorDesc::*)(AscendString &)>(
+               &TensorDesc::GetName))
       .def("set_size", &TensorDesc::SetSize)
       .def("get_size", &TensorDesc::GetSize)
       .def("set_real_dim_cnt", &TensorDesc::SetRealDimCnt)
@@ -679,14 +702,16 @@ void BindAscendGraph(py::module *m) {
   py::class_<AttrValue>(*m, "GEAttrValue").def(py::init<>());
 
   py::class_<OperatorFactory>(*m, "GEOperatorFactory")
-      .def("create_operator", &OperatorFactory::CreateOperator)
+      .def_static("create_operator",
+                  static_cast<ge::Operator (*)(const char *, const char *)>(
+                      &ge::OperatorFactory::CreateOperator))
       .def("get_ops_type_list",
          []() -> py::tuple {
-            std::vector<std::string> all_ops;
+            std::vector<AscendString> all_ops;
            graphStatus status = OperatorFactory::GetOpsTypeList(all_ops);
            return py::make_tuple(all_ops, status);
          })
-      .def("is_exist_op", &OperatorFactory::IsExistOp);
+      .def_static("is_exist_op",
+                  static_cast<bool (*)(const char *)>(&OperatorFactory::IsExistOp));
 }
 }  // end namespace pybind
diff --git a/paddle/fluid/pybind/op_function_generator.cc b/paddle/fluid/pybind/op_function_generator.cc
index 349162c2e5aeb55b7dfc79a482a54118614051d9..3ac1116fde5d538e190431d3a96dd737761181f1 100644
--- a/paddle/fluid/pybind/op_function_generator.cc
+++ b/paddle/fluid/pybind/op_function_generator.cc
@@ -16,6 +16,7 @@
 #include
 #include
 #include
+#include
 #include "paddle/fluid/framework/op_info.h"
"paddle/fluid/framework/op_registry.h" @@ -23,6 +24,9 @@ #include "paddle/fluid/framework/variable.h" #include "paddle/fluid/pybind/pybind.h" #include "paddle/fluid/string/string_helper.h" +#ifdef PADDLE_WITH_ASCEND +#include "paddle/fluid/framework/fleet/ascend_wrapper.h" +#endif // NOTE(zhiqiu): Commonly, the inputs in auto-generated OP function are // determined by the OP`s proto automatically, i.e., all the inputs registered @@ -444,6 +448,11 @@ int main(int argc, char* argv[]) { return -1; } +#ifdef PADDLE_WITH_ASCEND + auto ascend_ptr = paddle::framework::AscendInstance::GetInstance(); + ascend_ptr->InitGEForUT(); +#endif + std::vector headers{"\"paddle/fluid/imperative/tracer.h\""}; std::ofstream out(argv[1], std::ios::out); @@ -473,5 +482,9 @@ int main(int argc, char* argv[]) { << "} // namespace paddle\n"; out.close(); + +#ifdef PADDLE_WITH_ASCEND + ge::GEFinalize(); +#endif return 0; }