diff --git a/.gitignore b/.gitignore index f963c2660dfa2f69f47b2c8f1092d42bb5d485ee..6aae076a49012b032b8fc0f1dc02c2714fb7b4a3 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ build/ .pydevproject Makefile .test_env/ +third_party/ *~ bazel-* diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index f635e65784af47a21df80cc92073ef14eba9a731..0000000000000000000000000000000000000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "warp-ctc"] - path = warp-ctc - url = https://github.com/baidu-research/warp-ctc.git diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b9902a863d864b28f0fad0fefe64248e356010e4..a6e45028ebc3f53ea20806f0dd2a7acc820607fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ sha: c25201a00e6b0514370501050cf2a8538ac12270 hooks: - id: remove-crlf - files: (?!.*warp-ctc)^.*$ + files: (?!.*third_party)^.*$ - repo: https://github.com/reyoung/mirrors-yapf.git sha: v0.13.2 hooks: @@ -15,7 +15,7 @@ - id: check-merge-conflict - id: check-symlinks - id: detect-private-key - files: (?!.*warp-ctc)^.*$ + files: (?!.*third_party)^.*$ - id: end-of-file-fixer - repo: https://github.com/PaddlePaddle/clang-format-pre-commit-hook.git sha: 28c0ea8a67a3e2dbbf4822ef44e85b63a0080a29 diff --git a/.travis.yml b/.travis.yml index 047ca6ffe79bdaf013f6ef6dbf1a82bdb2f1f2b3..426f0eb746d6f59503cc9a4cf886b094d376aed1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,16 +28,8 @@ addons: - python - python-pip - python2.7-dev - - m4 - - python-numpy - - python-wheel - - libgoogle-glog-dev - - libgflags-dev - - libgtest-dev - curl - - lcov - graphviz - - swig - clang-format-3.8 - automake - libtool @@ -53,10 +45,10 @@ before_install: fi fi fi - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo paddle/scripts/travis/before_install.linux.sh; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then paddle/scripts/travis/before_install.osx.sh; fi - if [[ "$JOB" == "PRE_COMMIT" ]]; then sudo ln -s /usr/bin/clang-format-3.8 /usr/bin/clang-format; fi - - pip install wheel protobuf sphinx recommonmark virtualenv numpy sphinx_rtd_theme pre-commit requests==2.9.2 LinkChecker + - pip install --upgrade pip + - pip install wheel protobuf sphinx recommonmark sphinx_rtd_theme virtualenv pre-commit requests==2.9.2 LinkChecker script: - paddle/scripts/travis/main.sh notifications: diff --git a/CMakeLists.txt b/CMakeLists.txt index 65fbbb481c432f7b905f4dec7ea39c51ec853ae8..9ed757bd1bfbd23ca24445c15e7cf8e13860d26f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,166 +1,89 @@ -cmake_minimum_required(VERSION 2.8) +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License + +cmake_minimum_required(VERSION 3.0) project(paddle CXX C) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake") set(PROJ_ROOT ${CMAKE_SOURCE_DIR}) -include(package) -find_package(SWIG 2.0) -find_package(CUDA QUIET) -find_package(Protobuf REQUIRED) -# Check protobuf library version. 
-execute_process(COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --version - OUTPUT_VARIABLE PROTOBUF_VERSION) -string(REPLACE "libprotoc " "" PROTOBUF_VERSION ${PROTOBUF_VERSION}) +find_package(Sphinx) +find_package(CUDA QUIET) +find_package(Git REQUIRED) +find_package(Threads REQUIRED) -set(PROTOBUF_3 OFF) -if (${PROTOBUF_VERSION} VERSION_GREATER "3.0.0" OR ${PROTOBUF_VERSION} VERSION_EQUAL "3.0.0") - set(PROTOBUF_3 ON) -endif() +include(system) +include(simd) -find_package(PythonLibs 2.7 REQUIRED) -find_package(PythonInterp 2.7 REQUIRED) -find_package(ZLIB REQUIRED) -find_package(NumPy REQUIRED) -find_package(Threads REQUIRED) -find_package(AVX QUIET) -find_package(Glog REQUIRED) -find_package(Gflags REQUIRED) -find_package(GTest) -find_package(Sphinx) -find_package(Doxygen) -include(cblas) -find_program(M4_EXECUTABLE m4) -###################### Configurations ########################### +###################### Configurations ############################ option(WITH_DSO "Compile PaddlePaddle with dynamic linked libraries" ON) option(WITH_GPU "Compile PaddlePaddle with gpu" ${CUDA_FOUND}) option(WITH_DOUBLE "Compile PaddlePaddle with double precision, otherwise use single precision" OFF) option(WITH_AVX "Compile PaddlePaddle with avx intrinsics" ${AVX_FOUND}) option(WITH_PYTHON "Compile PaddlePaddle with python interpreter" ON) -option(WITH_STYLE_CHECK "Style Check for PaddlePaddle" ${PYTHONINTERP_FOUND}) +option(WITH_STYLE_CHECK "Style Check for PaddlePaddle" ON) option(WITH_RDMA "Compile PaddlePaddle with rdma support" OFF) option(WITH_TIMER "Compile PaddlePaddle use timer" OFF) option(WITH_PROFILER "Compile PaddlePaddle use gpu profiler" OFF) -option(WITH_TESTING "Compile and run unittest for PaddlePaddle" ${GTEST_FOUND}) +option(WITH_TESTING "Compile and run unittest for PaddlePaddle" ON) option(WITH_DOC "Compile PaddlePaddle with documentation" OFF) -option(WITH_SWIG_PY "Compile PaddlePaddle with py PaddlePaddle prediction api" ${SWIG_FOUND}) +option(WITH_SWIG_PY "Compile PaddlePaddle with py PaddlePaddle prediction api" ON) option(ON_TRAVIS "Running test on travis-ci or not." OFF) option(ON_COVERALLS "Generating code coverage data on coveralls or not." OFF) option(COVERALLS_UPLOAD "Uploading the generated coveralls json." ON) +include(external/zlib) # download, build, install zlib +include(external/gflags) # download, build, install gflags +include(external/glog) # download, build, install glog +include(external/gtest) # download, build, install gtest +include(external/protobuf) # download, build, install protobuf +include(external/python) # download, build, install python +include(external/openblas) # download, build, install openblas +include(external/swig) # download, build, install swig +include(external/warpctc) # download, build, install warpctc + +include(package) # set paddle packages +include(cpplint) # set paddle c++ style +include(ccache) # set ccache for compilation +include(util) # set unittest and link libs +include(rdma) # set rdma libraries +include(flags) # set paddle compile flags +include(cudnn) # set cudnn libraries +include(version) # set PADDLE_VERSION +include(coveralls) # set code coverage +include(python_module) # set python module + +include(configure) # add paddle env configuration -include(cpplint) -include(ccache) -if(WITH_RDMA) - include(rdma) -endif() -include(util) -include(flags) -include(cudnn) -include(FindPythonModule) -include(check_packages) -include(swig) -include(coveralls) - -# Set PaddlePaddle version to Git tag name or Git commit ID. 
-find_package(Git REQUIRED) -# version.cmake will get the current PADDLE_VERSION -include(version) -add_definitions(-DPADDLE_VERSION=${PADDLE_VERSION}) - -if(NOT WITH_GPU) - add_definitions(-DPADDLE_ONLY_CPU) - add_definitions(-DHPPL_STUB_FUNC) - - list(APPEND CMAKE_CXX_SOURCE_FILE_EXTENSIONS cu) -else() - if(${CUDA_VERSION_MAJOR} VERSION_LESS 7) - message(FATAL_ERROR "Paddle need CUDA >= 7.0 to compile") - endif() - - if(NOT CUDNN_FOUND) - message(FATAL_ERROR "Paddle need cudnn to compile") - endif() - - if(WITH_AVX) - set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS} "-Xcompiler ${AVX_FLAG}") - else(WITH_AVX) - set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS} "-Xcompiler ${SSE3_FLAG}") - endif(WITH_AVX) - - # Include cuda and cudnn - include_directories(${CUDNN_INCLUDE_DIR}) - include_directories(${CUDA_TOOLKIT_INCLUDE}) -endif(NOT WITH_GPU) - -if(WITH_DSO) - add_definitions(-DPADDLE_USE_DSO) -endif(WITH_DSO) - -if(WITH_DOUBLE) - add_definitions(-DPADDLE_TYPE_DOUBLE) - set(ACCURACY double) -else(WITH_DOUBLE) - set(ACCURACY float) -endif(WITH_DOUBLE) - -if(NOT WITH_TIMER) - add_definitions(-DPADDLE_DISABLE_TIMER) -endif(NOT WITH_TIMER) - -if(NOT WITH_PROFILER) - add_definitions(-DPADDLE_DISABLE_PROFILER) -endif(NOT WITH_PROFILER) - -if(WITH_AVX) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${AVX_FLAG}") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${AVX_FLAG}") -else(WITH_AVX) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SSE3_FLAG}") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SSE3_FLAG}") -endif(WITH_AVX) - -if(WITH_PYTHON) - include_directories(${PYTHON_INCLUDE_DIR}) - include_directories(${PYTHON_NUMPY_INCLUDE_DIR}) -else(WITH_PYTHON) - add_definitions(-DPADDLE_NO_PYTHON) -endif(WITH_PYTHON) - -if(WITH_RDMA) - include_directories("${RDMA_INC_DIR}") -else(WITH_RDMA) - add_definitions(-DPADDLE_DISABLE_RDMA) -endif(WITH_RDMA) - -# glog -include_directories(${LIBGLOG_INCLUDE_DIR}) - -#gflags -add_definitions(-DGFLAGS_NS=${GFLAGS_NAMESPACE}) -include_directories(${GFLAGS_INCLUDE_DIRS}) - -if(WITH_TESTING) - enable_testing() - include_directories(${GTEST_INCLUDE_DIRS}) -endif() - -include_directories("${CBLAS_INC_DIR}") include_directories("${PROJ_ROOT}") include_directories("${PROJ_ROOT}/paddle/cuda/include") -include_directories(${PROTOBUF_INCLUDE_DIRS}) include_directories("${CMAKE_CURRENT_BINARY_DIR}/proto") -if(EXISTS "${PROJ_ROOT}/paddle/internals/CMakeLists.txt") - set(PADDLE_WITH_INTERNAL ON) - include(paddle/internals/CMakeLists.txt) -else() - set(PADDLE_WITH_INTERNAL OFF) - set(INTERNAL_PROTO_PATH "") -endif() + +set(EXTERNAL_LIBS + # have not include gtest here. + ${GFLAGS_LIBRARIES} + ${GLOG_LIBRARIES} + ${CBLAS_LIBRARIES} + ${PROTOBUF_LIBRARY} + ${ZLIB_LIBRARIES} +) + add_subdirectory(proto) add_subdirectory(paddle) add_subdirectory(python) + if(WITH_DOC) add_subdirectory(doc) endif() diff --git a/cmake/cblas.cmake b/cmake/cblas.cmake index 685334c6585060c0344e552c6f3fda2c7324de03..4e1ae7dc81231943c4bf3db4d4ac6f073f4fd1c4 100644 --- a/cmake/cblas.cmake +++ b/cmake/cblas.cmake @@ -13,6 +13,7 @@ # system paths. # +set(CBLAS_FOUND OFF) ## Find MKL First. 
set(MKL_ROOT $ENV{MKL_ROOT} CACHE PATH "Folder contains MKL") @@ -35,11 +36,12 @@ find_library(MKL_INTEL_LP64 NAMES mkl_intel_lp64 PATHS if(MKL_INCLUDE_DIR AND MKL_CORE_LIB AND MKL_SEQUENTIAL_LIB AND MKL_INTEL_LP64) set(CBLAS_PROVIDER MKL) set(CBLAS_INC_DIR ${MKL_INCLUDE_DIR}) - set(CBLAS_LIBS ${MKL_INTEL_LP64} + set(CBLAS_LIBRARIES ${MKL_INTEL_LP64} ${MKL_SEQUENTIAL_LIB} ${MKL_CORE_LIB}) add_definitions(-DPADDLE_USE_MKL) - message(STATUS "Found MKL (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBS})") + message(STATUS "Found MKL (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})") + set(CBLAS_FOUND ON) return() # return file. endif() @@ -68,9 +70,10 @@ find_library(ATLAS_LIB NAMES lapack_atlas liblapack_atlas.so.3 if(ATLAS_INC_DIR AND ATLAS_CBLAS_LIB AND ATLAS_LIB) set(CBLAS_PROVIDER ATLAS) set(CBLAS_INC_DIR ${ATLAS_INC_DIR} ${ATLAS_CLAPACK_INC_DIR}) - set(CBLAS_LIBS ${ATLAS_LIB} ${ATLAS_CBLAS_LIB}) + set(CBLAS_LIBRARIES ${ATLAS_LIB} ${ATLAS_CBLAS_LIB}) add_definitions(-DPADDLE_USE_ATLAS) - message(STATUS "Found Atlas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBS})") + message(STATUS "Found Atlas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})") + set(CBLAS_FOUND ON) return() endif() @@ -98,8 +101,9 @@ find_library(OPENBLAS_LIB NAMES openblas if(OPENBLAS_INC_DIR AND OPENBLAS_LIB) set(CBLAS_PROVIDER OPENBLAS) set(CBLAS_INC_DIR ${OPENBLAS_INC_DIR}) - set(CBLAS_LIBS ${OPENBLAS_LIB}) - message(STATUS "Found OpenBlas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBS})") + set(CBLAS_LIBRARIES ${OPENBLAS_LIB}) + message(STATUS "Found OpenBlas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})") + set(CBLAS_FOUND ON) return() endif() @@ -130,9 +134,7 @@ find_library(REFERENCE_CBLAS_LIBRARY NAMES cblas PATHS if (REFERENCE_CBLAS_INCLUDE_DIR AND REFERENCE_CBLAS_LIBRARY) set(CBLAS_PROVIDER REFERENCE) set(CBLAS_INC_DIR ${REFERENCE_CBLAS_INCLUDE_DIR}) - set(CBLAS_LIBS ${REFERENCE_CBLAS_LIBRARY}) - return() + set(CBLAS_LIBRARIES ${REFERENCE_CBLAS_LIBRARY}) + message(STATUS "Found reference-cblas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBS})") + set(CBLAS_FOUND ON) endif() - -message(FATAL_ERROR "CBlas must be set. Paddle support MKL, ATLAS, OpenBlas, reference-cblas." - " Try set MKL_ROOT, ATLAS_ROOT, OPENBLAS_ROOT or REFERENCE_CBLAS_ROOT.") diff --git a/cmake/check_packages.cmake b/cmake/check_packages.cmake deleted file mode 100644 index afb84c6ff52af05769a99246d2e93380832c04e0..0000000000000000000000000000000000000000 --- a/cmake/check_packages.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# Check package for each cmake option - -if(WITH_GPU) - find_package(CUDA REQUIRED) # CUDA is required when use gpu -endif() - -if(WITH_PYTHON) - find_package(PythonLibs 2.6 REQUIRED) - find_package(PythonInterp REQUIRED) - find_package(NumPy REQUIRED) -endif() - -if(WITH_STYLE_CHECK) - find_package(PythonInterp REQUIRED) -endif() - -find_package(Glog REQUIRED) - -find_package(Gflags REQUIRED) - -if(WITH_TESTING) - find_package(GTest REQUIRED) -endif() - -if(WITH_DOC) - find_package(Sphinx REQUIRED) - find_python_module(recommonmark REQUIRED) -endif() - -if(WITH_SWIG_PY) - if(NOT SWIG_FOUND) - message(FATAL_ERROR "SWIG is not found. 
Please install swig or disable WITH_SWIG_PY") - endif() - find_python_module(wheel REQUIRED) # package wheel -endif() - -if(NOT M4_EXECUTABLE) - message(FATAL_ERROR "Paddle need m4 to generate proto file.") -endif() diff --git a/cmake/configure.cmake b/cmake/configure.cmake new file mode 100644 index 0000000000000000000000000000000000000000..ae0ec01d94da49f23b56f7d34f862ca57fb39b18 --- /dev/null +++ b/cmake/configure.cmake @@ -0,0 +1,64 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if(WITH_DSO) + add_definitions(-DPADDLE_USE_DSO) +endif(WITH_DSO) + +if(WITH_DOUBLE) + add_definitions(-DPADDLE_TYPE_DOUBLE) +endif(WITH_DOUBLE) + +if(NOT WITH_TIMER) + add_definitions(-DPADDLE_DISABLE_TIMER) +endif(NOT WITH_TIMER) + +if(NOT WITH_PROFILER) + add_definitions(-DPADDLE_DISABLE_PROFILER) +endif(NOT WITH_PROFILER) + +if(NOT WITH_GPU) + add_definitions(-DPADDLE_ONLY_CPU) + add_definitions(-DHPPL_STUB_FUNC) + + list(APPEND CMAKE_CXX_SOURCE_FILE_EXTENSIONS cu) +else() + FIND_PACKAGE(CUDA REQUIRED) + + if(${CUDA_VERSION_MAJOR} VERSION_LESS 7) + message(FATAL_ERROR "Paddle need CUDA >= 7.0 to compile") + endif() + + if(NOT CUDNN_FOUND) + message(FATAL_ERROR "Paddle need cudnn to compile") + endif() + + if(WITH_AVX) + set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS} "-Xcompiler ${AVX_FLAG}") + else(WITH_AVX) + set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS} "-Xcompiler ${SSE3_FLAG}") + endif(WITH_AVX) + + # Include cuda and cudnn + include_directories(${CUDNN_INCLUDE_DIR}) + include_directories(${CUDA_TOOLKIT_INCLUDE}) +endif(NOT WITH_GPU) + +if(WITH_AVX) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${AVX_FLAG}") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${AVX_FLAG}") +else(WITH_AVX) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SSE3_FLAG}") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SSE3_FLAG}") +endif(WITH_AVX) diff --git a/cmake/cpplint.cmake b/cmake/cpplint.cmake index 241af9a0835b2f100c8fb8b246426e631e42aef3..38c636b30edc0af1c07255814e8bc2b1ad9514da 100644 --- a/cmake/cpplint.cmake +++ b/cmake/cpplint.cmake @@ -53,7 +53,7 @@ macro(add_style_check_target TARGET_NAME) if(LINT MATCHES ON) add_custom_command(TARGET ${TARGET_NAME} PRE_BUILD - COMMAND "${PYTHON_EXECUTABLE}" "${PROJ_ROOT}/paddle/scripts/cpplint.py" + COMMAND env ${py_env} "${PYTHON_EXECUTABLE}" "${PROJ_ROOT}/paddle/scripts/cpplint.py" "--filter=${STYLE_FILTER}" ${filename} WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}) endif() diff --git a/cmake/external/gflags.cmake b/cmake/external/gflags.cmake new file mode 100644 index 0000000000000000000000000000000000000000..d38b7d1ba2a74d5bb46d0c07e3abe6832d4c8af3 --- /dev/null +++ b/cmake/external/gflags.cmake @@ -0,0 +1,39 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(ExternalProject) + +SET(GFLAGS_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/gflags) +SET(GFLAGS_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/gflags) +SET(GFLAGS_INCLUDE_DIR "${GFLAGS_INSTALL_DIR}/include" CACHE PATH "gflags include directory." FORCE) +IF(WIN32) + set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/gflags.lib" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE) +ELSE(WIN32) + set(GFLAGS_LIBRARIES "${GFLAGS_INSTALL_DIR}/lib/libgflags.a" CACHE FILEPATH "GFLAGS_LIBRARIES" FORCE) +ENDIF(WIN32) + +INCLUDE_DIRECTORIES(${GFLAGS_INCLUDE_DIR}) + +ExternalProject_Add( + gflags + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/gflags/gflags.git" + PREFIX ${GFLAGS_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${GFLAGS_INSTALL_DIR} + CMAKE_ARGS -DCMAKE_POSITION_INDEPENDENT_CODE=ON + CMAKE_ARGS -DBUILD_TESTING=OFF +) + +LIST(APPEND external_project_dependencies gflags) diff --git a/cmake/external/glog.cmake b/cmake/external/glog.cmake new file mode 100644 index 0000000000000000000000000000000000000000..bec69f3ddf093b62f084f9080fa1fe4398c93e9a --- /dev/null +++ b/cmake/external/glog.cmake @@ -0,0 +1,41 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(ExternalProject) + +SET(GLOG_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/glog) +SET(GLOG_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/glog) +SET(GLOG_INCLUDE_DIR "${GLOG_INSTALL_DIR}/include" CACHE PATH "glog include directory." FORCE) + +IF(WIN32) + SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.lib" CACHE FILEPATH "glog library." FORCE) +ELSE(WIN32) + SET(GLOG_LIBRARIES "${GLOG_INSTALL_DIR}/lib/libglog.a" CACHE FILEPATH "glog library." FORCE) +ENDIF(WIN32) + +INCLUDE_DIRECTORIES(${GLOG_INCLUDE_DIR}) + +ExternalProject_Add( + glog + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/google/glog.git" + PREFIX ${GLOG_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${GLOG_INSTALL_DIR} + CMAKE_ARGS -DCMAKE_POSITION_INDEPENDENT_CODE=ON + CMAKE_ARGS -DWITH_GFLAGS=OFF + CMAKE_ARGS -DBUILD_TESTING=OFF +) + +LIST(APPEND external_project_dependencies glog) diff --git a/cmake/external/gtest.cmake b/cmake/external/gtest.cmake new file mode 100644 index 0000000000000000000000000000000000000000..2fcb7893fa30e7fcd84b9e860217f82cf01bf89e --- /dev/null +++ b/cmake/external/gtest.cmake @@ -0,0 +1,51 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +IF(WITH_TESTING) + ENABLE_TESTING() + INCLUDE(ExternalProject) + + SET(GTEST_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/gtest) + SET(GTEST_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/gtest) + SET(GTEST_INCLUDE_DIR "${GTEST_INSTALL_DIR}/include" CACHE PATH "gtest include directory." FORCE) + + INCLUDE_DIRECTORIES(${GTEST_INCLUDE_DIR}) + + IF(WIN32) + set(GTEST_LIBRARIES + "${GTEST_INSTALL_DIR}/lib/gtest.lib" CACHE FILEPATH "gtest libraries." FORCE) + set(GTEST_MAIN_LIBRARIES + "${GTEST_INSTALL_DIR}/lib/gtest_main.lib" CACHE FILEPATH "gtest main libraries." FORCE) + ELSE(WIN32) + set(GTEST_LIBRARIES + "${GTEST_INSTALL_DIR}/lib/libgtest.a" CACHE FILEPATH "gtest libraries." FORCE) + set(GTEST_MAIN_LIBRARIES + "${GTEST_INSTALL_DIR}/lib/libgtest_main.a" CACHE FILEPATH "gtest main libraries." FORCE) + ENDIF(WIN32) + + ExternalProject_Add( + gtest + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/google/googletest.git" + GIT_TAG "release-1.8.0" + PREFIX ${GTEST_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${GTEST_INSTALL_DIR} + CMAKE_ARGS -DCMAKE_POSITION_INDEPENDENT_CODE=ON + CMAKE_ARGS -DBUILD_GMOCK=ON + CMAKE_ARGS -Dgtest_disable_pthreads=ON + CMAKE_ARGS -Dgtest_force_shared_crt=ON + ) + LIST(APPEND external_project_dependencies gtest) +ENDIF(WITH_TESTING) diff --git a/cmake/external/openblas.cmake b/cmake/external/openblas.cmake new file mode 100644 index 0000000000000000000000000000000000000000..677999cc9f5d320b4ac18fe0cc0d67a8e9921f8f --- /dev/null +++ b/cmake/external/openblas.cmake @@ -0,0 +1,46 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(cblas) + +IF(NOT ${CBLAS_FOUND}) + INCLUDE(ExternalProject) + + SET(CBLAS_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/openblas) + SET(CBLAS_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/openblas) + SET(CBLAS_INC_DIR "${CBLAS_INSTALL_DIR}/include" CACHE PATH "openblas include directory." FORCE) + + IF(WIN32) + SET(CBLAS_LIBRARIES "${CBLAS_INSTALL_DIR}/lib/openblas.lib" CACHE FILEPATH "openblas library." 
FORCE) + ELSE(WIN32) + SET(CBLAS_LIBRARIES "${CBLAS_INSTALL_DIR}/lib/libopenblas.a" CACHE FILEPATH "openblas library" FORCE) + ENDIF(WIN32) + + ExternalProject_Add( + openblas + ${EXTERNAL_PROJECT_LOG_ARGS} + URL "https://github.com/xianyi/OpenBLAS/archive/v0.2.19.tar.gz" + PREFIX ${CBLAS_SOURCES_DIR} + INSTALL_DIR ${CBLAS_INSTALL_DIR} + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND "" + BUILD_COMMAND make CC=${CMAKE_C_COMPILER} FC=${CMAKE_Fortran_COMPILER} + INSTALL_COMMAND make install PREFIX= + UPDATE_COMMAND "" + ) + + LIST(APPEND external_project_dependencies openblas) +ENDIF() + +INCLUDE_DIRECTORIES(${CBLAS_INC_DIR}) diff --git a/cmake/external/protobuf.cmake b/cmake/external/protobuf.cmake new file mode 100644 index 0000000000000000000000000000000000000000..2f2769b4c628d8570c335d344cbf608bda84206f --- /dev/null +++ b/cmake/external/protobuf.cmake @@ -0,0 +1,62 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(ExternalProject) + +SET(PROTOBUF_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/protobuf) +SET(PROTOBUF_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/protobuf) +SET(PROTOBUF_INCLUDE_DIR "${PROTOBUF_INSTALL_DIR}/include" CACHE PATH "protobuf include directory." FORCE) + +INCLUDE_DIRECTORIES(${PROTOBUF_INCLUDE_DIR}) + +IF(WIN32) + SET(PROTOBUF_LITE_LIBRARY + "${PROTOBUF_INSTALL_DIR}/lib/libprotobuf-lite.lib" CACHE FILEPATH "protobuf lite library." FORCE) + SET(PROTOBUF_LIBRARY + "${PROTOBUF_INSTALL_DIR}/lib/libprotobuf.lib" CACHE FILEPATH "protobuf library." FORCE) + SET(PROTOBUF_PROTOC_LIBRARY + "${PROTOBUF_INSTALL_DIR}/lib/libprotoc.lib" CACHE FILEPATH "protoc library." FORCE) + SET(PROTOBUF_PROTOC_EXECUTABLE "${PROTOBUF_INSTALL_DIR}/bin/protoc.exe" CACHE FILEPATH "protobuf executable." FORCE) +ELSE(WIN32) + IF(${HOST_SYSTEM} STREQUAL "centos") + SET(LIB "lib64") + ELSE() + SET(LIB "lib") + ENDIF() + SET(PROTOBUF_LITE_LIBRARY + "${PROTOBUF_INSTALL_DIR}/${LIB}/libprotobuf-lite.a" CACHE FILEPATH "protobuf lite library." FORCE) + SET(PROTOBUF_LIBRARY + "${PROTOBUF_INSTALL_DIR}/${LIB}/libprotobuf.a" CACHE FILEPATH "protobuf library." FORCE) + SET(PROTOBUF_PROTOC_LIBRARY + "${PROTOBUF_INSTALL_DIR}/${LIB}/libprotoc.a" CACHE FILEPATH "protoc library." FORCE) + SET(PROTOBUF_PROTOC_EXECUTABLE "${PROTOBUF_INSTALL_DIR}/bin/protoc" CACHE FILEPATH "protobuf executable." 
FORCE) +ENDIF(WIN32) + +ExternalProject_Add( + protobuf + ${EXTERNAL_PROJECT_LOG_ARGS} + PREFIX ${PROTOBUF_SOURCES_DIR} + UPDATE_COMMAND "" + DEPENDS zlib + GIT_REPOSITORY "https://github.com/google/protobuf.git" + GIT_TAG "9f75c5aa851cd877fb0d93ccc31b8567a6706546" + CONFIGURE_COMMAND + ${CMAKE_COMMAND} ${PROTOBUF_SOURCES_DIR}/src/protobuf/cmake + -Dprotobuf_BUILD_TESTS=OFF + -DCMAKE_POSITION_INDEPENDENT_CODE=ON + -DCMAKE_BUILD_TYPE=Release + -DCMAKE_INSTALL_PREFIX=${PROTOBUF_INSTALL_DIR} +) + +LIST(APPEND external_project_dependencies protobuf) diff --git a/cmake/external/python.cmake b/cmake/external/python.cmake new file mode 100644 index 0000000000000000000000000000000000000000..e4c570479f682e951413017b256a8e16dfce625b --- /dev/null +++ b/cmake/external/python.cmake @@ -0,0 +1,204 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(ExternalProject) + + +##################################### PYTHON ######################################## +SET(PYTHON_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/python) +SET(PYTHON_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/python) +SET(_python_DIR ${PYTHON_INSTALL_DIR}) + +IF(UNIX) + SET(PYTHON_FOUND ON) + SET(PYTHON_INCLUDE_DIR "${PYTHON_INSTALL_DIR}/include/python2.7" CACHE PATH "Python include dir" FORCE) + SET(PYTHON_LIBRARIES "${PYTHON_INSTALL_DIR}/lib/libpython2.7.a" CACHE FILEPATH "Python library" FORCE) + SET(PYTHON_EXECUTABLE ${PYTHON_INSTALL_DIR}/bin/python CACHE FILEPATH "Python executable" FORCE) + SET(PY_SITE_PACKAGES_PATH "${PYTHON_INSTALL_DIR}/lib/python2.7/site-packages" CACHE PATH "Python site-packages path" FORCE) +ELSEIF(WIN32) + SET(PYTHON_FOUND ON) + SET(PYTHON_INCLUDE_DIR "${PYTHON_INSTALL_DIR}/include" CACHE PATH "Python include dir" FORCE) + SET(PYTHON_LIBRARIES "${PYTHON_INSTALL_DIR}/libs/python27.lib" CACHE FILEPATH "Python library" FORCE) + SET(PYTHON_EXECUTABLE "${PYTHON_INSTALL_DIR}/bin/python.exe" CACHE FILEPATH "Python executable" FORCE) + SET(PY_SITE_PACKAGES_PATH "${PYTHON_INSTALL_DIR}/Lib/site-packages" CACHE PATH "Python site-packages path" FORCE) +ELSE() + MESSAGE(FATAL_ERROR "Unknown system !") +ENDIF() + +SET(py_env + PATH=${PYTHON_INSTALL_DIR}/bin/:$ENV{PATH} + PYTHONHOME=${PYTHON_INSTALL_DIR} + PYTHONPATH=${PYTHON_INSTALL_DIR}/lib:${PYTHON_INSTALL_DIR}/lib/python2.7:${PY_SITE_PACKAGES_PATH}) + +INCLUDE_DIRECTORIES(${PYTHON_INCLUDE_DIR}) + +IF(APPLE) + LIST(APPEND EXTERNAL_PROJECT_OPTIONAL_CMAKE_ARGS + -DCMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=ON + ) +ENDIF() + +SET(EXTERNAL_PROJECT_OPTIONAL_CMAKE_CACHE_ARGS) + +# Force Python build to "Release". 
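+# For multi-config generators (e.g. Visual Studio, Xcode) this is done by temporarily
+# overriding CMAKE_CFG_INTDIR; for single-config generators -DCMAKE_BUILD_TYPE=Release
+# is appended to the optional cache arguments instead.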
+IF(CMAKE_CONFIGURATION_TYPES) + SET(SAVED_CMAKE_CFG_INTDIR ${CMAKE_CFG_INTDIR}) + SET(CMAKE_CFG_INTDIR "Release") +ELSE() + LIST(APPEND EXTERNAL_PROJECT_OPTIONAL_CMAKE_CACHE_ARGS + -DCMAKE_BUILD_TYPE:STRING=Release + ) +ENDIF() + +ExternalProject_Add(python + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/python-cmake-buildsystem/python-cmake-buildsystem.git" + PREFIX ${PYTHON_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DPYTHON_VERSION=2.7.12 + CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + CMAKE_CACHE_ARGS + -DCMAKE_INSTALL_PREFIX:PATH=${PYTHON_INSTALL_DIR} + -DBUILD_LIBPYTHON_SHARED:BOOL=OFF + -DUSE_SYSTEM_LIBRARIES:BOOL=OFF + -DZLIB_ROOT:FILEPATH=${ZLIB_ROOT} + -DZLIB_INCLUDE_DIR:PATH=${ZLIB_INCLUDE_DIR} + -DZLIB_LIBRARY:FILEPATH=${ZLIB_LIBRARIES} + -DDOWNLOAD_SOURCES:BOOL=ON + -DINSTALL_WINDOWS_TRADITIONAL:BOOL=OFF + ${EXTERNAL_PROJECT_OPTIONAL_CMAKE_CACHE_ARGS} + ${EXTERNAL_PROJECT_OPTIONAL_CMAKE_ARGS} + DEPENDS zlib +) +#################################################################################### + +##################################### SETUPTOOLS ################################### +SET(SETUPTOOLS_SOURCES_DIR ${PYTHON_SOURCES_DIR}/setuptools) +ExternalProject_Add(setuptools + ${EXTERNAL_PROJECT_LOG_ARGS} + PREFIX ${SETUPTOOLS_SOURCES_DIR} + URL "https://pypi.python.org/packages/source/s/setuptools/setuptools-18.3.2.tar.gz" + BUILD_IN_SOURCE 1 + PATCH_COMMAND "" + UPDATE_COMMAND "" + CONFIGURE_COMMAND "" + INSTALL_COMMAND "" + BUILD_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + DEPENDS python zlib +) +##################################################################################### + +##################################### SIX ########################################### +SET(SIX_SOURCES_DIR ${PYTHON_SOURCES_DIR}/six) +ExternalProject_Add(six + ${EXTERNAL_PROJECT_LOG_ARGS} + PREFIX ${SIX_SOURCES_DIR} + URL https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz + BUILD_IN_SOURCE 1 + PATCH_COMMAND "" + UPDATE_COMMAND "" + CONFIGURE_COMMAND "" + INSTALL_COMMAND "" + BUILD_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + DEPENDS python setuptools +) +##################################################################################### + +##################################### CYTHON ######################################## +SET(CYTHON_SOURCES_DIR ${PYTHON_SOURCES_DIR}/cython) +ExternalProject_Add(cython + ${EXTERNAL_PROJECT_LOG_ARGS} + PREFIX ${CYTHON_SOURCES_DIR} + URL https://github.com/cython/cython/archive/0.25.2.tar.gz + GIT_TAG 0.25.2 + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND "" + PATCH_COMMAND "" + UPDATE_COMMAND "" + INSTALL_COMMAND "" + BUILD_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + DEPENDS python +) +#################################################################################### + +##################################### NUMPY ######################################## +SET(NUMPY_SOURCES_DIR ${PYTHON_SOURCES_DIR}/numpy) +SET(NUMPY_TAG_VERSION "v1.11.3") +SET(NUMPY_VERSION "1.11.3") + +SET(EGG_NAME "") +SET(PYTHON_NUMPY_INCLUDE_DIR "") +IF(WIN32) + SET(EGG_NAME "numpy-${NUMPY_VERSION}-py2.7-${HOST_SYSTEM}.egg") +ELSE(WIN32) + IF(APPLE) + SET(EGG_NAME "numpy-${NUMPY_VERSION}-py2.7-${HOST_SYSTEM}-${MACOS_VERSION}") + ELSE(APPLE) + SET(EGG_NAME "numpy-${NUMPY_VERSION}-py2.7-linux") + SET(EGG_NAME "numpy-${NUMPY_VERSION}-py2.7-linux") + ENDIF(APPLE) + + FOREACH(suffix x86_64 intel fat64 fat32 universal) + LIST(APPEND 
PYTHON_NUMPY_INCLUDE_DIR ${PY_SITE_PACKAGES_PATH}/${EGG_NAME}-${suffix}.egg/numpy/core/include) + ENDFOREACH() +ENDIF(WIN32) + +INCLUDE_DIRECTORIES(${PYTHON_NUMPY_INCLUDE_DIR}) + +ExternalProject_Add(numpy + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY https://github.com/numpy/numpy.git + GIT_TAG ${NUMPY_TAG_VERSION} + CONFIGURE_COMMAND "" + UPDATE_COMMAND "" + PREFIX ${NUMPY_SOURCES_DIR} + BUILD_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py build + INSTALL_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + BUILD_IN_SOURCE 1 + DEPENDS python setuptools cython +) +#################################################################################### + +##################################### WHEEL ######################################## +SET(WHEEL_SOURCES_DIR ${PYTHON_SOURCES_DIR}/wheel) +ExternalProject_Add(wheel + ${EXTERNAL_PROJECT_LOG_ARGS} + URL https://pypi.python.org/packages/source/w/wheel/wheel-0.29.0.tar.gz + PREFIX ${WHEEL_SOURCES_DIR} + CONFIGURE_COMMAND "" + UPDATE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + BUILD_IN_SOURCE 1 + DEPENDS python setuptools +) +#################################################################################### + +################################### PROTOBUF ####################################### +SET(PY_PROTOBUF_SOURCES_DIR ${PYTHON_SOURCES_DIR}/protobuf) +ExternalProject_Add(python-protobuf + ${EXTERNAL_PROJECT_LOG_ARGS} + URL https://pypi.python.org/packages/e0/b0/0a1b364fe8a7d177b4b7d4dca5b798500dc57a7273b93cca73931b305a6a/protobuf-3.1.0.post1.tar.gz + URL_MD5 38b5fb160c768d2f8444d0c6d637ff91 + PREFIX ${PY_PROTOBUF_SOURCES_DIR} + BUILD_IN_SOURCE 1 + PATCH_COMMAND "" + CONFIGURE_COMMAND "" + BUILD_COMMAND env PATH=${PROTOBUF_INSTALL_DIR}/bin:$ENV{PATH} ${py_env} ${PYTHON_EXECUTABLE} setup.py build + INSTALL_COMMAND env PATH=${PROTOBUF_INSTALL_DIR}/bin:$ENV{PATH} ${py_env} ${PYTHON_EXECUTABLE} setup.py install + DEPENDS python setuptools six +) + +LIST(APPEND external_project_dependencies python setuptools six cython numpy wheel python-protobuf) diff --git a/cmake/external/swig.cmake b/cmake/external/swig.cmake new file mode 100644 index 0000000000000000000000000000000000000000..5460b02c37ec302ebe3c8c5cec03d566a491cfdb --- /dev/null +++ b/cmake/external/swig.cmake @@ -0,0 +1,70 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
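+
+# Note: on Windows a pre-built swig binary is downloaded, while on other platforms PCRE
+# is built first and swig is configured and installed against it (see below).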
+ +# build swig as an external project +INCLUDE(ExternalProject) + +SET(SWIG_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/swig) +SET(SWIG_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/swig) +SET(SWIG_TARGET_VERSION "3.0.2") +SET(SWIG_DOWNLOAD_SRC_MD5 "62f9b0d010cef36a13a010dc530d0d41") +SET(SWIG_DOWNLOAD_WIN_MD5 "3f18de4fc09ab9abb0d3be37c11fbc8f") + +IF(WIN32) + # swig.exe available as pre-built binary on Windows: + ExternalProject_Add(swig + URL http://prdownloads.sourceforge.net/swig/swigwin-${SWIG_TARGET_VERSION}.zip + URL_MD5 ${SWIG_DOWNLOAD_WIN_MD5} + SOURCE_DIR ${SWIG_SOURCES_DIR} + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" + UPDATE_COMMAND "" + ) + SET(SWIG_DIR ${SWIG_SOURCES_DIR} CACHE FILEPATH "SWIG Directory" FORCE) + SET(SWIG_EXECUTABLE ${SWIG_SOURCES_DIR}/swig.exe CACHE FILEPATH "SWIG Executable" FORCE) +ELSE(WIN32) + # From PCRE configure + ExternalProject_Add(pcre + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY https://github.com/svn2github/pcre.git + PREFIX ${SWIG_SOURCES_DIR}/pcre + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=${SWIG_INSTALL_DIR}/pcre + ) + + # swig uses bison find it by cmake and pass it down + FIND_PACKAGE(BISON) + + # From SWIG configure + ExternalProject_Add(swig + GIT_REPOSITORY https://github.com/swig/swig.git + GIT_TAG rel-3.0.10 + PREFIX ${SWIG_SOURCES_DIR} + CONFIGURE_COMMAND cd ${SWIG_SOURCES_DIR}/src/swig && ./autogen.sh + CONFIGURE_COMMAND cd ${SWIG_SOURCES_DIR}/src/swig && + env "PCRE_LIBS=${SWIG_INSTALL_DIR}/pcre/lib/libpcre.a ${SWIG_INSTALL_DIR}/pcre/lib/libpcrecpp.a ${SWIG_INSTALL_DIR}/pcre/lib/libpcreposix.a" + ./configure + --prefix=${SWIG_INSTALL_DIR} + --with-pcre-prefix=${SWIG_INSTALL_DIR}/pcre + BUILD_COMMAND cd ${SWIG_SOURCES_DIR}/src/swig && make + INSTALL_COMMAND cd ${SWIG_SOURCES_DIR}/src/swig && make install + UPDATE_COMMAND "" + DEPENDS pcre + ) + + SET(SWIG_DIR ${SWIG_INSTALL_DIR}/share/swig/${SWIG_TARGET_VERSION}) + SET(SWIG_EXECUTABLE ${SWIG_INSTALL_DIR}/bin/swig) +ENDIF(WIN32) + +LIST(APPEND external_project_dependencies swig) diff --git a/cmake/external/warpctc.cmake b/cmake/external/warpctc.cmake new file mode 100644 index 0000000000000000000000000000000000000000..d90768b6f1576e6d469d91d694ae0b9d1c7e8384 --- /dev/null +++ b/cmake/external/warpctc.cmake @@ -0,0 +1,58 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
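+
+# Build warp-ctc as an external project and expose WARPCTC_INCLUDE_DIR and
+# WARPCTC_LIBRARIES to the main build; OpenMP is disabled when compiling with
+# Clang/AppleClang (see USE_OMP below).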
+ +INCLUDE(ExternalProject) + +SET(WARPCTC_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/warpctc) +SET(WARPCTC_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/warpctc) +SET(WARPCTC_INCLUDE_DIR "${WARPCTC_INSTALL_DIR}/include" CACHE PATH "Warp-ctc Directory" FORCE) + +INCLUDE_DIRECTORIES(${WARPCTC_INCLUDE_DIR}) + +SET(WARPCTC_LIB_DIR "${WARPCTC_INSTALL_DIR}/lib" CACHE PATH "Warp-ctc Library Directory" FORCE) + +IF(WIN32) + SET(WARPCTC_LIBRARIES + "${WARPCTC_INSTALL_DIR}/lib/warpctc.dll" CACHE FILEPATH "Warp-ctc Library" FORCE) +ELSE(WIN32) + IF(APPLE) + SET(_warpctc_SHARED_SUFFIX dylib) + ELSE(APPLE) + SET(_warpctc_SHARED_SUFFIX so) + ENDIF(APPLE) + + SET(WARPCTC_LIBRARIES + "${WARPCTC_INSTALL_DIR}/lib/libwarpctc.${_warpctc_SHARED_SUFFIX}" CACHE FILEPATH "Warp-ctc Library" FORCE) +ENDIF(WIN32) + +IF(CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" ) + SET(USE_OMP OFF) +ELSE() + SET(USE_OMP ON) +ENDIF() + +ExternalProject_Add( + warpctc + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/gangliao/warp-ctc.git" + PREFIX ${WARPCTC_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${WARPCTC_INSTALL_DIR} + CMAKE_ARGS -DWITH_GPU=${WITH_GPU} + CMAKE_ARGS -DWITH_OMP=${USE_OMP} +) + +LIST(APPEND external_project_dependencies warpctc) diff --git a/cmake/external/zlib.cmake b/cmake/external/zlib.cmake new file mode 100644 index 0000000000000000000000000000000000000000..916f6816aae9938aad95ac527cf07ffbe38f7479 --- /dev/null +++ b/cmake/external/zlib.cmake @@ -0,0 +1,43 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +INCLUDE(ExternalProject) + +SET(ZLIB_SOURCES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/zlib) +SET(ZLIB_INSTALL_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/install/zlib) +SET(ZLIB_ROOT ${ZLIB_INSTALL_DIR} CACHE FILEPATH "zlib root directory." FORCE) +SET(ZLIB_INCLUDE_DIR "${ZLIB_INSTALL_DIR}/include" CACHE PATH "zlib include directory." FORCE) + +IF(WIN32) + SET(ZLIB_LIBRARIES "${ZLIB_INSTALL_DIR}/lib/zlibstatic.lib" CACHE FILEPATH "zlib library." FORCE) +ELSE(WIN32) + set(ZLIB_LIBRARIES "${ZLIB_INSTALL_DIR}/lib/libz.a" CACHE FILEPATH "zlib library." 
FORCE) +ENDIF(WIN32) + +INCLUDE_DIRECTORIES(${ZLIB_INCLUDE_DIR}) + +ExternalProject_Add( + zlib + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY "https://github.com/madler/zlib.git" + GIT_TAG "v1.2.8" + PREFIX ${ZLIB_SOURCES_DIR} + UPDATE_COMMAND "" + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${ZLIB_INSTALL_DIR} + CMAKE_ARGS -DBUILD_SHARED_LIBS=OFF + CMAKE_ARGS -DCMAKE_POSITION_INDEPENDENT_CODE=ON + CMAKE_ARGS -DCMAKE_MACOSX_RPATH=ON +) + +LIST(APPEND external_project_dependencies zlib) diff --git a/cmake/FindPythonModule.cmake b/cmake/python_module.cmake similarity index 100% rename from cmake/FindPythonModule.cmake rename to cmake/python_module.cmake diff --git a/cmake/rdma.cmake b/cmake/rdma.cmake index e9a4da79aa92a92aa7e5d21bb795ab9aaf60ab8b..9ff1a77cac74fb1bdfe470a78d225ed1767bb1b5 100644 --- a/cmake/rdma.cmake +++ b/cmake/rdma.cmake @@ -5,72 +5,76 @@ # svn co https://svn.baidu.com/sys/ip/trunk/rdma/thirdparty rdma/ # we use static output in svn repositories to avoid implict bugs from not standard runtime env. -set(RDMA_ROOT $ENV{RDMA_ROOT} CACHE PATH "Folder contains RDMA sock library and thirdparty library") +if(WITH_RDMA) + set(RDMA_ROOT $ENV{RDMA_ROOT} CACHE PATH "Folder contains RDMA sock library and thirdparty library") -function(generate_rdma_links) - #redirect to current DIR to isolate the pollution from system runtime environment - #it can benifits unified control for different gcc environment. - #e.g, by default gcc48 did not refer /usr/lib64 which could contain low version - #runtime libraries that will crash process while loading it. That redirect trick - #can fix it. - execute_process( - COMMAND mkdir -p librdma - COMMAND ln -s -f /usr/lib64/libibverbs.so.1.0.0 librdma/libibverbs.so.1 - COMMAND ln -s -f /usr/lib64/libibverbs.so.1.0.0 librdma/libibverbs.so - COMMAND ln -s -f /usr/lib64/librdmacm.so.1.0.0 librdma/librdmacm.so.1 - COMMAND ln -s -f /usr/lib64/librdmacm.so.1.0.0 librdma/librdmacm.so - WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} - ) -endfunction(generate_rdma_links) - - -#check and set headers -find_path(RDMA_INC_SXISOCK sxi_sock.h PATHS ${RDMA_ROOT}/sockrdmav1/output/include) -find_path(RDMA_INC_XIO libxio.h PATHS ${RDMA_ROOT}/thirdparty/output/accelio) -find_path(RDMA_INC_EVENT event2 PATHS ${RDMA_ROOT}/thirdparty/output/libevent) -find_path(RDMA_INC_NUMA numa.h PATHS ${RDMA_ROOT}/thirdparty/output/libnuma) - -#check and set libs -find_library(RDMA_LIB_SXISOCK NAMES sxisock PATHS ${RDMA_ROOT}/sockrdmav1/output) -find_library(RDMA_LIB_XIO NAMES xio PATHS ${RDMA_ROOT}/thirdparty/output/accelio) -find_library(RDMA_LIB_EVENT NAMES event PATHS ${RDMA_ROOT}/thirdparty/output/libevent) -find_library(RDMA_LIB_EVENT_CORE NAMES event_core PATHS ${RDMA_ROOT}/thirdparty/output/libevent) -find_library(RDMA_LIB_EVENT_EXTRA NAMES event_extra PATHS ${RDMA_ROOT}/thirdparty/output/libevent) -find_library(RDMA_LIB_EVENT_PTHREADS NAMES event_pthreads PATHS ${RDMA_ROOT}/thirdparty/output/libevent) -find_library(RDMA_LIB_NUMA NAMES numa PATHS ${RDMA_ROOT}/thirdparty/output/libnuma) - -if( - RDMA_INC_SXISOCK AND - RDMA_INC_XIO AND - RDMA_INC_EVENT AND - RDMA_INC_NUMA AND - RDMA_LIB_SXISOCK AND - RDMA_LIB_XIO AND - RDMA_LIB_EVENT AND - RDMA_LIB_EVENT_CORE AND - RDMA_LIB_EVENT_EXTRA AND - RDMA_LIB_EVENT_PTHREADS AND - RDMA_LIB_NUMA + function(generate_rdma_links) + #redirect to current DIR to isolate the pollution from system runtime environment + #it can benifits unified control for different gcc environment. 
+ #e.g, by default gcc48 did not refer /usr/lib64 which could contain low version + #runtime libraries that will crash process while loading it. That redirect trick + #can fix it. + execute_process( + COMMAND mkdir -p librdma + COMMAND ln -s -f /usr/lib64/libibverbs.so.1.0.0 librdma/libibverbs.so.1 + COMMAND ln -s -f /usr/lib64/libibverbs.so.1.0.0 librdma/libibverbs.so + COMMAND ln -s -f /usr/lib64/librdmacm.so.1.0.0 librdma/librdmacm.so.1 + COMMAND ln -s -f /usr/lib64/librdmacm.so.1.0.0 librdma/librdmacm.so + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} ) + endfunction(generate_rdma_links) - set(RDMA_INC_DIR - ${RDMA_INC_SXISOCK} - ${RDMA_INC_XIO} - ${RDMA_INC_EVENT} - ${RDMA_INC_NUMA}) - set(RDMA_LIBS - ${RDMA_LIB_SXISOCK} - ${RDMA_LIB_XIO} - ${RDMA_LIB_EVENT} - ${RDMA_LIB_EVENT_CORE} - ${RDMA_LIB_EVENT_EXTRA} - ${RDMA_LIB_EVENT_PTHREADS} - ${RDMA_LIB_NUMA} - ) - set(RDMA_LD_FLAGS "-L./librdma -libverbs -lrdmacm -Xlinker -rpath ./librdma") - return() -endif() + #check and set headers + find_path(RDMA_INC_SXISOCK sxi_sock.h PATHS ${RDMA_ROOT}/sockrdmav1/output/include) + find_path(RDMA_INC_XIO libxio.h PATHS ${RDMA_ROOT}/thirdparty/output/accelio) + find_path(RDMA_INC_EVENT event2 PATHS ${RDMA_ROOT}/thirdparty/output/libevent) + find_path(RDMA_INC_NUMA numa.h PATHS ${RDMA_ROOT}/thirdparty/output/libnuma) + + #check and set libs + find_library(RDMA_LIB_SXISOCK NAMES sxisock PATHS ${RDMA_ROOT}/sockrdmav1/output) + find_library(RDMA_LIB_XIO NAMES xio PATHS ${RDMA_ROOT}/thirdparty/output/accelio) + find_library(RDMA_LIB_EVENT NAMES event PATHS ${RDMA_ROOT}/thirdparty/output/libevent) + find_library(RDMA_LIB_EVENT_CORE NAMES event_core PATHS ${RDMA_ROOT}/thirdparty/output/libevent) + find_library(RDMA_LIB_EVENT_EXTRA NAMES event_extra PATHS ${RDMA_ROOT}/thirdparty/output/libevent) + find_library(RDMA_LIB_EVENT_PTHREADS NAMES event_pthreads PATHS ${RDMA_ROOT}/thirdparty/output/libevent) + find_library(RDMA_LIB_NUMA NAMES numa PATHS ${RDMA_ROOT}/thirdparty/output/libnuma) -#if this module is not called, RDMA_INC_DIR RDMA_LIBS will be null, so top module always refer this variable + if( + RDMA_INC_SXISOCK AND + RDMA_INC_XIO AND + RDMA_INC_EVENT AND + RDMA_INC_NUMA AND + RDMA_LIB_SXISOCK AND + RDMA_LIB_XIO AND + RDMA_LIB_EVENT AND + RDMA_LIB_EVENT_CORE AND + RDMA_LIB_EVENT_EXTRA AND + RDMA_LIB_EVENT_PTHREADS AND + RDMA_LIB_NUMA + ) -message(FATAL_ERROR, "RDMA libraries are not found, try to set RDMA_ROOT or check all related libraries.") + set(RDMA_INC_DIR + ${RDMA_INC_SXISOCK} + ${RDMA_INC_XIO} + ${RDMA_INC_EVENT} + ${RDMA_INC_NUMA}) + set(RDMA_LIBS + ${RDMA_LIB_SXISOCK} + ${RDMA_LIB_XIO} + ${RDMA_LIB_EVENT} + ${RDMA_LIB_EVENT_CORE} + ${RDMA_LIB_EVENT_EXTRA} + ${RDMA_LIB_EVENT_PTHREADS} + ${RDMA_LIB_NUMA} + ) + set(RDMA_LD_FLAGS "-L./librdma -libverbs -lrdmacm -Xlinker -rpath ./librdma") + include_directories("${RDMA_INC_DIR}") + else() + #if this module is not called, RDMA_INC_DIR RDMA_LIBS will be null, so top module always refer this variable + message(FATAL_ERROR, "RDMA libraries are not found, try to set RDMA_ROOT or check all related libraries.") + endif() +else(WITH_RDMA) + set(RDMA_LIBS "") + set(RDMA_LD_FLAGS "") + add_definitions(-DPADDLE_DISABLE_RDMA) +endif(WITH_RDMA) diff --git a/cmake/FindAVX.cmake b/cmake/simd.cmake similarity index 100% rename from cmake/FindAVX.cmake rename to cmake/simd.cmake diff --git a/cmake/swig.cmake b/cmake/swig.cmake deleted file mode 100644 index 97e87aa947791e2c5a88e7e554dec43bcd661664..0000000000000000000000000000000000000000 --- a/cmake/swig.cmake 
+++ /dev/null @@ -1,15 +0,0 @@ -function(generate_python_api target_name) - add_custom_command(OUTPUT ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py - ${PROJ_ROOT}/paddle/Paddle_wrap.cxx - ${PROJ_ROOT}/paddle/Paddle_wrap.h - COMMAND swig -python -c++ -outcurrentdir -I../ api/Paddle.swig - && mv ${PROJ_ROOT}/paddle/swig_paddle.py ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py - DEPENDS ${PROJ_ROOT}/paddle/api/Paddle.swig - ${PROJ_ROOT}/paddle/api/PaddleAPI.h - WORKING_DIRECTORY ${PROJ_ROOT}/paddle - COMMENT "Generate Python API from swig") - add_custom_target(${target_name} ALL DEPENDS - ${PROJ_ROOT}/paddle/Paddle_wrap.cxx - ${PROJ_ROOT}/paddle/Paddle_wrap.h - ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py) -endfunction(generate_python_api) diff --git a/cmake/system.cmake b/cmake/system.cmake new file mode 100644 index 0000000000000000000000000000000000000000..788db404ebfb6facbaedf2910186f3b1afe775c1 --- /dev/null +++ b/cmake/system.cmake @@ -0,0 +1,53 @@ +# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +IF(WIN32) + SET(HOST_SYSTEM "win32") +ELSE(WIN32) + IF(APPLE) + EXEC_PROGRAM (sw_vers ARGS -productVersion OUTPUT_VARIABLE MACOSX_VERSION) + STRING(REGEX MATCH "[0-9]+.[0-9]+" VERSION "${MACOSX_VERSION}") + SET(MACOS_VERSION ${VERSION}) + SET(HOST_SYSTEM "macosx") + ELSE(APPLE) + IF(EXISTS "/etc/issue") + FILE(READ "/etc/issue" LINUX_ISSUE) + IF(LINUX_ISSUE MATCHES "CentOS") + SET(HOST_SYSTEM "centos") + ELSEIF(LINUX_ISSUE MATCHES "Debian") + SET(HOST_SYSTEM "debian") + ELSEIF(LINUX_ISSUE MATCHES "Ubuntu") + SET(HOST_SYSTEM "ubuntu") + ENDIF() + ENDIF(EXISTS "/etc/issue") + ENDIF(APPLE) +ENDIF(WIN32) + +# query number of logical cores +CMAKE_HOST_SYSTEM_INFORMATION(RESULT CPU_CORES QUERY NUMBER_OF_LOGICAL_CORES) + +MARK_AS_ADVANCED(HOST_SYSTEM CPU_CORES) + +MESSAGE(STATUS "Found Paddle host system: ${HOST_SYSTEM}") +MESSAGE(STATUS "Found Paddle host system's CPU: ${CPU_CORES} cores") + +# external dependencies log output +SET(EXTERNAL_PROJECT_LOG_ARGS + LOG_DOWNLOAD 0 # Wrap download in script to log output + LOG_UPDATE 1 # Wrap update in script to log output + LOG_CONFIGURE 1 # Wrap configure in script to log output + LOG_BUILD 1 # Wrap build in script to log output + LOG_TEST 1 # Wrap test in script to log output + LOG_INSTALL 1 # Wrap install in script to log output +) diff --git a/cmake/util.cmake b/cmake/util.cmake index 38299a87e95a151fe0abc535d1cc1968914442b3..a19bf2a7998ed7772a66f6a7eb5f9e858b0e75a2 100644 --- a/cmake/util.cmake +++ b/cmake/util.cmake @@ -24,7 +24,7 @@ function(target_circle_link_libraries TARGET_NAME) list(APPEND libsInArgn ${arg}) endif() endforeach() - if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang") list(APPEND LIBS "-undefined dynamic_lookup") endif() list(REVERSE libsInArgn) @@ -81,18 +81,6 @@ function(link_paddle_exe TARGET_NAME) set(METRIC_LIBS "") endif() - if(PADDLE_WITH_INTERNAL) - set(INTERAL_LIBS 
paddle_internal_gserver paddle_internal_parameter) - target_circle_link_libraries(${TARGET_NAME} - ARCHIVE_START - paddle_internal_gserver - paddle_internal_owlqn - ARCHIVE_END - paddle_internal_parameter) - else() - set(INTERAL_LIBS "") - endif() - target_circle_link_libraries(${TARGET_NAME} ARCHIVE_START paddle_gserver @@ -108,24 +96,15 @@ function(link_paddle_exe TARGET_NAME) paddle_proto paddle_cuda ${METRIC_LIBS} - ${PROTOBUF_LIBRARY} - ${LIBGLOG_LIBRARY} - ${GFLAGS_LIBRARIES} + ${EXTERNAL_LIBS} ${CMAKE_THREAD_LIBS_INIT} - ${CBLAS_LIBS} - ${ZLIB_LIBRARIES} - ${INTERAL_LIBS} - ${CMAKE_DL_LIBS}) - - if(WITH_RDMA) - target_link_libraries(${TARGET_NAME} - ${RDMA_LD_FLAGS} - ${RDMA_LIBS}) - endif() + ${CMAKE_DL_LIBS} + ${RDMA_LD_FLAGS} + ${RDMA_LIBS}) if(WITH_PYTHON) target_link_libraries(${TARGET_NAME} - ${PYTHON_LIBRARIES}) + ${PYTHON_LIBRARIES} util) endif() if(WITH_GPU) @@ -141,11 +120,7 @@ function(link_paddle_exe TARGET_NAME) target_link_libraries(${TARGET_NAME} rt) endif() endif() - - if(NOT WITH_DSO) - target_link_libraries(${TARGET_NAME} - ${WARPCTC_LIBRARY}) - endif() + add_dependencies(${TARGET_NAME} ${external_project_dependencies}) endfunction() # link_paddle_test diff --git a/cmake/version.cmake b/cmake/version.cmake index a0518e07e88a1ff468c301523f888c7d95e15185..ac1583a24c828629c46cb9cf4e965f8da2273732 100644 --- a/cmake/version.cmake +++ b/cmake/version.cmake @@ -21,4 +21,5 @@ while ("${PADDLE_VERSION}" STREQUAL "") endif() endwhile() +add_definitions(-DPADDLE_VERSION=${PADDLE_VERSION}) message(STATUS "Paddle version is ${PADDLE_VERSION}") diff --git a/paddle/api/CMakeLists.txt b/paddle/api/CMakeLists.txt index da6dad10cd807654f9ddd03beeb29cef69fc8de0..3ac50e34bb434b14d346f1c4707084f93461284d 100644 --- a/paddle/api/CMakeLists.txt +++ b/paddle/api/CMakeLists.txt @@ -1,3 +1,21 @@ +FUNCTION(generate_python_api target_name) + ADD_CUSTOM_COMMAND(OUTPUT ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py + ${PROJ_ROOT}/paddle/Paddle_wrap.cxx + ${PROJ_ROOT}/paddle/Paddle_wrap.h + COMMAND ${SWIG_EXECUTABLE} -python -c++ -outcurrentdir -I../ api/Paddle.swig + && mv ${PROJ_ROOT}/paddle/swig_paddle.py ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py + DEPENDS ${PROJ_ROOT}/paddle/api/Paddle.swig + ${PROJ_ROOT}/paddle/api/PaddleAPI.h + ${external_project_dependencies} + WORKING_DIRECTORY ${PROJ_ROOT}/paddle + COMMENT "Generate Python API from swig") + ADD_CUSTOM_TARGET(${target_name} ALL DEPENDS + ${PROJ_ROOT}/paddle/Paddle_wrap.cxx + ${PROJ_ROOT}/paddle/Paddle_wrap.h + ${PROJ_ROOT}/paddle/py_paddle/swig_paddle.py + ${external_project_dependencies}) +ENDFUNCTION(generate_python_api) + set(API_SOURCES Arguments.cpp ConfigParser.cpp @@ -42,7 +60,7 @@ file(GLOB PY_PADDLE_PYTHON_FILES ${PROJ_ROOT}/paddle/py_paddle/*.py) # TODO(yuyang18) : make wheel name calculated by cmake add_custom_command(OUTPUT ${PROJ_ROOT}/paddle/dist/.timestamp - COMMAND ${PYTHON_EXECUTABLE} setup.py bdist_wheel + COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py bdist_wheel COMMAND ${CMAKE_COMMAND} -E touch dist/.timestamp COMMAND rm -rf py_paddle.egg-info build WORKING_DIRECTORY ${PROJ_ROOT}/paddle @@ -76,5 +94,17 @@ add_dependencies(python_api_wheel python_swig_sources paddle_cuda) if(WITH_TESTING) + SET(PIP_SOURCES_DIR ${PYTHON_SOURCES_DIR}/pip) + ExternalProject_Add(pip + ${EXTERNAL_PROJECT_LOG_ARGS} + GIT_REPOSITORY https://github.com/pypa/pip.git + GIT_TAG 9.0.1 + PREFIX ${PIP_SOURCES_DIR} + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py install + BUILD_IN_SOURCE 
1 + DEPENDS python setuptools python_api_wheel + ) add_subdirectory(test) endif() diff --git a/paddle/api/paddle_api_config.py.in b/paddle/api/paddle_api_config.py.in index 23542b952b7699d66cf64b47d0354e9078ae06d9..e11ee920362aed3ec79a2e62d447d7dde4a99248 100644 --- a/paddle/api/paddle_api_config.py.in +++ b/paddle/api/paddle_api_config.py.in @@ -1,17 +1,17 @@ PADDLE_BUILD_DIR="@CMAKE_CURRENT_BINARY_DIR@/../" WITH_GPU="@WITH_GPU@" -PROTOBUF_LIB="@PROTOBUF_LIBRARY@" -ZLIB_LIB="@ZLIB_LIBRARIES@" +PROTOBUF_LIBRARY="@PROTOBUF_LIBRARY@" +ZLIB_LIBRARIES="@ZLIB_LIBRARIES@" CMAKE_THREAD_LIB="@CMAKE_THREAD_LIBS_INIT@" CMAKE_DL_LIBS="@CMAKE_DL_LIBS@" WITH_PYTHON="@WITH_PYTHON@" PYTHON_LIBRARIES="@PYTHON_LIBRARIES@" -LIBGLOG_LIBRARY="@LIBGLOG_LIBRARY@" +GLOG_LIBRARIES="@GLOG_LIBRARIES@" GFLAGS_LIBRARIES="@GFLAGS_LIBRARIES@" GFLAGS_LOCATION="@GFLAGS_LOCATION@" -CBLAS_LIBRARIES="@CBLAS_LIBS@" +CBLAS_LIBRARIES="@CBLAS_LIBRARIES@" -CUDA_LIBRARIES="@CUDA_LIBRARIES@" +CUDA_LIBRARIES="@CUDA_cudart_shared_LIBRARY@" WITH_COVERALLS="@ON_COVERALLS@" diff --git a/paddle/api/paddle_ld_flags.py b/paddle/api/paddle_ld_flags.py index b4d27b1cc728f92b2210f30b69f3f5899fe81d65..ad5dce209bf8e14120320a58c3cd85d6f6a97688 100644 --- a/paddle/api/paddle_ld_flags.py +++ b/paddle/api/paddle_ld_flags.py @@ -40,14 +40,14 @@ try: self.paddle_build_dir = PADDLE_BUILD_DIR self.paddle_build_dir = os.path.abspath(self.paddle_build_dir) self.with_gpu = PaddleLDFlag.cmake_bool(WITH_GPU) - self.protolib = PROTOBUF_LIB - self.zlib = ZLIB_LIB + self.protolib = PROTOBUF_LIBRARY + self.zlib = ZLIB_LIBRARIES self.thread = CMAKE_THREAD_LIB self.dl_libs = CMAKE_DL_LIBS self.with_python = PaddleLDFlag.cmake_bool(WITH_PYTHON) self.python_libs = PYTHON_LIBRARIES - self.glog_libs = LIBGLOG_LIBRARY + self.glog_libs = GLOG_LIBRARIES self.with_coverage = PaddleLDFlag.cmake_bool(WITH_COVERALLS) self.gflags_libs = GFLAGS_LIBRARIES diff --git a/paddle/api/test/CMakeLists.txt b/paddle/api/test/CMakeLists.txt index 08a0fe96a004d38b81d0bac881da1faeb52685f4..a2fa623c80087d42e6a2a5c05f62eba4997f8ec4 100644 --- a/paddle/api/test/CMakeLists.txt +++ b/paddle/api/test/CMakeLists.txt @@ -1,2 +1,2 @@ add_test(NAME test_swig_api - COMMAND bash ${PROJ_ROOT}/paddle/api/test/run_tests.sh) + COMMAND bash ${PROJ_ROOT}/paddle/api/test/run_tests.sh ${PYTHON_EXECUTABLE}) diff --git a/paddle/api/test/run_tests.sh b/paddle/api/test/run_tests.sh index 2f12ba026430ba7adb6f4dee11ed17ea3ad3f36d..bcf06afa86aaa1a3151aeb966b54f69657c541e3 100755 --- a/paddle/api/test/run_tests.sh +++ b/paddle/api/test/run_tests.sh @@ -20,11 +20,7 @@ popd > /dev/null cd $SCRIPTPATH -rm -rf .test_env -virtualenv .test_env -source .test_env/bin/activate - -pip --timeout 600 install ../../dist/*.whl +$1 -m pip install ../../dist/*.whl test_list="testArguments.py testGradientMachine.py testMatrix.py testVector.py testTrain.py testTrainer.py" @@ -33,7 +29,7 @@ export PYTHONPATH=$PWD/../../../python/ for fn in $test_list do echo "test $fn" - python $fn + $1 $fn if [ $? 
-ne 0 ]; then exit 1 fi diff --git a/paddle/cuda/CMakeLists.txt b/paddle/cuda/CMakeLists.txt index aa1ff4a771c4a1c64be86893e7b2261ae65f0f94..57fb89608f4bcf3e6829fe850a61c2a626adfbdc 100755 --- a/paddle/cuda/CMakeLists.txt +++ b/paddle/cuda/CMakeLists.txt @@ -88,6 +88,8 @@ else() ${CUDA_CXX_SOURCES}) endif() +add_dependencies(paddle_cuda ${external_project_dependencies}) + add_style_check_target(paddle_cuda ${CUDA_SOURCES} ${CUDA_HEADERS} diff --git a/paddle/cuda/include/hl_warpctc_wrap.h b/paddle/cuda/include/hl_warpctc_wrap.h index 79bf6c3db7f876009d98a62b6523588f021886e8..7885ae570148c0b9870089baf22b6bacb786f995 100644 --- a/paddle/cuda/include/hl_warpctc_wrap.h +++ b/paddle/cuda/include/hl_warpctc_wrap.h @@ -15,8 +15,8 @@ limitations under the License. */ #ifndef HL_WARPCTC_WRAP_H_ #define HL_WARPCTC_WRAP_H_ +#include "ctc.h" #include "hl_base.h" -#include "warp-ctc/include/ctc.h" typedef ctcStatus_t hl_warpctc_status_t; typedef ctcOptions hl_warpctc_options_t; diff --git a/paddle/function/CMakeLists.txt b/paddle/function/CMakeLists.txt index 42a9bd470c16dbfab6b6e6a6b713e471160514bd..de85eeca821742e1d39d5ce26f873238d4359cba 100644 --- a/paddle/function/CMakeLists.txt +++ b/paddle/function/CMakeLists.txt @@ -10,6 +10,8 @@ if(WITH_GPU) endif() add_library(paddle_function STATIC ${cpp_files} ${cu_objs}) +add_dependencies(paddle_function ${external_project_dependencies}) + if(WITH_GPU) if(WITH_TESTING) diff --git a/paddle/function/ContextProjectionOp.cpp b/paddle/function/ContextProjectionOp.cpp index bd367a859e10c0522206cd0215970922905905ed..07907fc1ba7973c728c3a882e4be6b1a7ef7a97a 100644 --- a/paddle/function/ContextProjectionOp.cpp +++ b/paddle/function/ContextProjectionOp.cpp @@ -85,15 +85,15 @@ public: void calc(const Arguments& inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(3, inputs.size()); - CHECK_EQ(1, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(3, static_cast(inputs.size())); + CHECK_EQ(1, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); CHECK(outputs[0].getData() && inputs[0].getData() && inputs[2].getData()); - CHECK_EQ(outputs[0].dims_.size(), 2); - CHECK_EQ(inputs[0].dims_.size(), 2); - CHECK_EQ(inputs[1].dims_.size(), 2); - CHECK_EQ(inputs[2].dims_.size(), 1); + CHECK_EQ(static_cast(outputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[1].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[2].dims_.size()), 1); /// dim of output = dim of input * context_length CHECK_EQ(outputs[0].dims_[1], inputs[0].dims_[1] * context_length_); /// dim of input == dim of weight @@ -202,15 +202,15 @@ public: void calc(const Arguments& inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(3, inputs.size()); - CHECK_EQ(1, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(3, static_cast(inputs.size())); + CHECK_EQ(1, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); CHECK(outputs[0].getData() && inputs[2].getData()); - CHECK_EQ(outputs[0].dims_.size(), 2); - CHECK_EQ(inputs[0].dims_.size(), 2); - CHECK_EQ(inputs[1].dims_.size(), 2); - CHECK_EQ(inputs[2].dims_.size(), 1); + CHECK_EQ(static_cast(outputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[1].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[2].dims_.size()), 1); /// dim of input == dim of weight CHECK_EQ(inputs[0].dims_[1], inputs[1].dims_[1]); @@ -269,13 +269,13 @@ public: void calc(const Arguments& 
inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(2, inputs.size()); - CHECK_EQ(1, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(2, static_cast(inputs.size())); + CHECK_EQ(1, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); CHECK(inputs[0].getData() && outputs[0].getData() && inputs[1].getData()); - CHECK_EQ(outputs[0].dims_.size(), 2); - CHECK_EQ(inputs[0].dims_.size(), 2); - CHECK_EQ(inputs[1].dims_.size(), 1); + CHECK_EQ(static_cast(outputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[1].dims_.size()), 1); CHECK_EQ(outputs[0].dims_[1], inputs[0].dims_[1] * context_length_); /// input and output has the same batch_size CHECK_EQ(inputs[0].dims_[0], outputs[0].dims_[0]); @@ -317,14 +317,14 @@ public: void calc(const Arguments& inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(2, inputs.size()); - CHECK_EQ(1, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(2, static_cast(inputs.size())); + CHECK_EQ(1, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); CHECK(inputs[0].getData() && outputs[0].getData() && inputs[1].getData()); - CHECK_EQ(outputs[0].dims_.size(), 2); - CHECK_EQ(inputs[0].dims_.size(), 2); - CHECK_EQ(inputs[1].dims_.size(), 1); + CHECK_EQ(static_cast(outputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 2); + CHECK_EQ(static_cast(inputs[1].dims_.size()), 1); CHECK_EQ(outputs[0].dims_[1], inputs[0].dims_[1] * context_length_); auto out_grad_mat = std::make_shared::type>( diff --git a/paddle/function/CrossMapNormalOp.cpp b/paddle/function/CrossMapNormalOp.cpp index f13eb78d27d900064f8cf0dc4194d1e34ded2b14..96a7a30eebbf0f01fa89ea91110ddb826fd2f64b 100644 --- a/paddle/function/CrossMapNormalOp.cpp +++ b/paddle/function/CrossMapNormalOp.cpp @@ -128,11 +128,11 @@ public: void calc(const Arguments& inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(1, inputs.size()); - CHECK_EQ(2, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(1, static_cast(inputs.size())); + CHECK_EQ(2, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); - CHECK_EQ(inputs[0].dims_.size(), 4); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 4); for (size_t i = 0; i < inputs[0].dims_.size(); i++) { CHECK_EQ(inputs[0].dims_[i], outputs[0].dims_[i]); CHECK_EQ(inputs[0].dims_[i], outputs[1].dims_[i]); @@ -180,11 +180,11 @@ public: void calc(const Arguments& inputs, const Arguments& outputs, const Arguments& inouts) override { - CHECK_EQ(4, inputs.size()); - CHECK_EQ(1, outputs.size()); - CHECK_EQ(0, inouts.size()); + CHECK_EQ(4, static_cast(inputs.size())); + CHECK_EQ(1, static_cast(outputs.size())); + CHECK_EQ(0, static_cast(inouts.size())); - CHECK_EQ(inputs[0].dims_.size(), 4); + CHECK_EQ(static_cast(inputs[0].dims_.size()), 4); for (size_t i = 0; i < inputs[0].dims_.size(); i++) { CHECK_EQ(inputs[0].dims_[i], inputs[1].dims_[i]); CHECK_EQ(inputs[0].dims_[i], inputs[2].dims_[i]); diff --git a/paddle/function/Function.cpp b/paddle/function/Function.cpp index 6f82a8d053bc203eed44bd0d8d4c47d23a15268d..614e76b8ac0c9a9145a27f5b532ea63bef7f90f0 100644 --- a/paddle/function/Function.cpp +++ b/paddle/function/Function.cpp @@ -46,28 +46,32 @@ bool FuncConfig::get(const std::string& key) const { template <> FuncConfig& FuncConfig::set(const std::string& key, size_t v) { - CHECK_EQ(valueMap_.count(key), 0) << "Duplicated value: " << key; + 
CHECK_EQ(static_cast(valueMap_.count(key)), 0) << "Duplicated value: " + << key; valueMap_[key].s = v; return *this; } template <> FuncConfig& FuncConfig::set(const std::string& key, real v) { - CHECK_EQ(valueMap_.count(key), 0) << "Duplicated value: " << key; + CHECK_EQ(static_cast(valueMap_.count(key)), 0) << "Duplicated value: " + << key; valueMap_[key].r = v; return *this; } template <> FuncConfig& FuncConfig::set(const std::string& key, int v) { - CHECK_EQ(valueMap_.count(key), 0) << "Duplicated value: " << key; + CHECK_EQ(static_cast(valueMap_.count(key)), 0) << "Duplicated value: " + << key; valueMap_[key].i = v; return *this; } template <> FuncConfig& FuncConfig::set(const std::string& key, bool v) { - CHECK_EQ(valueMap_.count(key), 0) << "Duplicated value: " << key; + CHECK_EQ(static_cast(valueMap_.count(key)), 0) << "Duplicated value: " + << key; valueMap_[key].b = v; return *this; } diff --git a/paddle/gserver/layers/ContextProjection.cpp b/paddle/gserver/layers/ContextProjection.cpp index e947b2b9ecbebda11db5c049e1606a2d5926c28c..ee4db219890a135d786c46827632d02d1db5b760 100644 --- a/paddle/gserver/layers/ContextProjection.cpp +++ b/paddle/gserver/layers/ContextProjection.cpp @@ -111,7 +111,8 @@ void ContextProjection::forward() { size_t dim = out_->value->getWidth(); CHECK_EQ(dim, input_dim * config_.context_length()); size_t batch_size = in_->value->getHeight(); - CHECK_EQ(forward_.size(), 1) << "Only one forward function here"; + CHECK_EQ(static_cast(forward_.size()), 1) + << "Only one forward function here"; REGISTER_TIMER_INFO("ContextProjectionForward", getName().c_str()); bool is_padding = config_.trainable_padding(); @@ -154,7 +155,8 @@ void ContextProjection::backward(const UpdateCallback& callback) { CHECK_EQ(dim, input_dim * config_.context_length()); size_t batch_size = in_->value->getHeight(); CHECK_EQ(batch_size, out_->value->getHeight()); - CHECK_EQ(backward_.size(), 1) << "Only one backward function here"; + CHECK_EQ(static_cast(backward_.size()), 1) + << "Only one backward function here"; REGISTER_TIMER_INFO("ContextProjectionBackward", getName().c_str()); bool is_padding = config_.trainable_padding(); diff --git a/paddle/gserver/layers/ConvProjection.cpp b/paddle/gserver/layers/ConvProjection.cpp index e1c4b91ace21522a3bc640dfc4eaa1a42668ed02..0281170bc59855f6f4d2f4212523275a92d202d5 100644 --- a/paddle/gserver/layers/ConvProjection.cpp +++ b/paddle/gserver/layers/ConvProjection.cpp @@ -130,7 +130,8 @@ void ConvProjection::reshapeTensorDesc(int batchSize) { void ConvProjection::reshape(int batchSize) { size_t width = calOutputSize(); CHECK_EQ(width, out_->value->getWidth()); - CHECK_EQ(channels_ * imageH_ * imageW_, in_->value->getWidth()) + CHECK_EQ(static_cast(channels_ * imageH_ * imageW_), + in_->value->getWidth()) << "Wrong input size for convolution" << " channels=" << channels_ << " imageH=" << imageH_ << " imageW=" << imageW_ << " inputSize=" << in_->value->getWidth(); diff --git a/paddle/gserver/tests/CMakeLists.txt b/paddle/gserver/tests/CMakeLists.txt index 6775563b2ba9a443069b3923b25c5682988babea..0caa5e1e11e6d42fadfa87149814c4b77b3b6271 100644 --- a/paddle/gserver/tests/CMakeLists.txt +++ b/paddle/gserver/tests/CMakeLists.txt @@ -80,7 +80,7 @@ if(NOT WITH_DOUBLE) test_WarpCTCLayer.cpp) add_test(NAME test_WarpCTCLayer - COMMAND ${CMAKE_CURRENT_BINARY_DIR}/test_WarpCTCLayer --warpctc_dir=${PROJ_ROOT}/warp-ctc/build + COMMAND ${CMAKE_CURRENT_BINARY_DIR}/test_WarpCTCLayer --warpctc_dir=${WARPCTC_LIB_DIR} WORKING_DIRECTORY ${PROJ_ROOT}/paddle) endif() 
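The `--warpctc_dir` flag above now points at `${WARPCTC_LIB_DIR}`, a variable that the new `cmake/external/warpctc.cmake` module (included from the root CMakeLists.txt) is expected to define when it builds warp-ctc as an ExternalProject instead of the old git submodule; the same module is presumably what lets `hl_warpctc_wrap.h` include plain `"ctc.h"` earlier in this patch. The module body is not part of this diff, so the sketch below only illustrates the assumed pattern: `THIRD_PARTY_PATH`, `WARPCTC_SOURCES_DIR`, `WARPCTC_INSTALL_DIR`, and the exact ExternalProject arguments are placeholders, not the repository's real code.

```cmake
# Illustrative sketch of a cmake/external/warpctc.cmake module; directory
# variables and ExternalProject arguments are assumptions, not repository code.
INCLUDE(ExternalProject)

SET(WARPCTC_SOURCES_DIR ${THIRD_PARTY_PATH}/warpctc)          # assumed layout
SET(WARPCTC_INSTALL_DIR ${THIRD_PARTY_PATH}/install/warpctc)  # assumed layout
SET(WARPCTC_INCLUDE_DIR "${WARPCTC_INSTALL_DIR}/include" CACHE PATH "warp-ctc include dir" FORCE)
SET(WARPCTC_LIB_DIR     "${WARPCTC_INSTALL_DIR}/lib"     CACHE PATH "warp-ctc lib dir" FORCE)

ExternalProject_Add(
    warpctc
    GIT_REPOSITORY "https://github.com/baidu-research/warp-ctc.git"
    PREFIX         ${WARPCTC_SOURCES_DIR}
    CMAKE_ARGS     -DCMAKE_INSTALL_PREFIX=${WARPCTC_INSTALL_DIR}
                   -DWITH_GPU=${WITH_GPU}
    UPDATE_COMMAND ""
)

# Headers land under WARPCTC_INCLUDE_DIR, so sources can #include "ctc.h"
# directly, and test_WarpCTCLayer can be pointed at ${WARPCTC_LIB_DIR} as above.
INCLUDE_DIRECTORIES(${WARPCTC_INCLUDE_DIR})
LIST(APPEND external_project_dependencies warpctc)
```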
diff --git a/paddle/gserver/tests/LayerGradUtil.cpp b/paddle/gserver/tests/LayerGradUtil.cpp index 57c176810fddf96828c210807673b7d1a3c739c0..ae016e74eaa84f7c43a30c09c8c4577e25360c4e 100644 --- a/paddle/gserver/tests/LayerGradUtil.cpp +++ b/paddle/gserver/tests/LayerGradUtil.cpp @@ -310,7 +310,7 @@ void initDataLayer(TestConfig testConf, testConf.inputDefs[i].labelSeqStartPositions; if (labelSeqStartPositions.size() != 0) { CHECK(!sequenceStartPositions); - CHECK_GE(labelSeqStartPositions.size(), 2); + CHECK_GE(static_cast(labelSeqStartPositions.size()), 2); sequenceStartPositions = ICpuGpuVector::create(labelSeqStartPositions.size(), useGpu); diff --git a/paddle/gserver/tests/test_BatchNorm.cpp b/paddle/gserver/tests/test_BatchNorm.cpp index 822db5a3c40a80bfed4d06f8f28d21253ae44f87..d07299bfe3c4147742384a45dc6f1698d9c382f4 100644 --- a/paddle/gserver/tests/test_BatchNorm.cpp +++ b/paddle/gserver/tests/test_BatchNorm.cpp @@ -114,8 +114,8 @@ TEST(Layer, batchNorm) { bnLayer->forward(PASS_GC); convLayer->forward(PASS_GC); - CHECK_EQ(convLayer->getOutputValue()->getHeight(), 100); - CHECK_EQ(convLayer->getOutputValue()->getWidth(), 576); + CHECK_EQ(static_cast(convLayer->getOutputValue()->getHeight()), 100); + CHECK_EQ(static_cast(convLayer->getOutputValue()->getWidth()), 576); } int main(int argc, char** argv) { diff --git a/paddle/gserver/tests/test_PyDataProvider2.cpp b/paddle/gserver/tests/test_PyDataProvider2.cpp index 5f8bc5ecd0f77efc6dcda0330f124ca6cab7f277..7e193eb31a03e6a6b8b0b02e89608a0e02b9e248 100644 --- a/paddle/gserver/tests/test_PyDataProvider2.cpp +++ b/paddle/gserver/tests/test_PyDataProvider2.cpp @@ -293,7 +293,7 @@ TEST(PyDataProvider2, can_over_batch_size) { while (true) { int64_t realBatchSize = provider->getNextBatchInternal(batchSize, &batch); if (realBatchSize) { - CHECK_LE(realBatchSize, batchSize); + CHECK_LE(static_cast(realBatchSize), batchSize); } else { break; } diff --git a/paddle/scripts/travis/before_install.linux.sh b/paddle/scripts/travis/before_install.linux.sh deleted file mode 100755 index 9620bff6bcf77c6e87f149e8e33408170dd8e507..0000000000000000000000000000000000000000 --- a/paddle/scripts/travis/before_install.linux.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -set -e -pip install protobuf -cd /tmp -wget https://github.com/google/protobuf/archive/v3.0.2.tar.gz -O protobuf.tar.gz -tar xf protobuf.tar.gz -cd protobuf* -./autogen.sh -./configure --prefix=/usr/ -make -j 2 install -cd .. -rm -rf protobuf* - -pushd /usr/src/gtest -cmake . -make -sudo cp *.a /usr/lib -popd diff --git a/paddle/scripts/travis/before_install.osx.sh b/paddle/scripts/travis/before_install.osx.sh index bd88ed39132f19ca7cfc4f0dd6acdbc6b83e94ab..fd113d313e3140ad11460c1c288927b08fea88c4 100755 --- a/paddle/scripts/travis/before_install.osx.sh +++ b/paddle/scripts/travis/before_install.osx.sh @@ -1,12 +1,4 @@ #!/bin/bash brew update brew tap homebrew/science -brew install python -sudo pip install --upgrade protobuf -brew install cmake python glog gflags openblas wget md5sha1sum protobuf - -wget https://github.com/google/googletest/archive/release-1.8.0.tar.gz -O gtest.tar.gz -tar xf gtest.tar.gz -cd googletest-release-1.8.0/ -cmake . 
-make install +brew install openblas md5sha1sum diff --git a/paddle/scripts/travis/build_and_test.sh b/paddle/scripts/travis/build_and_test.sh index 9caeb21beb15ee5281f9a6aefcfd59b94b91e48a..ffc48eae66aa615aab1ac007f8987ba6aba3ed8f 100755 --- a/paddle/scripts/travis/build_and_test.sh +++ b/paddle/scripts/travis/build_and_test.sh @@ -1,27 +1,26 @@ #!/bin/bash -./build_submodules.sh source ./common.sh -CMAKE_EXTRA="" + +python -c 'import pip; print(pip.pep425tags.get_supported())' + if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then - CMAKE_EXTRA="-DPYTHON_LIBRARY=/usr/local/Cellar/python/2.7.12_1/Frameworks/Python.framework/Versions/2.7/lib/python2.7/config/libpython2.7.dylib" + CMAKE_EXTRA="-DWITH_SWIG_PY=OFF" else CMAKE_EXTRA="-DWITH_SWIG_PY=ON" fi - -cmake .. -DCMAKE_BUILD_TYPE=Debug -DWITH_GPU=OFF -DWITH_DOC=OFF -DWITH_TESTING=ON -DON_TRAVIS=ON -DON_COVERALLS=ON ${CMAKE_EXTRA} +cmake .. -DWITH_GPU=OFF -DWITH_DOC=OFF -DWITH_TESTING=ON -DON_TRAVIS=ON -DON_COVERALLS=ON ${CMAKE_EXTRA} NPROC=1 if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then NRPOC=`nproc` make -j $NPROC make coveralls + sudo make install elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then NPROC=`sysctl -n hw.ncpu` make -j $NPROC env CTEST_OUTPUT_ON_FAILURE=1 make test ARGS="-j $NPROC" + sudo make install + sudo paddle version fi - - -sudo make install -sudo paddle version diff --git a/paddle/scripts/travis/build_submodules.sh b/paddle/scripts/travis/build_submodules.sh deleted file mode 100755 index d458bf92bf455609de601c60402101d09765dfe4..0000000000000000000000000000000000000000 --- a/paddle/scripts/travis/build_submodules.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -set -e -WORK_DIR=$PWD -PROJ_ROOT=$(git rev-parse --show-cdup) -SUBMODULES=$(grep path ${PROJ_ROOT}.gitmodules | sed 's/^.*path = //') - -for module in $SUBMODULES -do - case $module in - "warp-ctc") - if [ -d ${PROJ_ROOT}warp-ctc/build ]; then - rm -rf ${PROJ_ROOT}warp-ctc/build - fi - mkdir ${PROJ_ROOT}warp-ctc/build - cd ${PROJ_ROOT}warp-ctc/build - cmake ..; make - ;; - esac -done -cd $WORK_DIR diff --git a/paddle/setup.py.in b/paddle/setup.py.in index 464ad632868bd1fd4d88547212421302ca0b2116..e3650bf1c0c4692a50e9731fcd8b832865eaac62 100644 --- a/paddle/setup.py.in +++ b/paddle/setup.py.in @@ -14,7 +14,9 @@ # This file is used to build paddle python binding package. 
# It will be invoked by Makefile that generated by COMAKE + from setuptools import setup, Extension + import numpy as np import api.paddle_ld_flags import platform diff --git a/paddle/trainer/tests/CMakeLists.txt b/paddle/trainer/tests/CMakeLists.txt index 28c3d6f2631f9e28e3f1ff086b1e8edf994e73a4..22e07bd0e98a4cd36e6ed5860bcff0d4ae7cb1d2 100644 --- a/paddle/trainer/tests/CMakeLists.txt +++ b/paddle/trainer/tests/CMakeLists.txt @@ -17,9 +17,10 @@ add_test(NAME test_Compare ################# test_Trainer ########################### add_unittest_without_exec(test_Trainer test_Trainer.cpp) -set(diy_dll_dir ${CMAKE_CURRENT_BINARY_DIR}/../../gserver/tests) add_test(NAME test_Trainer COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/ + ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/paddle/trainer/tests/gen_proto_data.py && + ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/ ${CMAKE_CURRENT_BINARY_DIR}/test_Trainer WORKING_DIRECTORY ${PROJ_ROOT}/paddle/) @@ -82,5 +83,5 @@ add_test(NAME test_PyDataProviderWrapper #################### test_config_parser ######################### add_test(NAME test_config_parser COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/ - python ${PROJ_ROOT}/paddle/trainer/tests/config_parser_test.py + ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/paddle/trainer/tests/config_parser_test.py WORKING_DIRECTORY ${PROJ_ROOT}/paddle/) diff --git a/paddle/trainer/tests/test_Trainer.cpp b/paddle/trainer/tests/test_Trainer.cpp index 371282dd6bb9a995bc6ae8b2a5bd708f831d7e33..264bc46ebcd0aa17fd605e537fcb2c316ef31162 100644 --- a/paddle/trainer/tests/test_Trainer.cpp +++ b/paddle/trainer/tests/test_Trainer.cpp @@ -96,11 +96,6 @@ TEST(checkGradient, multi) { TEST(checkGradient, hsigmoid) { checkGradientTest(configFile2, false, false); } TEST(checkGradient, chunk) { -#if defined(__APPLE__) || defined(__OSX__) - EXPECT_EQ(0, system("python trainer/tests/gen_proto_data.py")); -#else - EXPECT_EQ(0, system("python2 trainer/tests/gen_proto_data.py")); -#endif checkGradientTest(configFile3, false, false); #ifndef PADDLE_ONLY_CPU checkGradientTest(configFile3, true, true); diff --git a/paddle/utils/.gitignore b/paddle/utils/.gitignore index f2cfd7409412de68f4183daebcb48e7a3ae37672..956b606a18cae1bb11322accfa174ae5ce1580de 100644 --- a/paddle/utils/.gitignore +++ b/paddle/utils/.gitignore @@ -1 +1,2 @@ enable_virtualenv.c +PythonUtil.cpp diff --git a/paddle/utils/CMakeLists.txt b/paddle/utils/CMakeLists.txt index 45240b5002aa18be4a9b7e3ec3b754eb83ca0e09..10d906ee16656a808122b81d8b2fef55b8e7b7e9 100644 --- a/paddle/utils/CMakeLists.txt +++ b/paddle/utils/CMakeLists.txt @@ -1,5 +1,7 @@ # The utilities for paddle +configure_file(PythonUtil.cpp.in ${PROJ_ROOT}/paddle/utils/PythonUtil.cpp) + file(GLOB UTIL_HEADERS . *.h) file(GLOB UTIL_SOURCES . 
*.cpp) create_resources(enable_virtualenv.py enable_virtualenv.c) diff --git a/paddle/utils/PythonUtil.cpp b/paddle/utils/PythonUtil.cpp.in similarity index 98% rename from paddle/utils/PythonUtil.cpp rename to paddle/utils/PythonUtil.cpp.in index 7faeff55c28b9065179ad27b3b604a9f411249e5..e0caaf4cd6cf429e57ee221a0b0957a905b89973 100644 --- a/paddle/utils/PythonUtil.cpp +++ b/paddle/utils/PythonUtil.cpp.in @@ -195,6 +195,8 @@ extern const char enable_virtualenv_py[]; } void initPython(int argc, char** argv) { #ifndef PADDLE_NO_PYTHON + char PythonHome[] = "@PYTHON_INSTALL_DIR@"; // NOLINT + Py_SetPythonHome(PythonHome); Py_SetProgramName(argv[0]); Py_Initialize(); PySys_SetArgv(argc, argv); diff --git a/proto/CMakeLists.txt b/proto/CMakeLists.txt index 2c40070eca44d8656d7ce82157a1b840092b9965..e854b2b427e550ec491dacf931cc2d2cce7ba6c2 100644 --- a/proto/CMakeLists.txt +++ b/proto/CMakeLists.txt @@ -18,10 +18,10 @@ foreach(filename ${proto_filenames}) ${PROTO_GEN} ${CUR_PROTO_GEN}) add_custom_command(OUTPUT ${CUR_PROTO_GEN} - COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} + COMMAND env ${py_env} ${PROTOBUF_PROTOC_EXECUTABLE} --cpp_out ${CMAKE_CURRENT_BINARY_DIR} - --proto_path ${PROJ_ROOT}/proto ${PROJ_ROOT}/proto/${filename} - DEPENDS ${filename}) + --proto_path ${PROJ_ROOT}/proto ${PROJ_ROOT}/proto/${filename} + DEPENDS ${filename} ${external_project_dependencies}) set(CUR_PROTO_GEN_PY ${PROJ_ROOT}/paddle/python/paddle/proto/${base_filename}_pb2.py) @@ -29,9 +29,9 @@ foreach(filename ${proto_filenames}) ${CUR_PROTO_GEN_PY} ${PROTO_GEN_PY}) add_custom_command(OUTPUT ${CUR_PROTO_GEN_PY} - COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --python_out ${PROJ_ROOT}/python/paddle/proto - --proto_path ${PROJ_ROOT}/proto ${PROJ_ROOT}/proto/${filename} - DEPENDS ${filename}) + COMMAND env ${py_env} ${PROTOBUF_PROTOC_EXECUTABLE} --python_out ${PROJ_ROOT}/python/paddle/proto + --proto_path ${PROJ_ROOT}/proto ${PROJ_ROOT}/proto/${filename} + DEPENDS ${filename} ${external_project_dependencies}) endforeach() include_directories(${CMAKE_CURRENT_BINARY_DIR}/proto) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index dce0b909524369926eda54763e571706b79daeaf..1cda4762eb2a55175d6c9faee98aaeaa1f763890 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -10,26 +10,17 @@ set(PY_FILES paddle/__init__.py ${HELPERS_PY_FILES} ${UTILS_PY_FILES}) -set(PADDLE_INTERNAL_PACKAGE "") -if (PADDLE_WITH_INTERNAL) - set(PADDLE_INTERNAL_PACKAGE "paddle.internals") -endif() - configure_file(${CMAKE_CURRENT_SOURCE_DIR}/setup.py.in ${CMAKE_CURRENT_BINARY_DIR}/setup.py) add_custom_command(OUTPUT ${OUTPUT_DIR}/.timestamp - COMMAND ${PYTHON_EXECUTABLE} setup.py bdist_wheel + COMMAND env ${py_env} ${PYTHON_EXECUTABLE} setup.py bdist_wheel COMMAND ${CMAKE_COMMAND} -E touch ${OUTPUT_DIR}/.timestamp - DEPENDS gen_proto_py ${PY_FILES}) + DEPENDS gen_proto_py ${PY_FILES} ${external_project_dependencies}) add_custom_target(paddle_python ALL DEPENDS ${OUTPUT_DIR}/.timestamp) -find_python_module(pip REQUIRED) -find_python_module(wheel REQUIRED) -find_python_module(google.protobuf REQUIRED) - add_subdirectory(paddle/trainer_config_helpers/tests) install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/dist/ diff --git a/python/paddle/trainer_config_helpers/tests/CMakeLists.txt b/python/paddle/trainer_config_helpers/tests/CMakeLists.txt index d1a9843d326669711bf3d0769df1b804cfcfa673..403aafabe9143472dd2f0857ecd25f7acf515b6c 100644 --- a/python/paddle/trainer_config_helpers/tests/CMakeLists.txt +++ 
b/python/paddle/trainer_config_helpers/tests/CMakeLists.txt @@ -1,12 +1,12 @@ #################### test_config_parser ######################### add_test(NAME layers_test COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/ - python ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/layers_test.py + ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/layers_test.py WORKING_DIRECTORY ${PROJ_ROOT}/python/paddle) add_test(NAME test_reset_hook COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/ - python ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/test_reset_hook.py + ${PYTHON_EXECUTABLE} ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/test_reset_hook.py WORKING_DIRECTORY ${PROJ_ROOT}/python/paddle) if (PROTOBUF_3) @@ -14,12 +14,12 @@ if (PROTOBUF_3) ProtobufEqualMain.cpp) add_test(NAME test_layerHelpers COMMAND - ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh + ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/protobuf_equal ) else() add_test(NAME test_layerHelpers COMMAND - ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh + ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh ${PYTHON_EXECUTABLE} ) endif() diff --git a/python/paddle/trainer_config_helpers/tests/configs/generate_protostr.sh b/python/paddle/trainer_config_helpers/tests/configs/generate_protostr.sh index a54af94ce3db4ed300dee697b30516c3b6448d7c..ee5961af75ebb33af52f9add645f793015288f4e 100755 --- a/python/paddle/trainer_config_helpers/tests/configs/generate_protostr.sh +++ b/python/paddle/trainer_config_helpers/tests/configs/generate_protostr.sh @@ -10,13 +10,13 @@ protostr=$PWD/protostr for conf in ${configs[*]} do echo "Generating " $conf - python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unittest - cat ${conf}.py |python test_config_parser_for_non_file_config.py > $protostr/$conf.protostr.non_file_config.unittest + $1 -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unittest + cat ${conf}.py |$1 test_config_parser_for_non_file_config.py > $protostr/$conf.protostr.non_file_config.unittest done for conf in ${whole_configs[*]} do echo "Generating " $conf - python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unittest - cat ${conf}.py |python test_config_parser_for_non_file_config.py --whole > $protostr/$conf.protostr.non_file_config.unittest + $1 -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unittest + cat ${conf}.py |$1 test_config_parser_for_non_file_config.py --whole > $protostr/$conf.protostr.non_file_config.unittest done diff --git a/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh b/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh index e984ee70625456241b3cfe6202fdadaa3807d33c..a37eb6439e6d2803a417883f0aed2a5d56d059b9 100755 --- a/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh +++ b/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh @@ -7,7 +7,7 @@ protostr=`dirname $0`/protostr files=`ls $protostr | grep -v "unittest"` -./generate_protostr.sh +./generate_protostr.sh $1 . 
./file_list.sh diff --git a/python/setup.py.in b/python/setup.py.in index d2fb95f27ff2f0673050e699316dde504dbf28f6..b66a42e87c78701e9eb26b1b7dc8f46a95035a76 100644 --- a/python/setup.py.in +++ b/python/setup.py.in @@ -1,16 +1,11 @@ from setuptools import setup -INTERNAL_PACKAGE='${PADDLE_INTERNAL_PACKAGE}' - packages=['paddle', 'paddle.proto', 'paddle.trainer', 'paddle.trainer_config_helpers', 'paddle.utils'] -if len(INTERNAL_PACKAGE) != 0: - packages.append(INTERNAL_PACKAGE) - setup(name='paddle', version='${PADDLE_VERSION}', description='Parallel Distributed Deep Learning', diff --git a/warp-ctc b/warp-ctc deleted file mode 160000 index bd535c8d44e03c8ebd2d768e06c8c05fdccd11d2..0000000000000000000000000000000000000000 --- a/warp-ctc +++ /dev/null @@ -1 +0,0 @@ -Subproject commit bd535c8d44e03c8ebd2d768e06c8c05fdccd11d2
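Taken together with the submodule removal above, the patch converges on two consumer-side idioms that recur throughout these CMakeLists.txt changes: ordering targets after `${external_project_dependencies}` so the third_party builds finish first, and running Python tooling through `env ${py_env}` so the interpreter selected by the external python module is used. A minimal sketch of both idioms follows; `my_lib`, `foo.proto`, and `gen_foo_py` are hypothetical names used only for illustration, and `${py_env}` is assumed to be a set of environment assignments defined by `cmake/external/python.cmake`.

```cmake
# Hypothetical CMakeLists.txt fragment showing the two idioms this patch
# threads through the build; target and file names are illustrative only.

add_library(my_lib STATIC foo.cpp)

# 1. Compile only after the ExternalProject targets have built and installed
#    their headers/libraries into third_party/.
add_dependencies(my_lib ${external_project_dependencies})

# 2. Invoke protoc through the environment assembled for the downloaded
#    Python (the same `env ${py_env}` wrapper used in proto/ and python/ above).
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/foo_pb2.py
    COMMAND env ${py_env} ${PROTOBUF_PROTOC_EXECUTABLE}
            --python_out ${CMAKE_CURRENT_BINARY_DIR}
            --proto_path ${CMAKE_CURRENT_SOURCE_DIR}
            ${CMAKE_CURRENT_SOURCE_DIR}/foo.proto
    DEPENDS foo.proto ${external_project_dependencies})
add_custom_target(gen_foo_py ALL
    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/foo_pb2.py)
```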