Commit c62357e0 authored by Liu Yiqun

Merge branch 'develop' into cmake_protobuf

......@@ -33,20 +33,18 @@ find_library(MKL_INTEL_LP64 NAMES mkl_intel_lp64 PATHS
${MKL_ROOT}/lib
${MKL_ROOT}/lib/intel64)
if(MKL_INC_DIR AND MKL_CORE_LIB AND MKL_SEQUENTIAL_LIB AND MKL_INTEL_LP64)
if(MKL_LAPACK_INC_DIR AND MKL_INC_DIR AND MKL_CORE_LIB AND MKL_SEQUENTIAL_LIB AND MKL_INTEL_LP64)
set(CBLAS_FOUND ON)
set(CBLAS_PROVIDER MKL)
set(CBLAS_INC_DIR ${MKL_INC_DIR})
set(CBLAS_LIBRARIES ${MKL_INTEL_LP64}
${MKL_SEQUENTIAL_LIB}
${MKL_CORE_LIB})
set(CBLAS_INC_DIR ${MKL_INC_DIR} ${MKL_LAPACK_INC_DIR})
set(CBLAS_LIBRARIES ${MKL_INTEL_LP64} ${MKL_SEQUENTIAL_LIB} ${MKL_CORE_LIB})
add_definitions(-DPADDLE_USE_MKL)
message(STATUS "Found MKL (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
set(CBLAS_FOUND ON)
if(${MKL_LAPACK_INC_DIR})
add_definitions(-DLAPACK_FOUND)
message(STATUS "Found MKL (include: ${MKL_INC_DIR}, library: ${CBLAS_LIBRARIES})")
message(STATUS "Found lapack in MKL (include: ${MKL_LAPACK_INC_DIR})")
endif()
return() # return file.
return()
endif()
## Then find atlas.
......@@ -68,20 +66,20 @@ find_path(ATLAS_CLAPACK_INC_DIR NAMES clapack.h
PATHS ${ATLAS_INCLUDE_SEARCH_PATHS})
find_library(ATLAS_CBLAS_LIB NAMES cblas libcblas.so.3
PATHS ${ATLAS_LIB_SEARCH_PATHS})
find_library(ATLAS_LIB NAMES lapack_atlas liblapack_atlas.so.3
find_library(ATLAS_CLAPACK_LIB NAMES lapack_atlas liblapack_atlas.so.3
PATHS ${ATLAS_LIB_SEARCH_PATHS})
if(ATLAS_INC_DIR AND ATLAS_CBLAS_LIB AND ATLAS_LIB AND NOT CBLAS_FOUND)
if(ATLAS_CLAPACK_INC_DIR AND ATLAS_INC_DIR AND ATLAS_CBLAS_LIB AND ATLAS_CLAPACK_LIB)
set(CBLAS_FOUND ON)
set(CBLAS_PROVIDER ATLAS)
set(CBLAS_INC_DIR ${ATLAS_INC_DIR})
set(CBLAS_LIBRARIES ${ATLAS_LIB} ${ATLAS_CBLAS_LIB})
set(CBLAS_INC_DIR ${ATLAS_INC_DIR} ${ATLAS_CLAPACK_INC_DIR})
set(CBLAS_LIBRARIES ${ATLAS_CLAPACK_LIB} ${ATLAS_CBLAS_LIB})
add_definitions(-DPADDLE_USE_ATLAS)
message(STATUS "Found ATLAS (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
set(CBLAS_FOUND ON)
if(ATLAS_CLAPACK_INC_DIR)
set(CBLAS_INC_DIR ${CBLAS_INC_DIR} ${ATLAS_CLAPACK_INC_DIR})
add_definitions(-DLAPACK_FOUND)
message(STATUS "Found ATLAS (include: ${ATLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
message(STATUS "Found lapack in ATLAS (include: ${ATLAS_CLAPACK_INC_DIR})")
endif()
return()
endif()
......@@ -106,15 +104,17 @@ find_path(OPENBLAS_LAPACKE_INC_DIR NAMES lapacke.h
find_library(OPENBLAS_LIB NAMES openblas
PATHS ${OPENBLAS_LIB_SEARCH_PATHS})
if(OPENBLAS_INC_DIR AND OPENBLAS_LIB)
if(OPENBLAS_LAPACKE_INC_DIR AND OPENBLAS_INC_DIR AND OPENBLAS_LIB)
set(CBLAS_FOUND ON)
set(CBLAS_PROVIDER OPENBLAS)
set(CBLAS_INC_DIR ${OPENBLAS_INC_DIR})
set(CBLAS_INC_DIR ${OPENBLAS_INC_DIR} ${OPENBLAS_LAPACKE_INC_DIR})
set(CBLAS_LIBRARIES ${OPENBLAS_LIB})
message(STATUS "Found OpenBLAS (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
set(CBLAS_FOUND ON)
if(OPENBLAS_LAPACKE_INC_DIR)
add_definitions(-DPADDLE_USE_OPENBLAS)
add_definitions(-DLAPACK_FOUND)
message(STATUS "Found OpenBLAS (include: ${OPENBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
message(STATUS "Found lapack in OpenBLAS (include: ${OPENBLAS_LAPACKE_INC_DIR})")
endif()
return()
endif()
......@@ -143,9 +143,10 @@ find_library(REFERENCE_CBLAS_LIBRARY NAMES cblas PATHS
${REFERENCE_CBLAS_LIB_SEARCH_PATHS})
if (REFERENCE_CBLAS_INCLUDE_DIR AND REFERENCE_CBLAS_LIBRARY)
set(CBLAS_FOUND ON)
set(CBLAS_PROVIDER REFERENCE)
set(CBLAS_INC_DIR ${REFERENCE_CBLAS_INCLUDE_DIR})
set(CBLAS_LIBRARIES ${REFERENCE_CBLAS_LIBRARY})
message(STATUS "Found reference-cblas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBS})")
set(CBLAS_FOUND ON)
add_definitions(-DPADDLE_USE_REFERENCE_CBLAS)
message(STATUS "Found reference-cblas (include: ${CBLAS_INC_DIR}, library: ${CBLAS_LIBRARIES})")
endif()
......@@ -24,7 +24,7 @@ IF(NOT ${CBLAS_FOUND})
SET(CBLAS_LIBRARIES "${CBLAS_INSTALL_DIR}/lib/${LIBRARY_PREFIX}openblas${STATIC_LIBRARY_SUFFIX}"
CACHE FILEPATH "openblas library." FORCE)
SET(COMMON_ARGS CC=${CMAKE_C_COMPILER} NO_LAPACK=1 NO_SHARED=1)
SET(COMMON_ARGS CC=${CMAKE_C_COMPILER} NO_SHARED=1 NO_LAPACK=1)
IF(ANDROID)
# arm_soft_fp_abi branch of OpenBLAS to support softfp
......@@ -49,22 +49,10 @@ IF(NOT ${CBLAS_FOUND})
INSTALL_DIR ${CBLAS_INSTALL_DIR}
BUILD_IN_SOURCE 1
BUILD_COMMAND ${CMAKE_MAKE_PROGRAM} ${COMMON_ARGS} ${OPTIONAL_ARGS}
INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM} install NO_SHARED=1 PREFIX=<INSTALL_DIR>
INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM} install NO_SHARED=1 NO_LAPACK=1 PREFIX=<INSTALL_DIR>
UPDATE_COMMAND ""
CONFIGURE_COMMAND ""
)
IF(NOT ANDROID AND NOT RPI)
ExternalProject_Add_Step(
openblas lapacke_install
COMMAND ${CMAKE_COMMAND} -E copy "${CBLAS_SOURCES_DIR}/src/openblas/lapack-netlib/LAPACKE/include/lapacke_mangling_with_flags.h" "${CBLAS_INSTALL_DIR}/include/lapacke_mangling.h"
COMMAND ${CMAKE_COMMAND} -E copy "${CBLAS_SOURCES_DIR}/src/openblas/lapack-netlib/LAPACKE/include/lapacke.h" "${CBLAS_INSTALL_DIR}/include/lapacke.h"
COMMAND ${CMAKE_COMMAND} -E copy "${CBLAS_SOURCES_DIR}/src/openblas/lapack-netlib/LAPACKE/include/lapacke_config.h" "${CBLAS_INSTALL_DIR}/include/lapacke_config.h"
COMMAND ${CMAKE_COMMAND} -E copy "${CBLAS_SOURCES_DIR}/src/openblas/lapack-netlib/LAPACKE/include/lapacke_utils.h" "${CBLAS_INSTALL_DIR}/include/lapacke_utils.h"
DEPENDEES install
)
ENDIF()
LIST(APPEND external_project_dependencies openblas)
ENDIF(NOT ${CBLAS_FOUND})
......
......@@ -149,7 +149,6 @@ endfunction()
# Create a python unittest using run_python_tests.sh,
# which takes care of making correct running environment
function(add_python_test TEST_NAME)
message("PYTHON: ${PYTHON_EXECUTABLE}")
add_test(NAME ${TEST_NAME}
COMMAND bash ${PROJ_ROOT}/paddle/scripts/run_python_tests.sh
${USE_VIRTUALENV_FOR_TEST} ${PYTHON_EXECUTABLE} ${ARGN}
......
#edit-mode: -*- python -*-
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.trainer_config_helpers import *
settings(batch_size=10)
data = data_layer(name="input", size=8 * 16 * 16)
conv = img_conv_layer(
input=data,
filter_size=1,
filter_size_y=1,
num_channels=8,
num_filters=16,
stride=1,
bias_attr=True,
act=LinearActivation(),
groups=2,
layer_type="cudnn_conv")
outputs(conv)
#edit-mode: -*- python -*-
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.trainer_config_helpers import *
settings(batch_size=10)
data = data_layer(name="input", size=8 * 16 * 16)
conv = img_conv_layer(
input=data,
filter_size=1,
filter_size_y=1,
num_channels=8,
num_filters=16,
stride=1,
bias_attr=True,
act=LinearActivation(),
groups=2,
layer_type="exconv")
outputs(conv)
......@@ -258,12 +258,12 @@ TEST(Compare, img_conv) {
// Test cudnn_conv and exconv give the same result
TEST(Compare, img_conv2) {
std::string config_file_a = "./gserver/tests/img_conv_a.conf";
std::string config_file_b = "./gserver/tests/img_conv_c.conf";
std::string config_file_a = "./gserver/tests/img_conv_cudnn.py";
std::string config_file_b = "./gserver/tests/img_conv_exconv.py";
bool useGpu = FLAGS_use_gpu;
double eps = FLAGS_checkgrad_eps;
FLAGS_use_gpu = true;
FLAGS_checkgrad_eps = 1e-3;
FLAGS_checkgrad_eps = 1e-2;
compareNetwork(config_file_a, config_file_b);
FLAGS_use_gpu = useGpu;
FLAGS_checkgrad_eps = eps;
......
......@@ -34,6 +34,9 @@ void* lapack_dso_handle = nullptr;
// We have to use two levels of macro to do the expansion.
// See https://gcc.gnu.org/onlinedocs/cpp/Stringizing.html
#define STR(x) #x
// clang-format off
#ifndef LAPACK_FOUND
#define DYNAMIC_LOAD_LAPACK_WRAP(__name) \
struct DynLoad__##__name { \
template <typename... Args> \
......@@ -46,8 +49,16 @@ void* lapack_dso_handle = nullptr;
return reinterpret_cast<lapack_func>(p_##__name)(args...); \
} \
} __name; // struct DynLoad__##__name
#else
#define DYNAMIC_LOAD_LAPACK_WRAP(__name) \
struct DynLoad__##__name { \
template <typename... Args> \
auto operator()(Args... args) -> decltype(__name(args...)) { \
return __name(args...); \
} \
} __name; // struct DynLoad__##__name
#endif
// clang-format off
#ifdef PADDLE_USE_ATLAS
#define PADDLE_SGETRF clapack_sgetrf
#define PADDLE_DGETRF clapack_dgetrf
......
......@@ -18,17 +18,32 @@ limitations under the License. */
#ifdef PADDLE_USE_MKL
#include <mkl.h>
#include <mkl_lapacke.h>
#else
extern "C" {
#include <cblas.h>
}
#endif
#ifdef PADDLE_USE_ATLAS
extern "C" {
#include <cblas.h>
#include <clapack.h>
}
#else
#endif
#ifdef PADDLE_USE_OPENBLAS
#include <cblas.h>
#include <lapacke.h>
#endif
#ifndef LAPACK_FOUND
extern "C" {
#include <cblas.h>
int LAPACKE_sgetrf(
int matrix_layout, int m, int n, float* a, int lda, int* ipiv);
int LAPACKE_dgetrf(
int matrix_layout, int m, int n, double* a, int lda, int* ipiv);
int LAPACKE_sgetri(
int matrix_layout, int n, float* a, int lda, const int* ipiv);
int LAPACKE_dgetri(
int matrix_layout, int n, double* a, int lda, const int* ipiv);
}
#endif
#include <cmath>
......
......@@ -236,8 +236,19 @@ TEST(Matrix, unary) {
testMatrixTranspose(height, width);
testMatrixRotate(height, width);
}
#ifdef LAPACK_FOUND
// inverse matrix
testMatrixInverse(height);
#else
LOG(WARNING) << "Cannot run Matrix Inverse Unit Test.\n"
<< "Failed to find lapack library in current system.\n"
<< "To address this issue, Please adopt one of the following "
"approaches: \n"
<< "1. Simply issue `sudo apt-get install liblapacke-dev` to "
"avoid re-build source code. \n"
<< "2. Install MKL/Openblas/ATLAS and re-build PaddlePaddle "
"source code.";
#endif
}
}
......
......@@ -165,8 +165,8 @@ void GetWarpCTCDsoHandle(void** dso_handle) {
void GetLapackDsoHandle(void** dso_handle) {
#if defined(__APPLE__) || defined(__OSX__)
GetDsoHandleFromSearchPath(FLAGS_lapack_dir, "liblapack.dylib", dso_handle);
GetDsoHandleFromSearchPath(FLAGS_lapack_dir, "liblapacke.dylib", dso_handle);
#else
GetDsoHandleFromSearchPath(FLAGS_lapack_dir, "liblapack.so", dso_handle);
GetDsoHandleFromSearchPath(FLAGS_lapack_dir, "liblapacke.so", dso_handle);
#endif
}
......@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import optimizer
import layer
import activation
......@@ -42,7 +43,15 @@ __all__ = [
def init(**kwargs):
args = []
for key in kwargs.keys():
args_dict = {}
# NOTE: append arguments if they are in ENV
for ek, ev in os.environ.iteritems():
if ek.startswith("PADDLE_INIT_"):
args_dict[ek.replace("PADDLE_INIT_", "").lower()] = str(ev)
args_dict.update(kwargs)
# NOTE: overwrite arguments from ENV if it is in kwargs
for key in args_dict.keys():
args.append('--%s=%s' % (key, str(args_dict[key])))
api.initPaddle(*args)
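With this change, environment variables prefixed with PADDLE_INIT_ are converted into init arguments, and explicit keyword arguments override values picked up from the environment. A minimal usage sketch, assuming the illustrative flag names use_gpu and trainer_count (not taken from this diff):

import os

# Hypothetical setup: PADDLE_INIT_FOO becomes the init flag "foo".
os.environ['PADDLE_INIT_USE_GPU'] = 'False'
os.environ['PADDLE_INIT_TRAINER_COUNT'] = '1'

import paddle.v2 as paddle

# Keyword arguments take precedence over the environment, so trainer_count
# ends up as 2 while use_gpu still comes from PADDLE_INIT_USE_GPU.
paddle.init(trainer_count=2)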
......
......@@ -5,7 +5,7 @@ import topology
import minibatch
from data_feeder import DataFeeder
__all__ = ['infer']
__all__ = ['infer', 'Inference']
class Inference(object):
......@@ -13,7 +13,14 @@ class Inference(object):
Inference combines neural network output and parameters together
to do inference.
:param outptut_layer: The neural network that should be inferenced.
.. code-block:: python
inferer = Inference(output_layer=prediction, parameters=parameters)
for data_batch in batches:
print inferer.infer(data_batch)
:param output_layer: The neural network that should be inferenced.
:type output_layer: paddle.v2.config_base.Layer or the sequence
of paddle.v2.config_base.Layer
:param parameters: The parameters dictionary.
......@@ -56,8 +63,14 @@ class Inference(object):
item = [each_result[each_field] for each_field in field]
yield item
def infer(self, field='value', **kwargs):
def infer(self, input, field='value', **kwargs):
"""
Infer a batch of data with the model.
:param input: input data batch; should be a Python iterable object.
:param field: output field.
"""
retv = None
kwargs['input'] = input
for result in self.iter_infer_field(field=field, **kwargs):
if retv is None:
retv = [[] for i in xrange(len(result))]
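As the updated signature above shows, the input batch is now passed to infer() explicitly and field selects which output to return. A minimal sketch, reusing the names from the docstring example (prediction, parameters); test_batch is a placeholder, not taken from this diff:

inferer = Inference(output_layer=prediction, parameters=parameters)
# field='value' returns the forward output of the layer; the input batch is any Python iterable.
probs = inferer.infer(input=test_batch, field='value')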
......@@ -79,7 +92,7 @@ def infer(output_layer, parameters, input, feeding=None, field='value'):
.. code-block:: python
result = paddle.infer(outptut_layer=prediction,
result = paddle.infer(output_layer=prediction,
parameters=parameters,
input=SomeData)
print result
......