Commit b02c453d authored by Luo Tao

Merge branch 'develop' into stride

@@ -49,6 +49,7 @@ option(COVERALLS_UPLOAD "Package code coverage data to coveralls" OFF)
 option(ON_TRAVIS "Exclude special unit test on Travis CI" OFF)
 option(WITH_C_API "Compile PaddlePaddle with C-API(Prediction)" OFF)
 option(WITH_GOLANG "Compile PaddlePaddle with GOLANG" OFF)
+option(GLIDE_INSTALL "Download and install go dependencies " ON)
 option(USE_NNPACK "Compile PaddlePaddle with NNPACK library" OFF)

 # CMAKE_BUILD_TYPE
@@ -96,6 +97,8 @@ include(external/warpctc) # download, build, install warpctc
 include(external/any) # download libn::any
 include(external/eigen) # download eigen3
+include(cudnn) # set cudnn libraries, must before configure
+include(configure) # add paddle env configuration
 include(generic) # simplify cmake module
 include(package) # set paddle packages
 include(cpplint) # set paddle c++ style
@@ -103,10 +106,9 @@ include(ccache) # set ccache for compilation
 include(util) # set unittest and link libs
 include(rdma) # set rdma libraries
 include(flags) # set paddle compile flags
-include(cudnn) # set cudnn libraries
 include(version) # set PADDLE_VERSION
 include(coveralls) # set code coverage
-include(configure) # add paddle env configuration

 include_directories("${PROJ_ROOT}")
 include_directories("${PROJ_ROOT}/paddle/cuda/include")
@@ -139,8 +141,7 @@ add_subdirectory(proto)
 # "add_subdirectory(paddle)" and "add_subdirectory(python)" should be
 # placed after this block, because they depend on it.
 if(WITH_GOLANG)
-  add_subdirectory(go/master/c)
-  add_subdirectory(go/pserver/cclient)
+  add_subdirectory(go)
 endif(WITH_GOLANG)

 add_subdirectory(paddle)
......
@@ -34,14 +34,18 @@ RUN apt-get update && \
     net-tools && \
     apt-get clean -y

-# Install Go
+# Install Go and glide
 RUN wget -O go.tgz https://storage.googleapis.com/golang/go1.8.1.linux-amd64.tar.gz && \
     tar -C /usr/local -xzf go.tgz && \
     mkdir /root/gopath && \
+    mkdir /root/gopath/bin && \
+    mkdir /root/gopath/src && \
     rm go.tgz
 ENV GOROOT=/usr/local/go GOPATH=/root/gopath
 # should not be in the same line with GOROOT definition, otherwise docker build could not find GOROOT.
-ENV PATH=${PATH}:${GOROOT}/bin
+ENV PATH=${PATH}:${GOROOT}/bin:${GOPATH}/bin
+# install glide
+RUN curl -q https://glide.sh/get | sh

 # git credential to skip password typing
 RUN git config --global credential.helper store
......
@@ -79,6 +79,9 @@ if(WITH_GOLANG)
   set(GOPATH "${CMAKE_CURRENT_BINARY_DIR}/go")
   file(MAKE_DIRECTORY ${GOPATH})
   set(PADDLE_IN_GOPATH "${GOPATH}/src/github.com/PaddlePaddle/Paddle")
+  file(MAKE_DIRECTORY "${PADDLE_IN_GOPATH}")
+  set(PADDLE_GO_PATH "${CMAKE_SOURCE_DIR}/go")

   add_custom_target(go_path)
   add_custom_command(TARGET go_path
     # Symlink Paddle directory into GOPATH
@@ -89,7 +92,22 @@ if(WITH_GOLANG)
     # We can't run `go get -d ./...` for every target, because
     # multiple `go get` can not run concurrently, but make needs to be
     # able to run with multiple jobs.
-    COMMAND env GOPATH=${GOPATH} ${CMAKE_Go_COMPILER} get -d ./go/...
     WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
   )
+
+  if (GLIDE_INSTALL)
+    if(EXISTS $ENV{GOPATH}/bin/glide)
+      set(GLIDE "$ENV{GOPATH}/bin/glide")
+    else()
+      message(FATAL_ERROR "no glide executable found: $ENV{GOPATH}/bin/glide")
+    endif()
+
+    add_custom_target(go_vendor)
+    add_custom_command(TARGET go_vendor
+      COMMAND env GOPATH=${GOPATH} ${GLIDE} install
+      WORKING_DIRECTORY "${PADDLE_IN_GOPATH}/go"
+    )
+    add_dependencies(go_vendor go_path)
+  endif()
 endif(WITH_GOLANG)
@@ -25,6 +25,7 @@ set(STYLE_FILTER "${STYLE_FILTER}-readability/casting")
 set(IGNORE_PATTERN
     .*ImportanceSampler.*
     .*cblas\\.h.*
+    .*\\.pb\\.txt
     .*LtrDataProvider.*
    .*MultiDataProvider.*)
......
@@ -278,14 +278,16 @@ function(go_library TARGET_NAME)
   set(${TARGET_NAME}_LIB_PATH "${CMAKE_CURRENT_BINARY_DIR}/${${TARGET_NAME}_LIB_NAME}" CACHE STRING "output library path for target ${TARGET_NAME}")

   file(GLOB GO_SOURCE RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "*.go")
+  string(REPLACE "${PADDLE_GO_PATH}/" "" CMAKE_CURRENT_SOURCE_REL_DIR ${CMAKE_CURRENT_SOURCE_DIR})
   add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
     COMMAND rm "${${TARGET_NAME}_LIB_PATH}"
     # Golang build source code
     COMMAND env GOPATH=${GOPATH} ${CMAKE_Go_COMPILER} build ${BUILD_MODE}
     -o "${${TARGET_NAME}_LIB_PATH}"
-    ${GO_SOURCE}
-    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
-  add_dependencies(${TARGET_NAME} go_path)
+    "./${CMAKE_CURRENT_SOURCE_REL_DIR}/${GO_SOURCE}"
+    # must run under GOPATH
+    WORKING_DIRECTORY "${PADDLE_IN_GOPATH}/go")
+  add_dependencies(${TARGET_NAME} go_vendor)
 endfunction(go_library)

 function(go_binary TARGET_NAME)
@@ -293,12 +295,15 @@ function(go_binary TARGET_NAME)
   set(oneValueArgs "")
   set(multiValueArgs SRCS DEPS)
   cmake_parse_arguments(go_binary "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
+  string(REPLACE "${PADDLE_GO_PATH}/" "" CMAKE_CURRENT_SOURCE_REL_DIR ${CMAKE_CURRENT_SOURCE_DIR})

   add_custom_command(OUTPUT ${TARGET_NAME}_timestamp
     COMMAND env GOPATH=${GOPATH} ${CMAKE_Go_COMPILER} build
     -o "${CMAKE_CURRENT_BINARY_DIR}/${TARGET_NAME}"
-    ${go_library_SRCS}
-    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
-  add_custom_target(${TARGET_NAME} ALL DEPENDS ${TARGET_NAME}_timestamp ${go_binary_DEPS})
+    "./${CMAKE_CURRENT_SOURCE_REL_DIR}/${go_binary_SRCS}"
+    WORKING_DIRECTORY "${PADDLE_IN_GOPATH}/go")
+  # TODO: don't know what ${TARGET_NAME}_link does
+  add_custom_target(${TARGET_NAME} ALL DEPENDS go_vendor ${TARGET_NAME}_timestamp ${go_binary_DEPS})
   install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${TARGET_NAME} DESTINATION bin)
 endfunction(go_binary)
......
## Interaction between C++ and Python
Users employ the Python API to describe their own networks; however, the network construction actually happens in C++, so Protobuf is introduced to pass messages between Python and C++.
The interaction between Python and C++ can be simplified into two steps:
1. C++ tells Python how many Ops there are and what parameters users need to offer to initialize a new Op. Python then builds an API for each Op at compile time.
2. Users invoke the APIs built by Python and provide the necessary parameters. These parameters are sent to C++ to finish the Op construction task.
### Message from C++ to Python
We define a Protobuf message class `OpProto` to hold the information needed in the first step. What should an `OpProto` contain? The question is equivalent to: "What information do we need to offer to build a Python API that is legal, user-oriented, and able to describe a whole Op?"
The following information is necessary:
1. The Op's name and a short comment.
2. The input and output variables: each variable's name, type, and comment.
3. The Op's attributes: each attribute's name, type, comment, **default value** and **value range**.
So `OpProto` can be defined as follows:
```proto
enum AttrType {
  INT = 1;
  FLOAT = 2;
  STRING = 3;
  INTS = 4;
  FLOATS = 5;
  STRINGS = 6;
};

message AttrValue {
  required AttrType type = 1;
  optional int32 iv = 2;
  optional float fv = 3;
  optional string sv = 4;
  repeated int32 ivs = 5;
  repeated float fvs = 6;
  repeated string svs = 7;
};

message AttrProto {
  required string name = 1;
  required string comment = 2;
  required AttrType type = 3;
};

message VarProto {
  required string name = 1;
  required string comment = 2;
};

message OpProto {
  repeated VarProto inputs = 1;
  repeated VarProto outputs = 2;
  repeated AttrProto attrs = 3;
  required string type = 4;
  required string comment = 5;
};
```
To generate Python code automatically:
```python
def create_python_ops_creation_functions():
    op_protos = paddle.framework.OpRegistry.get_all_op_proto()
    for type_name in op_protos:
        op_proto = op_protos[type_name]

        # Bind op_proto and type_name as default arguments so each generated
        # function keeps its own proto (avoids the late-binding pitfall of
        # closures created inside a loop).
        def __impl__(op_proto=op_proto, type_name=type_name, **kwargs):
            # Users must use keyword arguments in the Paddle API.
            inputs = [kwargs.get(ipt.name, "") for ipt in op_proto.inputs]
            outputs = [kwargs.get(opt.name, "") for opt in op_proto.outputs]
            attrs = [cast_to_op_attr(attr, kwargs.get(attr.name, None)) for attr in op_proto.attrs]
            opdesc = inputs, outputs, type_name, attrs
            return paddle.framework.OpRegistry.CreateOp(opdesc)

        __impl__.__doc__ = create_doc_string(op_proto)
        globals()[type_name] = __impl__

create_python_ops_creation_functions()
```
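As a quick illustration (the op and argument names here are hypothetical, following the `cos` example used later in this doc), a generated creation function would be called with keyword arguments only:
```python
# Hypothetical call to a generated op-creation function. Assumes an op of
# type "cos" was registered with inputs "a" and "b", an output "output",
# and a float attribute "scale".
cos_op = cos(a="layer0.out", b="layer1.out", output="cos.out", scale=2.0)
```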
### Message from Python to C++
To hold the message needed in the second step above, we define the Protobuf message class `OpDesc`. It holds the user-specified parameters that describe an Op.
```proto
message OpDesc {
  required string type = 1;
  repeated string inputs = 2;
  repeated string outputs = 3;
  map<string, AttrValue> attrs = 4;
};
```
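For instance, the hypothetical `cos` call shown earlier could be serialized into an `OpDesc` built roughly as follows with the generated Python bindings (the module name `op_desc_pb2` is an assumption, not part of this design):
```python
import op_desc_pb2  # assumed name of the generated protobuf module

desc = op_desc_pb2.OpDesc()
desc.type = "cos"
desc.inputs.extend(["layer0.out", "layer1.out"])
desc.outputs.append("cos.out")
desc.attrs["scale"].type = op_desc_pb2.FLOAT  # AttrType value
desc.attrs["scale"].fv = 2.0
```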
## OpProto Register
Every Op has its own `OpProto`. For convenience, we register them and record all their messages in one place. For each `Op` class, we define a corresponding `OpMaker` class whose constructor implements the building of the Op's `OpProto`. The `OpMaker`'s constructor is invoked by another function, `OpRegistry::RegisterOp()`.
```cpp
class OpProtoMaker {
public:
  OpProtoMaker(OpProto* proto): proto_(proto) {}

protected:
  OpProto* proto_;
  void AddInput(const std::string& name, const std::string& desc) {...}
  // Returns a builder so a default value and a value range can be chained
  // onto the attribute; the builder type is elided in this sketch.
  AttrBuilder& AddAttr(const std::string& name, const std::string& desc, AttrType type) {...}
  void AddType(const std::string& type) {...}
  void AddComment(const std::string& comment) { ... }
};

class OpRegistry {
public:
  using OpCreator = std::function<OperatorBase* (const OpDesc& desc)>;

  template <typename OpType, typename OpMaker>
  static void RegisterOp(const std::string& name) {
    gCreators_[name] = [](const OpDesc& desc) {
      return new OpType(desc);
    };
    OpProto& opProto = gProtos_[name];
    OpMaker maker(&opProto);  // the maker's constructor fills in opProto
  }

  static map<string, OpCreator> gCreators_;
  static map<string, OpProto> gProtos_;
};

template <typename OpType, typename OpMaker>
class OpRegister {
public:
  OpRegister(std::string type) {
    OpRegistry::RegisterOp<OpType, OpMaker>(type);
  }
};

#define REGISTER_OP(op_class, op_maker_class, type_name)       \
  class op_class##Register {                                   \
   private:                                                    \
    const static OpRegister<op_class, op_maker_class> reg;     \
  };                                                           \
  const OpRegister<op_class, op_maker_class> op_class##Register::reg(#type_name);

class CosineOp {
  // ...
};

struct CosineOpProtoMaker : public OpProtoMaker {
  CosineOpProtoMaker(OpProto* proto) : OpProtoMaker(proto) {
    AddInput("input", "input of cosine op");
    AddAttr("scale", "scale of cosine op", FLOAT).Default(1.0).LargerThan(0.0);
    AddType("cos");
    AddComment("This is cos op");
  }
};

REGISTER_OP(CosineOp, CosineOpProtoMaker, cos);
```
In `REGISTER_OP(CosineOp, CosineOpProtoMaker, cos)`, we register not only the `CosineOp` creator but also the `OpProto` built by `CosineOpProtoMaker`. The default value and value range of `scale` are registered into that proto as well.
## Python API
Python APIs are divided into two types: the high-level API and the low-level API.
### High-Level API
The high-level API is called by users directly, so it should keep its style consistent with the existing V2 API.
Here is a sample of how to define an fc layer:
```python
hd = fc_layer(input=data, size=56, with_bias=True, activation="sigmoid")
```
`hd` is the output of `fc_layer`, and it is a `variable`. It can be further sent into other layers as input.
The definition of `fc_layer()`:
```python
def fc_layer(input, size, with_bias, activation):
    attr_map = {"size": size}
    check_attrs(attr_map)
    w = make_variable('w')
    if with_bias:
        b = make_variable('b')
    else:
        b = None
    fc_output = make_variable('fc_output')
    fc_op(input, w, b, fc_output, attr_map)
    act_output = make_variable('sigmoid_output')
    if activation == "sigmoid":
        sigmoid_op(fc_output, act_output)
    else:
        pass  # other activations elided
    return act_output
```
### Low-Level API
In the above sample, `fc_op` and `sigmoid_op` are low-level APIs. They build the `OpDesc` and invoke the corresponding C++ code.
*TODO*
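Pending that TODO, here is a minimal sketch of what `fc_op` could look like, assuming the `OpDesc` message above and the `OpRegistry.CreateOp` entry point; `op_desc_pb2` and the attribute-filling helper are hypothetical names:
```python
def fc_op(input, w, b, output, attr_map):
    # Hypothetical low-level wrapper: build an OpDesc and hand it to C++.
    desc = op_desc_pb2.OpDesc()
    desc.type = "fc"
    desc.inputs.extend([v for v in (input, w, b) if v is not None])
    desc.outputs.append(output)
    for name, value in attr_map.items():
        # fill_attr_value would pick the right AttrValue field (iv/fv/sv/...)
        # based on the attribute type registered in the op's OpProto.
        fill_attr_value(desc.attrs[name], value)
    return paddle.framework.OpRegistry.CreateOp(desc)
```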
vendor/
.glide/
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
add_subdirectory(pserver/cclient)
add_subdirectory(cmd/pserver)
add_subdirectory(cmd/master)
add_subdirectory(master/c)
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
go_binary(master SRCS master.go)
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
go_binary(pserver SRCS pserver.go)
hash: b8f18ce6784bd3fadd9fed0b8443e7b658234ea785ae1f220723ae2c1f652aa7
updated: 2017-06-27T14:05:48.925262819+08:00
imports:
- name: github.com/coreos/etcd
version: 61fc123e7a8b14a0a258aa3f5c4159861b1ec2e7
subpackages:
- auth/authpb
- clientv3
- clientv3/concurrency
- etcdserver/api/v3rpc/rpctypes
- etcdserver/etcdserverpb
- mvcc/mvccpb
- name: github.com/golang/protobuf
version: 4bd1920723d7b7c925de087aa32e2187708897f7
subpackages:
- jsonpb
- proto
- name: github.com/golang/snappy
version: 553a641470496b2327abcac10b36396bd98e45c9
- name: github.com/namsral/flag
version: 71ceffbeb0ba60fccc853971bb3ed4d7d90bfd04
- name: github.com/PaddlePaddle/recordio
version: edfb82af0739c84f241c87390ec5649c7b28c129
- name: github.com/sirupsen/logrus
version: 202f25545ea4cf9b191ff7f846df5d87c9382c2b
- name: golang.org/x/net
version: c8c74377599bd978aee1cf3b9b63a8634051cec2
subpackages:
- context
- http2
- http2/hpack
- idna
- internal/timeseries
- lex/httplex
- trace
- name: golang.org/x/sys
version: f7928cfef4d09d1b080aa2b6fd3ca9ba1567c733
subpackages:
- unix
- name: golang.org/x/text
version: 4e9ab9ee170f2a39bd66c92b3e0a47ff47a4bc77
subpackages:
- secure/bidirule
- transform
- unicode/bidi
- unicode/norm
- name: google.golang.org/grpc
version: 8050b9cbc271307e5a716a9d782803d09b0d6f2d
subpackages:
- codes
- credentials
- grpclog
- internal
- keepalive
- metadata
- naming
- peer
- stats
- tap
- transport
testImports: []
package: github.com/PaddlePaddle/Paddle/go
import:
- package: github.com/PaddlePaddle/recordio
- package: github.com/coreos/etcd
version: ^3.2.1
subpackages:
- clientv3
- clientv3/concurrency
- package: github.com/namsral/flag
version: ^1.7.4-pre
- package: github.com/sirupsen/logrus
version: ^1.0.0
+cmake_minimum_required(VERSION 3.0)
 go_library(paddle_master SHARED)

+cc_library(paddle_go_optimizer DEPS paddle_optimizer paddle_proto glog gflags protobuf)
 go_library(paddle_pserver_cclient STATIC)
+if(WITH_TESTING)
 add_subdirectory(test)
+endif()

+cc_binary(main SRCS main.c DEPS paddle_pserver_cclient)
 cc_test(test_cclient SRCS test_cclient.c DEPS paddle_pserver_cclient)
+add_style_check_target(test_cclient test_cclient.c)
#include <stdio.h>
#include <stdlib.h>
#include "libpaddle_pserver_cclient.h"
// TODO(helin): Fix: gtest using cmake is not working, using this
// hacky way for now.
#define fail() \
fprintf(stderr, "info: %s:%d: ", __FILE__, __LINE__); \
exit(-1);
void sendGrads(paddle_pserver_client c) {
unsigned char grad_a[2000] = {2};
unsigned char grad_b[3000] = {3};
paddle_gradient grad1 = {
"param_a", PADDLE_ELEMENT_TYPE_FLOAT32, grad_a, 2000};
paddle_gradient grad2 = {
"param_b", PADDLE_ELEMENT_TYPE_FLOAT32, grad_b, 3000};
paddle_gradient* grads[2] = {&grad1, &grad2};
if (paddle_send_grads(c, grads, 2)) {
fail();
}
}
void getParams(paddle_pserver_client c) {
paddle_parameter param_a;
paddle_parameter param_b;
char name_a[] = "param_a";
char name_b[] = "param_b";
// Must pre-allocate the parameter content before calling paddle_get_params.
unsigned char content_a[2000] = {};
unsigned char content_b[3000] = {};
param_a.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
param_a.name = name_a;
param_a.content = content_a;
param_a.content_len = 2000;
param_b.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
param_b.name = name_b;
param_b.content = content_b;
param_b.content_len = 3000;
paddle_parameter* params[2] = {&param_a, &param_b};
if (paddle_get_params(c, params, 2)) {
fail();
}
}
int main() {
char addr[] = "localhost:3000";
paddle_pserver_client c = paddle_new_pserver_client(addr, 1);
retry:
if (paddle_begin_init_params(c)) {
paddle_parameter param;
char name_a[] = "param_a";
char name_b[] = "param_b";
unsigned char content_a[2000] = {1};
unsigned char content_b[3000] = {0};
param.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
param.name = name_a;
param.content = content_a;
param.content_len = 2000;
int error = paddle_init_param(c, param, NULL, 0);
if (error != 0) {
goto retry;
}
param.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
param.name = name_b;
param.content = content_b;
param.content_len = 3000;
error = paddle_init_param(c, param, NULL, 0);
if (error != 0) {
goto retry;
}
error = paddle_finish_init_params(c);
if (error != 0) {
goto retry;
}
}
int i;
for (i = 0; i < 100; i++) {
sendGrads(c);
getParams(c);
}
if (paddle_save_model(c, "/tmp/")) {
fail();
}
return 0;
}
@@ -3,113 +3,101 @@
 #include "libpaddle_pserver_cclient.h"

-typedef float real;
-
-void fail() {
-  // TODO(helin): fix: gtest using cmake is not working, using this
-  // hacky way for now.
-  printf("test failed.\n");
-  exit(-1);
-}
-
-void print_parameter(paddle_gradient* param) {
-  if (param == NULL) {
-    printf("param is NULL!!\n");
-  } else {
-    printf("==== parameter ====\n");
-    printf("name: %s\n", param->name);
-    printf("content_len: %d\n", param->content_len);
-    printf("content_type: %d\n", param->element_type);
-    int i;
-    for (i = 0; i < param->content_len / (int)sizeof(real); ++i) {
-      printf("%f ", ((float*)param->content)[i]);
-    }
-    printf("\n\n");
-  }
-}
+// TODO(helin): Fix: gtest using cmake is not working, using this
+// hacky way for now.
+#define fail()                                          \
+  fprintf(stderr, "info: %s:%d: ", __FILE__, __LINE__); \
+  exit(-1);
+
+void sendGrads(paddle_pserver_client c) {
+  unsigned char grad_a[2000] = {2};
+  unsigned char grad_b[3000] = {3};
+  paddle_gradient grad1 = {
+      "param_a", PADDLE_ELEMENT_TYPE_FLOAT32, grad_a, 2000};
+  paddle_gradient grad2 = {
+      "param_b", PADDLE_ELEMENT_TYPE_FLOAT32, grad_b, 3000};
+  paddle_gradient *grads[2] = {&grad1, &grad2};
+  if (paddle_send_grads(c, grads, 2)) {
+    fail();
+  }
+}
+
+void getParams(paddle_pserver_client c) {
+  paddle_parameter param_a;
+  paddle_parameter param_b;
+  char name_a[] = "param_a";
+  char name_b[] = "param_b";
+  // Must pre-allocate the parameter content before calling paddle_get_params.
+  unsigned char content_a[2000] = {};
+  unsigned char content_b[3000] = {};
+  param_a.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
+  param_a.name = name_a;
+  param_a.content = content_a;
+  param_a.content_len = 2000;
+  param_b.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
+  param_b.name = name_b;
+  param_b.content = content_b;
+  param_b.content_len = 3000;
+
+  paddle_parameter *params[2] = {&param_a, &param_b};
+  if (paddle_get_params(c, params, 2)) {
+    fail();
+  }
+}

 int main() {
   char addr[] = "localhost:3000";
   paddle_pserver_client c = paddle_new_pserver_client(addr, 1);
-
-  char* names[] = {"param_a", "param_b"};
+  char *config_proto = NULL;
+  size_t config_proto_len = 0;
+  ssize_t nread;
+  FILE *fp = fopen("testdata/optimizer.pb", "r");
+  if (!fp) {
+    fail();
+  }
+  while ((nread = getline(&config_proto, &config_proto_len, fp)) != -1) {
+    printf("%s", config_proto);
+  }
+  fclose(fp);

 retry:
-  printf("init parameter to pserver:\n");
-
-  real param_content1[] = {0.1, 0.2, 0.3};
-  real param_content2[] = {0.4, 0.5, 0.6};
-  paddle_parameter** params =
-      (paddle_parameter**)malloc(sizeof(paddle_parameter*) * 2);
-  params[0] = (paddle_parameter*)malloc(sizeof(paddle_parameter));
-  params[0]->name = names[0];
-  params[0]->content = (unsigned char*)param_content1;
-  params[0]->content_len = 3 * sizeof(real);
-  params[0]->element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
-
-  params[1] = (paddle_parameter*)malloc(sizeof(paddle_parameter));
-  params[1]->name = names[1];
-  params[1]->content = (unsigned char*)param_content2;
-  params[1]->content_len = 3 * sizeof(real);
-  params[1]->element_type = PADDLE_ELEMENT_TYPE_INT32;
-
   if (paddle_begin_init_params(c)) {
-    if (paddle_init_param(c, *params[0], NULL, 0) != 0) {
+    paddle_parameter param;
+    char name_a[] = "param_a";
+    char name_b[] = "param_b";
+    unsigned char content_a[2000] = {1};
+    unsigned char content_b[3000] = {0};
+    param.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
+    param.name = name_a;
+    param.content = content_a;
+    param.content_len = 2000;
+    int error =
+        paddle_init_param(c, param, (void *)config_proto, config_proto_len);
+    if (error != 0) {
       goto retry;
     }
-    if (paddle_init_param(c, *params[1], NULL, 0) != 0) {
+
+    param.element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
+    param.name = name_b;
+    param.content = content_b;
+    param.content_len = 3000;
+    error = paddle_init_param(c, param, (void *)config_proto, config_proto_len);
+    if (error != 0) {
       goto retry;
     }
-    if (paddle_finish_init_params(c) != 0) {
+
+    error = paddle_finish_init_params(c);
+    if (error != 0) {
       goto retry;
     }
-  } else {
-    fail();
   }

-  printf("get inited parameters from pserver:\n");
-  // get parameters again by reusing the allocated parameter buffers.
-  if (paddle_get_params(c, params, 2) != 0) {
-    fail();
-  }
-  print_parameter(params[0]);
-  print_parameter(params[1]);
-
-  printf("send gradient to pserver:\n");
-  real gradient_content1[] = {0.01, 0.02, 0.03};
-  real gradinet_content2[] = {0.04, 0.05, 0.06};
-  paddle_gradient** grads =
-      (paddle_gradient**)malloc(sizeof(paddle_gradient*) * 2);
-  grads[0] = (paddle_gradient*)malloc(sizeof(paddle_gradient));
-  grads[0]->name = names[0];
-  grads[0]->content = (unsigned char*)gradient_content1;
-  grads[0]->content_len = 3 * sizeof(real);
-  grads[0]->element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
-
-  grads[1] = (paddle_gradient*)malloc(sizeof(paddle_gradient));
-  grads[1]->name = names[1];
-  grads[1]->content = (unsigned char*)gradinet_content2;
-  grads[1]->content_len = 3 * sizeof(real);
-  grads[1]->element_type = PADDLE_ELEMENT_TYPE_INT32;
-
-  printf("print gradient sent to pserver:\n");
-  print_parameter(grads[0]);
-  print_parameter(grads[1]);
-
-  if (paddle_send_grads(c, grads, 2) != 0) {
-    fail();
-  }
-
-  printf("get updated parameters from pserver:\n");
-  // get parameters again by reusing the allocated parameter buffers.
-  if (paddle_get_params(c, params, 2) != 0) {
-    fail();
-  }
-  print_parameter(params[0]);
-  print_parameter(params[1]);
-
-  if (paddle_save_model(c, "/tmp/") != 0) {
+  int i;
+  for (i = 0; i < 100; i++) {
+    sendGrads(c);
+    getParams(c);
+  }
+
+  if (paddle_save_model(c, "/tmp/")) {
     fail();
   }
......
@@ -22,6 +22,8 @@ def main():
     # create optimizer
     optimizer = paddle.optimizer.Momentum(momentum=0)

+    # TODO(zhihong) : replace optimizer with new OptimizerConfig
+
     trainer = paddle.trainer.SGD(cost=cost,
                                  parameters=parameters,
                                  update_equation=optimizer,
......
 package pserver_test

 import (
+	"io/ioutil"
 	"net"
 	"net/http"
 	"net/rpc"
@@ -74,18 +75,22 @@ func TestClientFull(t *testing.T) {
 	}

 	const numParameter = 100
+	config, err := ioutil.ReadFile("./cclient/test/testdata/optimizer.pb")
+	if err != nil {
+		t.Fatalf("read optimizer proto failed")
+	}
 	for i := 0; i < numParameter; i++ {
 		var p pserver.Parameter
 		p.Name = "p_" + strconv.Itoa(i)
 		p.ElementType = pserver.Float32
 		p.Content = make([]byte, (i+1)*100)
-		err := c.InitParam(pserver.ParameterWithConfig{Param: p})
+		err := c.InitParam(pserver.ParameterWithConfig{Param: p, Config: config})
 		if err != nil {
 			t.Fatal(err)
 		}
 	}

-	err := c.FinishInitParams()
+	err = c.FinishInitParams()
 	if err != nil {
 		t.Fatal(err)
 	}
......
#include <stdlib.h>
#include "optimizer.h"
typedef int (*update_func)(void*, void*, paddle_element_type, const void*, int);
typedef void (*release_func)(void*);
typedef struct paddle_optimizer {
update_func update;
release_func release;
void* optimizer;
} paddle_optimizer;
void paddle_release_optimizer(paddle_optimizer* o) {
o->release(o->optimizer);
free(o);
}
int paddle_update_parameter(paddle_optimizer* o,
void* buffer,
paddle_element_type element_type,
const void* gradient,
int num_bytes) {
return o->update(o->optimizer, buffer, element_type, gradient, num_bytes);
}
typedef struct { double learning_rate; } SGD_optimizer;
int update_SGD(void* optimizer,
void* buffer,
paddle_element_type element_type,
const void* gradient,
int num_bytes) {
SGD_optimizer* o = (SGD_optimizer*)optimizer;
float* parameter = (float*)buffer;
float* grad = (float*)gradient;
int i;
for (i = 0; i < num_bytes / sizeof(float); ++i) {
parameter[i] -= o->learning_rate * grad[i];
}
return 0;
}
void release_SGD(void* optimizer) {
SGD_optimizer* o = (SGD_optimizer*)optimizer;
// nothing allocated on heap
}
paddle_optimizer* paddle_create_SGD_optimizer(double learning_rate) {
SGD_optimizer* impl = (SGD_optimizer*)malloc(sizeof(SGD_optimizer));
impl->learning_rate = learning_rate;
paddle_optimizer* opt = (paddle_optimizer*)malloc(sizeof(paddle_optimizer));
opt->update = update_SGD;
opt->release = release_SGD;
opt->optimizer = impl;
return opt;
}
 package pserver

-/*
-#include "optimizer.h"
-*/
+// #cgo CFLAGS: -I ../../
+// //FIXME: ldflags contain "build" path
+// #cgo LDFLAGS: ../../build/go/pserver/cclient/libpaddle_go_optimizer.a -lstdc++
+// #include "paddle/optimizer/optimizer.h"
+// #include <stdlib.h>
+// #include <string.h>
 import "C"

 import (
 	"fmt"
 	"unsafe"
-)

-type optimizerType int
-
-const (
-	sgd optimizerType = iota
+	log "github.com/sirupsen/logrus"
 )

 var nullPtr = unsafe.Pointer(uintptr(0))

 type optimizer struct {
-	opt *C.struct_paddle_optimizer
+	opt         *C.struct_paddle_optimizer
+	elementType ElementType
+}
+
+func cArrayToSlice(p unsafe.Pointer, len int) []byte {
+	if p == nullPtr {
+		return nil
+	}
+
+	// create a Go slice backed by a C array, reference:
+	// https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
+	//
+	// Go garbage collector will not interact with this data, need
+	// to be freed properly.
+	return (*[1 << 30]byte)(p)[:len:len]
 }

-func newOptimizer(t optimizerType, learning_rate float64) *optimizer {
+func newOptimizer(paramWithConfigs ParameterWithConfig) *optimizer {
 	o := &optimizer{}
-	o.opt = C.paddle_create_SGD_optimizer(C.double(learning_rate))
+	o.elementType = paramWithConfigs.Param.ElementType
+	p := paramWithConfigs.Param
+	c := paramWithConfigs.Config
+	log.WithFields(log.Fields{
+		"ElementType": p.ElementType,
+		"ParamSize":   len(p.Content),
+		"ConfigSize":  len(c),
+	}).Info("New Optimizer Created with config:")
+	var cbuffer unsafe.Pointer
+	cbuffer = C.malloc(C.size_t(len(p.Content)))
+	C.memcpy(cbuffer, unsafe.Pointer(&p.Content[0]), C.size_t(len(p.Content)))
+	o.opt = C.paddle_create_optimizer((*C.uchar)(&c[0]), C.int(len(c)),
+		C.paddle_element_type(p.ElementType), cbuffer, C.int(len(p.Content)/C.sizeof_float),
+		(*C.char)(nullPtr), 0)
 	return o
 }

-func (o *optimizer) UpdateParameter(p Parameter, g Gradient) error {
-	if len(p.Content) != len(g.Content) {
-		return fmt.Errorf("Name: %s, parameter and gradient length not match, parameter: %d, gradient: %d", p.Name, len(p.Content), len(g.Content))
-	}
-
-	if p.ElementType != g.ElementType {
-		return fmt.Errorf("Name: %s, parameter and gradient element type not match, parameter: %v, gradient: %v", p.Name, p.ElementType, g.ElementType)
+func (o *optimizer) GetWeights() []byte {
+	var buffer unsafe.Pointer
+	buffer_len := C.paddle_optimizer_get_weights(o.opt, &buffer)
+	return cArrayToSlice(buffer, int(buffer_len)*C.sizeof_float)
+}
+
+func (o *optimizer) UpdateParameter(g Gradient) error {
+	if o.elementType != g.ElementType {
+		return fmt.Errorf("Name: %s, parameter and gradient element type not match, parameter: %v, gradient: %v", g.Name, o.elementType, g.ElementType)
 	}

-	r := C.paddle_update_parameter(o.opt, unsafe.Pointer(&p.Content[0]), C.paddle_element_type(p.ElementType), unsafe.Pointer(&g.Content[0]), C.int(len(g.Content)))
+	r := C.paddle_update_parameter(o.opt, C.paddle_element_type(g.ElementType), unsafe.Pointer(&g.Content[0]), C.int(len(g.Content))/C.sizeof_float)
 	if r != 0 {
 		return fmt.Errorf("optimizer update returned error code: %d", r)
 	}
......
#ifndef PADDLE_PSERVER_OPTIMIZER_H
#define PADDLE_PSERVER_OPTIMIZER_H
typedef enum {
PADDLE_ELEMENT_TYPE_INT32 = 0,
PADDLE_ELEMENT_TYPE_UINT32 = 1,
PADDLE_ELEMENT_TYPE_INT64 = 2,
PADDLE_ELEMENT_TYPE_UINT64 = 3,
PADDLE_ELEMENT_TYPE_FLOAT32 = 4,
PADDLE_ELEMENT_TYPE_FLOAT64 = 5,
} paddle_element_type;
struct paddle_optimizer;
struct paddle_optimizer* paddle_create_SGD_optimizer(double learning_rate);
void paddle_release_optimizer(struct paddle_optimizer* o);
int paddle_update_parameter(struct paddle_optimizer* o,
void* buffer,
paddle_element_type element_type,
const void* gradient,
int num_bytes);
#endif /* PADDLE_PSERVER_OPTIMIZER_H */
 package pserver

-import "testing"
+import (
+	"io/ioutil"
+	"testing"
+)

-func TestSGDCreateRelease(t *testing.T) {
-	o := newOptimizer(sgd, 1)
+func TestOptimizerCreateRelease(t *testing.T) {
+	p := Parameter{
+		Name:        "a",
+		ElementType: Int32,
+	}
+	p.Content = []byte{1, 3}
+	config, err := ioutil.ReadFile("./cclient/test/testdata/optimizer.pb")
+	if err != nil {
+		t.Fatalf("read optimizer proto failed")
+	}
+	param := ParameterWithConfig{
+		Param:  p,
+		Config: config,
+	}
+	o := newOptimizer(param)
 	o.Cleanup()
 }
@@ -49,8 +49,7 @@ type Service struct {
 	idx int

 	mu       sync.Mutex
-	opt      *optimizer
-	paramMap map[string]Parameter
+	optMap   map[string]*optimizer
 }

 // NewService creates a new service, will bypass etcd registration if no
@@ -58,9 +57,8 @@ type Service struct {
 func NewService(idx int) (*Service, error) {
 	s := &Service{
 		idx: idx,
-		opt: newOptimizer(sgd, 0.005),
 	}
-	s.paramMap = make(map[string]Parameter)
+	s.optMap = make(map[string]*optimizer)
 	s.initialized = make(chan struct{})
 	return s, nil
 }
@@ -81,7 +79,7 @@ func (s *Service) InitParam(paramWithConfigs ParameterWithConfig, dummy *int) er
 	// TODO(helin): check if paramWithConfigs.Param.Content is
 	// properly memory aligned, if not, make copy to a memory
 	// aligned region.
-	s.paramMap[paramWithConfigs.Param.Name] = paramWithConfigs.Param
+	s.optMap[paramWithConfigs.Param.Name] = newOptimizer(paramWithConfigs)
 	return nil
 }
@@ -110,12 +108,12 @@ func (s *Service) SendGrad(g Gradient, dummy *int) error {
 	s.mu.Lock()
 	defer s.mu.Unlock()

-	p, ok := s.paramMap[g.Name]
+	o, ok := s.optMap[g.Name]
 	if !ok {
 		return fmt.Errorf("parameter: %s does not exist", g.Name)
 	}

-	return s.opt.UpdateParameter(p, g)
+	return o.UpdateParameter(g)
 }

 // GetParam gets parameters from the parameter server.
@@ -124,7 +122,7 @@ func (s *Service) GetParam(name string, parameter *Parameter) error {
 	s.mu.Lock()
 	defer s.mu.Unlock()

-	p, ok := s.paramMap[name]
+	opt, ok := s.optMap[name]
 	if !ok {
 		return fmt.Errorf("parameter: %s does not exist", name)
 	}
@@ -136,7 +134,9 @@ func (s *Service) GetParam(name string, parameter *Parameter) error {
 	// nature. This race condition is allowed deliberately
 	// to save the program from making a copy of the
 	// parameter content.
-	*parameter = p
+	parameter.Name = name
+	parameter.ElementType = opt.elementType
+	parameter.Content = opt.GetWeights()
 	return nil
 }
......
 package pserver_test

 import (
+	"io/ioutil"
 	"reflect"
 	"sync"
 	"testing"
@@ -9,7 +10,7 @@ import (
 	"github.com/PaddlePaddle/Paddle/go/pserver"
 )

-func TestFull(t *testing.T) {
+func TestServiceFull(t *testing.T) {
 	s, err := pserver.NewService(0)
 	if err != nil {
 		t.Error(err)
@@ -18,7 +19,12 @@ func TestFull(t *testing.T) {
 	p.Name = "param_a"
 	p.Content = []byte{1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0}
 	p.ElementType = pserver.Int32
-	err = s.InitParam(pserver.ParameterWithConfig{Param: p, Config: nil}, nil)
+	config, err := ioutil.ReadFile("./cclient/test/testdata/optimizer.pb")
+	if err != nil {
+		t.Fatalf("read optimizer proto failed")
+	}
+	err = s.InitParam(pserver.ParameterWithConfig{Param: p, Config: config}, nil)
 	if err != nil {
 		t.FailNow()
 	}
@@ -27,7 +33,7 @@ func TestFull(t *testing.T) {
 	p1.Name = "param_b"
 	p1.Content = []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
 	p1.ElementType = pserver.Float32
-	err = s.InitParam(pserver.ParameterWithConfig{Param: p1, Config: nil}, nil)
+	err = s.InitParam(pserver.ParameterWithConfig{Param: p1, Config: config}, nil)
 	if err != nil {
 		t.FailNow()
 	}
@@ -48,6 +54,7 @@ func TestFull(t *testing.T) {
 	}

 	g1, g2 := pserver.Gradient(p1), pserver.Gradient(p)
+
 	err = s.SendGrad(g1, nil)
 	if err != nil {
 		t.FailNow()
@@ -142,7 +149,12 @@ func TestBlockUntilInitialized(t *testing.T) {
 	p.Name = "param_a"
 	p.Content = []byte{1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0}
 	p.ElementType = pserver.Int32
-	err = s.InitParam(pserver.ParameterWithConfig{Param: p, Config: nil}, nil)
+	config, err := ioutil.ReadFile("./cclient/test/testdata/optimizer.pb")
+	if err != nil {
+		t.Fatalf("read optimizer proto failed")
+	}
+	err = s.InitParam(pserver.ParameterWithConfig{Param: p, Config: config}, nil)
 	if err != nil {
 		t.FailNow()
 	}
......
@@ -5,3 +5,6 @@ nv_test(dim_test SRCS dim_test.cu DEPS ddim)
 cc_test(variable_test SRCS variable_test.cc)
 cc_test(scope_test SRCS scope_test.cc)
 cc_test(enforce_test SRCS enforce_test.cc)
+proto_library(attr_type SRCS attr_type.proto)
+proto_library(op_proto SRCS op_proto.proto)
+cc_test(op_proto_test SRCS op_proto_test.cc DEPS op_proto attr_type protobuf)
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
syntax="proto2";
package paddle.framework;
// Attribute Type for paddle's Op.
// Op contains many attributes. Each type of attributes could be different.
// The AttrType will be shared between AttrDesc and AttrProto.
enum AttrType {
INT = 0;
FLOAT = 1;
STRING = 2;
INTS = 3;
FLOATS = 4;
STRINGS = 5;
}
\ No newline at end of file
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
// Protocol Message for 3rd-party language binding.
//
// Paddle Python package will use `OpProto` to generate op creation methods.
// The op creation methods take the user's input and generate an `OpDesc` proto message,
// then pass the `OpDesc` to the C++ side to create the Op pointer.
//
syntax="proto2";
package paddle.framework;
import "attr_type.proto";
// Attribute protocol message for 3rd-party language binding.
// It stores which attributes the Op supports and what their types are.
message AttrProto {
// Supported attribute name. e.g. `scale` for cosine op.
required string name = 1;
// Supported attribute type.
required AttrType type = 2;
// Supported attribute comments. It helps 3rd-party language generate doc-string.
required string comment = 3;
}
// Input or output message for 3rd-party language binding.
// It contains parameter name and its comments.
message VarProto {
// Input or output name in that op creation function.
// e.g. `cos(a, b, output, ...)`, "a", "b", "output" are names.
required string name = 1;
// The comment for that input. It helps 3rd-party language generate doc-string.
required string comment = 2;
}
// Op protocol message for 3rd-party language binding.
// It contains all information for generating op creation method.
message OpProto {
// The input information to generate op creation method.
repeated VarProto inputs = 1;
// The output information to generate op creation method.
repeated VarProto outputs = 2;
// The attribute information to generate op creation method.
repeated AttrProto attrs = 3;
// The comments for that Op. It helps 3rd-party language generate
// doc-string. The whole documentation of that Op is generated by comment,
// inputs, outputs, attrs together.
required string comment = 4;
// The type of that Op.
required string type = 5;
}
#include <gtest/gtest.h>
#include <paddle/framework/op_proto.pb.h>
TEST(TestOpProto, ALL) {
paddle::framework::OpProto proto;
{
auto ipt = proto.mutable_inputs()->Add();
*ipt->mutable_name() = "a";
*ipt->mutable_comment() = "the one input of cosine op";
}
{
auto ipt = proto.mutable_inputs()->Add();
*ipt->mutable_name() = "b";
*ipt->mutable_comment() = "the other input of cosine op";
}
{
auto opt = proto.mutable_outputs()->Add();
*opt->mutable_name() = "output";
*opt->mutable_comment() = "the output of cosine op";
}
{
auto attr = proto.mutable_attrs()->Add();
*attr->mutable_name() = "scale";
attr->set_type(paddle::framework::AttrType::FLOAT);
*attr->mutable_comment() = "the scale attribute of cosine op";
}
proto.set_type("cos");
*proto.mutable_comment() = "cosine op, output = scale * cos(a, b)";
ASSERT_TRUE(proto.IsInitialized());
}
\ No newline at end of file
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#pragma once
namespace paddle {
namespace framework {
using paddle::platform::Place;
using paddle::platform::get_place;

class Tensor {
 public:
template <typename T>
const T* data() const {
PADDLE_ASSERT(holder_ != nullptr,
"Tensor::data must be called after Tensor::mutable_data");
    return static_cast<const T*>(holder_->Ptr());
}
  template <typename T,  // must be POD types
            typename = typename std::enable_if<std::is_pod<T>::value>::type>
T* mutable_data(DDim dims, Place place) {
    if (holder_ == nullptr || holder_->place() != place ||
        holder_->Size() < dims.product() * sizeof(T)) {
      holder_.reset(new PlaceholderImpl<T>(place, dims.product() * sizeof(T)));
}
return static_cast<T*>(holder_->Ptr());
}
  template <typename T,  // must be POD types
            typename = typename std::enable_if<std::is_pod<T>::value>::type>
T* mutable_data(DDim dims) {
return mutable_data<T>(dims, paddle::platform::get_place());
}
private:
// Placeholder hides type T, so it doesn't appear as a template
// parameter of Variable.
struct Placeholder {
virtual ~Placeholder() {}
virtual void* Ptr() const = 0;
    virtual Place place() const = 0;
virtual size_t Size() const = 0;
};
template <typename T>
struct PlaceholderImpl : public Placeholder {
PlaceholderImpl(Place pl, size_t size)
: ptr_(paddle::memory::Alloc(pl, size), paddle::memory::Deleter(pl)),
place_(pl),
size_(size) {}
virtual void* Ptr() const { return static_cast<void*>(ptr_.get()); }
virtual size_t Size() const { return size_; }
    virtual Place place() const { return place_; }
std::unique_ptr<T, memory::Deleter> ptr_;
Place place_; // record the place of ptr_.
size_t size_; // size of the memory block.
};
std::unique_ptr<Placeholder> holder_; // holds the memory block if allocated.
};
} // namespace framework
} // namespace paddle
@@ -12,6 +12,7 @@ set(OPITMIZER_SRCS
 add_library(paddle_optimizer STATIC ${OPITMIZER_SRCS})
 add_dependencies(paddle_optimizer paddle_proto ${external_project_dependencies})

 if(WITH_TESTING)
   add_simple_unittest(serialization_test)
   add_simple_unittest(parameter_optimizer_test)
......
@@ -5,6 +5,8 @@ import paddle.trainer_config_helpers.optimizers as v1_optimizers
 """
 Optimizers(update equation) for SGD method.

+TODO(zhihong) : create new optimizer with proto config, add new optimizer here
+
 TODO(yuyang18): Complete comments.
 """
......