diff --git a/go/pserver/client/c/test/test_train.py b/go/pserver/client/c/test/test_train.py
index 68e1d9b269209b695e27f91a656dc2d8e527b4cd..d6922672f4c1253e62cfe54965f6c2f3b5e6c7bf 100644
--- a/go/pserver/client/c/test/test_train.py
+++ b/go/pserver/client/c/test/test_train.py
@@ -19,7 +19,7 @@ def main():
     # create parameters
     parameters = paddle.parameters.create(cost)

-    # create optimizer
+    # create optimizer for the new remote updater to pserver
     optimizer = paddle.optimizer.Momentum(momentum=0)

     #TODO(zhihong) : replace optimizer with new OptimizerConfig
diff --git a/go/pserver/optimizer.go b/go/pserver/optimizer.go
index 54d108209402c27e79a9948f60ecbdadeffc7d9b..ee5fe6205b670920b0ce13dd678901e5154c0e7e 100644
--- a/go/pserver/optimizer.go
+++ b/go/pserver/optimizer.go
@@ -42,12 +42,12 @@ func newOptimizer(paramWithConfigs ParameterWithConfig) *optimizer {
 	c := paramWithConfigs.Config
 	log.WithFields(log.Fields{
 		"ElementType": p.ElementType,
-		"ParamSize":   len(p.Content),
+		"ParamSize":   len(p.Content) / C.sizeof_float,
 		"ConfigSize":  len(c),
 	}).Info("New Optimizer Created with config:")
 	var cbuffer unsafe.Pointer
 	cbuffer = C.malloc(C.size_t(len(p.Content)))
 	C.memcpy(cbuffer, unsafe.Pointer(&p.Content[0]), C.size_t(len(p.Content)))
 	o.opt = C.paddle_create_optimizer((*C.uchar)(&c[0]), C.int(len(c)),
 		C.paddle_element_type(p.ElementType), cbuffer, C.int(len(p.Content)/C.sizeof_float),
 		(*C.char)(nullPtr), 0)
diff --git a/paddle/trainer/NewRemoteParameterUpdater.cpp b/paddle/trainer/NewRemoteParameterUpdater.cpp
index f25ce2f7f06f6da0feab27da61b8e49689cbe213..b359d9da2167bf459504e15c3140b3d956f417f3 100644
--- a/paddle/trainer/NewRemoteParameterUpdater.cpp
+++ b/paddle/trainer/NewRemoteParameterUpdater.cpp
@@ -22,7 +22,8 @@ DECLARE_string(save_dir);
 namespace paddle {
 NewRemoteParameterUpdater::NewRemoteParameterUpdater(
     const OptimizationConfig &config, const std::string pserverSpec)
-    : parameterClient_(-1),
+    : trainerConfig_(config),
+      parameterClient_(-1),
       newParameters_(nullptr),
       newGradients_(nullptr),
       pserverSpec_(pserverSpec) {}
@@ -51,7 +52,22 @@ void NewRemoteParameterUpdater::init(
   LOG(INFO) << "paddle_begin_init_params start";
   for (int i = 0; i < parameterSize(); ++i) {
     auto paramConfig = parameters_[i]->getConfig();
-    std::string bytes = paramConfig.SerializeAsString();
+    LOG(INFO) << "old param config: " << paramConfig.DebugString();
+    // FIXME(typhoonzero): convert old paramConfig to optimizerConfig
+    OptimizerConfig optimizeConfigV2;
+    auto sgdConfigV2 = optimizeConfigV2.mutable_sgd();
+    sgdConfigV2->set_momentum(paramConfig.momentum());
+    sgdConfigV2->set_decay(paramConfig.decay_rate());
+    optimizeConfigV2.set_lr_policy(paddle::OptimizerConfig::Const);
+    auto constlr = optimizeConfigV2.mutable_const_lr();
+    constlr->set_learning_rate(paramConfig.learning_rate());
+    if (trainerConfig_.algorithm() == "sgd") {
+      optimizeConfigV2.set_optimizer(paddle::OptimizerConfig::SGD);
+      // FIXME: config all algorithms
+    } else {
+      optimizeConfigV2.set_optimizer(paddle::OptimizerConfig::SGD);
+    }
+    std::string bytes = optimizeConfigV2.SerializeAsString();
     const char *array = bytes.data();
     int size = (int)bytes.size();
     paddle_init_param(
@@ -83,4 +99,4 @@ void NewRemoteParameterUpdater::finishBatch(real cost) {
 void NewRemoteParameterUpdater::startPass() {}

 bool NewRemoteParameterUpdater::finishPass() { return true; }
-}
+}  // namespace paddle
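The NewRemoteParameterUpdater.cpp hunk above hand-translates the legacy per-parameter ParameterConfig into the new OptimizerConfig protobuf before it is shipped to the pserver. Below is a rough Python sketch of the same mapping, for illustration only: the module name OptimizerConfig_pb2 is a hypothetical generated-protobuf name, and the field names simply mirror the C++ accessors used in the hunk.

# Hypothetical sketch of the ParameterConfig -> OptimizerConfig mapping
# done in NewRemoteParameterUpdater::init; OptimizerConfig_pb2 is an
# assumed generated module, not part of the patch itself.
import OptimizerConfig_pb2 as opt_pb2

def to_optimizer_config(param_config):
    """Serialize an old-style ParameterConfig as an OptimizerConfig."""
    conf = opt_pb2.OptimizerConfig()
    # SGD hyper-parameters come straight from the old config.
    conf.sgd.momentum = param_config.momentum
    conf.sgd.decay = param_config.decay_rate
    # Only a constant learning-rate policy is wired up so far.
    conf.lr_policy = opt_pb2.OptimizerConfig.Const
    conf.const_lr.learning_rate = param_config.learning_rate
    # Every trainer algorithm currently falls back to plain SGD,
    # matching the FIXME in the C++ code.
    conf.optimizer = opt_pb2.OptimizerConfig.SGD
    return conf.SerializeToString()

The bytes returned here play the role of the serialized string handed to paddle_init_param in the C++ code.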
diff --git a/paddle/trainer/NewRemoteParameterUpdater.h b/paddle/trainer/NewRemoteParameterUpdater.h
index f735185f62b3491a63e34cfc4a2ef73dae12243e..dfed00bc216b1d41bb7520619b76702f9fe650f2 100644
--- a/paddle/trainer/NewRemoteParameterUpdater.h
+++ b/paddle/trainer/NewRemoteParameterUpdater.h
@@ -16,6 +16,7 @@ limitations under the License. */

 #include <functional>
 #include <thread>
+#include "OptimizerConfig.pb.h"
 #include "ParameterUpdater.h"
 #include "libpaddle_pserver_cclient.h"
 #include "paddle/pserver/ParameterClient2.h"
@@ -101,6 +102,7 @@ private:
   }

 protected:
+  const OptimizationConfig& trainerConfig_;
   /// internal parameter client object for exchanging data with pserver
   paddle_pserver_client parameterClient_;
   /// the parameters for new pserver client
diff --git a/python/paddle/v2/optimizer.py b/python/paddle/v2/optimizer.py
index 8124e219ba499333ecdf4b34ff5352e281aaa016..390c22ee552c506fde1567efba1326a6d735ad2e 100644
--- a/python/paddle/v2/optimizer.py
+++ b/python/paddle/v2/optimizer.py
@@ -66,6 +66,8 @@ class Optimizer(object):
             if use_sparse_remote_updater:
                 gradient_machine.prefetch(in_args)
                 parameter_updater.getParametersRemote()
+
+        :param pserver_spec: pserver location, e.g. localhost:3000
         :return: parameter_updater
         """
         if is_local:
diff --git a/python/paddle/v2/trainer.py b/python/paddle/v2/trainer.py
index f9658a8c5df9562073c8a187074a6cb3459ac5d9..96c6c4b89a2f2e2c3ecb95213e0e0191b1998f50 100644
--- a/python/paddle/v2/trainer.py
+++ b/python/paddle/v2/trainer.py
@@ -41,6 +41,7 @@ class SGD(object):
     :type parameters: paddle.v2.parameters.Parameters
     :param extra_layers: Some layers in the neural network graph are not
                          in the path of cost layer.
     :type extra_layers: paddle.v2.config_base.Layer
+    :param pserver_spec: pserver location, e.g. localhost:3000
     """

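For context, here is a minimal usage sketch of the new pserver_spec argument documented in the two docstring hunks above. It assumes the paddle.v2 SGD trainer accepts is_local and pserver_spec as those docstrings suggest; the one-layer regression network (in the style of test_train.py) is illustrative only.

# Minimal sketch: route parameter updates through a remote pserver by
# passing pserver_spec; layer names follow the paddle.v2 API of this era.
import paddle.v2 as paddle

paddle.init(use_gpu=False, trainer_count=1)

# A tiny linear-regression network, just to have a cost to optimize.
x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(13))
y = paddle.layer.data(name='y', type=paddle.data_type.dense_vector(1))
y_predict = paddle.layer.fc(input=x, size=1)
cost = paddle.layer.mse_cost(input=y_predict, label=y)

parameters = paddle.parameters.create(cost)
optimizer = paddle.optimizer.Momentum(momentum=0)

# is_local=False selects the new remote updater; pserver_spec names the
# parameter server location, e.g. localhost:3000 as in the docstrings.
trainer = paddle.trainer.SGD(cost=cost,
                             parameters=parameters,
                             update_equation=optimizer,
                             is_local=False,
                             pserver_spec="localhost:3000")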