diff --git a/go/pserver/cclient/test/main.c b/go/pserver/cclient/test/main.c
index 72ec3590768c8ec7b2bfb400888c146f1d6077a4..6adc3c9b533f54b343f46ab688799878888fabfd 100644
--- a/go/pserver/cclient/test/main.c
+++ b/go/pserver/cclient/test/main.c
@@ -76,7 +76,5 @@ retry:
     fail();
   }
 
-  printf("test success!\n");
-
   return 0;
 }
diff --git a/go/pserver/cclient/test/test_cclient.c b/go/pserver/cclient/test/test_cclient.c
index 50ba2d5597a1ce6d1b63eb5156d237402608e758..9083064eeeb8115dde2d4117bedfde192a64b876 100644
--- a/go/pserver/cclient/test/test_cclient.c
+++ b/go/pserver/cclient/test/test_cclient.c
@@ -21,7 +21,7 @@ void print_parameter(paddle_gradient* param) {
   printf("content_len: %d\n", param->content_len);
   printf("content_type: %d\n", param->element_type);
   int i;
-  for (i = 0; i < param->content_len / sizeof(real); ++i) {
+  for (i = 0; i < param->content_len / (int)sizeof(real); ++i) {
     printf("%f ", ((float*)param->content)[i]);
   }
   printf("\n\n");
@@ -110,6 +110,5 @@ retry:
     fail();
   }
 
-  printf("test success!\n");
   return 0;
 }
diff --git a/go/pserver/optimizer.c b/go/pserver/optimizer.c
index 48bbceb343b4bf3167d3459cfae7eac81bf98b01..f16ba2cbf8e168a434fdcdb4f1e0ba1e98d91c6b 100644
--- a/go/pserver/optimizer.c
+++ b/go/pserver/optimizer.c
@@ -32,7 +32,6 @@ int update_SGD(void* optimizer,
                const void* gradient,
                int num_bytes) {
   SGD_optimizer* o = (SGD_optimizer*)optimizer;
-  // TODO(a simple SGD implement)
   float* parameter = (float*)buffer;
   float* grad = (float*)gradient;
 
diff --git a/go/pserver/service.go b/go/pserver/service.go
index ab814662b6bf783f9336264834781f3881e10c1e..7d2a1fea865091edb2802e1c9f8f57e398559562 100644
--- a/go/pserver/service.go
+++ b/go/pserver/service.go
@@ -29,10 +29,6 @@ type Parameter struct {
   Content []byte
 }
 
-func (p *Parameter) toString() {
-  fmt.Println(p.Name, p.ElementType, p.Content)
-}
-
 // ParameterWithConfig contains the parameter and the configuration.
 type ParameterWithConfig struct {
   Param Parameter
@@ -53,7 +49,7 @@ type Service struct {
 
 // NewService creates a new service.
 func NewService() *Service {
-  s := &Service{opt: newOptimizer(sgd, 0.01)}
+  s := &Service{opt: newOptimizer(sgd, 0.005)}
   s.paramMap = make(map[string]Parameter)
   s.initialized = make(chan struct{})
   return s
diff --git a/paddle/trainer/NewRemoteParameterUpdater.cpp b/paddle/trainer/NewRemoteParameterUpdater.cpp
index d554e09759cec5d790644d1caf819d98b5601cbf..b3655d9d0255a86a0d8664fa3a70de3045a7b3cc 100644
--- a/paddle/trainer/NewRemoteParameterUpdater.cpp
+++ b/paddle/trainer/NewRemoteParameterUpdater.cpp
@@ -31,7 +31,6 @@ NewRemoteParameterUpdater::NewRemoteParameterUpdater(
 void NewRemoteParameterUpdater::init(
     const std::vector<ParameterPtr> &parameters) {
   ParameterUpdater::init(parameters);
-  LOG(INFO) << "NewRemoteParameterUpdater init in";
 
   for (auto &para : parameters_) {
     para->getBuf(PARAMETER_VALUE)->zeroMem();
@@ -58,7 +57,12 @@
   if (paddle_begin_init_params(parameterClient_)) {
     LOG(INFO) << "paddle_begin_init_params start";
     for (int i = 0; i < parameterSize(); ++i) {
-      paddle_init_param(parameterClient_, *newParameters_[i], NULL, 0);
+      auto paramConfig = parameters_[i]->getConfig();
+      std::string bytes = paramConfig.SerializeAsString();
+      const char *array = bytes.data();
+      int size = (int)bytes.size();
+      paddle_init_param(
+          parameterClient_, *newParameters_[i], (void *)array, size);
     }
     paddle_finish_init_params(parameterClient_);
     LOG(INFO) << "paddle_begin_init_params done";
diff --git a/paddle/trainer/NewRemoteParameterUpdater.h b/paddle/trainer/NewRemoteParameterUpdater.h
index b7c0425982b28f5a2ba0e7a4aee73daf4f82814a..1f22c15cef5494d9a1d4ef53f0ac1de4d3fae389 100644
--- a/paddle/trainer/NewRemoteParameterUpdater.h
+++ b/paddle/trainer/NewRemoteParameterUpdater.h
@@ -84,7 +84,6 @@ private:
 
     for (int i = 0; i < parameterSize(); ++i) {
       ParameterPtr param = parameters_[i];
-      new_params[i]->content_len = 10;
       new_params[i]->element_type = PADDLE_ELEMENT_TYPE_FLOAT32;
       new_params[i]->name = (char*)param->getName().c_str();
       new_params[i]->content =
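
For reference (this sketch is not part of the patch above): the TODO removed from go/pserver/optimizer.c pointed at the plain SGD step that update_SGD performs, parameter[i] -= learning_rate * grad[i]. A minimal standalone C version of that step follows; the names ending in _sketch are illustrative only, and the 0.005 learning rate mirrors the new default passed to newOptimizer in go/pserver/service.go.

#include <stdio.h>

/* Illustrative stand-in for the SGD_optimizer struct in optimizer.c. */
typedef struct {
  double learning_rate;
} SGD_optimizer_sketch;

/* Element-wise SGD step: parameter -= learning_rate * gradient. */
static void update_SGD_sketch(SGD_optimizer_sketch* o,
                              float* parameter,
                              const float* grad,
                              int num_bytes) {
  int i;
  for (i = 0; i < num_bytes / (int)sizeof(float); ++i) {
    parameter[i] -= (float)(o->learning_rate * grad[i]);
  }
}

int main() {
  SGD_optimizer_sketch o = {0.005};
  float param[2] = {1.0f, 2.0f};
  float grad[2] = {10.0f, 20.0f};
  update_SGD_sketch(&o, param, grad, (int)sizeof(param));
  printf("%f %f\n", param[0], param[1]); /* prints 0.950000 1.900000 */
  return 0;
}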