diff --git a/go/pserver/cclient/cclient.go b/go/pserver/cclient/cclient.go index 6aaaff7409dfcf500a24496b0d11c3eae3eb9348..ba2a235de37066a1bfab379eb27e5979f5a292f2 100644 --- a/go/pserver/cclient/cclient.go +++ b/go/pserver/cclient/cclient.go @@ -121,14 +121,7 @@ func paddle_begin_init_params(client C.paddle_pserver_client) C.int { //export paddle_init_param func paddle_init_param(client C.paddle_pserver_client, param C.paddle_parameter, param_config unsafe.Pointer, config_len C.int) C.int { - et := pserver.ElementType(param.element_type) - name := C.GoString(param.name) - pc := pserver.ParameterWithConfig{ - Param: pserver.Parameter{Name: name, ElementType: et, Content: param.content, Length: para.content_len}, - Config: cArrayToSlice(param_config, int(config_len)), - } - c := get(client) - err := c.InitParam(pc) + et if err != nil { if err.Error() == pserver.AlreadyInitialized { diff --git a/go/pserver/cclient/test/dump_optimizer_proto.py b/go/pserver/cclient/test/dump_optimizer_proto.py new file mode 100644 index 0000000000000000000000000000000000000000..2ed4db97f907e20ce33d5e3e73381c239ad0f46c --- /dev/null +++ b/go/pserver/cclient/test/dump_optimizer_proto.py @@ -0,0 +1,13 @@ +import OptimizerConfig_pb2 as pb +config = pb.OptimizerConfig() +config.clip_norm = 0.1 +config.lr_policy = pb.OptimizerConfig.Const +config.optimizer = pb.OptimizerConfig.SGD +config.sgd.momentum = 0.0 +config.sgd.decay = 0.0 +config.sgd.nesterov = False +config.const_lr.learning_rate = 0.1 +s = config.SerializeToString() +with open("optimizer.pb.txt", 'wb') as f: + f.write(s) diff --git a/go/pserver/cclient/test/main.c b/go/pserver/cclient/test/main.c index 03f749d4e46c4890c6dcfa25af572dab4a053c86..7d26127b600e0110e8d2ae0e6c514a006efdcd5c 100644 --- a/go/pserver/cclient/test/main.c +++ b/go/pserver/cclient/test/main.c @@ -45,9 +45,20 @@ void getParams(paddle_pserver_client c) { } } + + int main() { char addr[] = "localhost:3000"; paddle_pserver_client c = 
paddle_new_pserver_client(addr, 1); + char *config_proto = NULL; + size_t config_proto_len = 0; + ssize_t nread; + FILE *fp = fopen("optimizer.pb.txt", "r"); + if(!fp) { fail(); } + while((nread = getline(&config_proto, &config_proto_len, fp)) != -1) { + printf("%s", config_proto); + } + fclose(fp); retry: if (paddle_begin_init_params(c)) { paddle_parameter param; @@ -59,7 +70,7 @@ retry: param.name = name_a; param.content = content_a; param.content_len = 2000; - int error = paddle_init_param(c, param, NULL, 0); + int error = paddle_init_param(c, param, config_proto, config_proto_len); if (error != 0) { goto retry; } diff --git a/go/pserver/client_test.go b/go/pserver/client_test.go index d0371a26a13fac9daecacd0b6a271caa6d830651..c5d38e41129dc6297f639becf14809e716019c83 100644 --- a/go/pserver/client_test.go +++ b/go/pserver/client_test.go @@ -75,7 +75,9 @@ func TestClientFull(t *testing.T) { var p pserver.Parameter p.Name = "p_" + strconv.Itoa(i) p.ElementType = pserver.Float32 - p.Content = make([]byte, (i+1)*100) + ElementValue := make([]byte, (i+1)*100) + p.Content = &ElementValue[0] + p.Length = len(ElementValue) err := c.InitParam(pserver.ParameterWithConfig{Param: 
p}) if err != nil { t.Fatal(err) @@ -92,7 +94,9 @@ func TestClientFull(t *testing.T) { var g pserver.Gradient g.Name = "p_" + strconv.Itoa(i) g.ElementType = pserver.Float32 - g.Content = make([]byte, (i+1)*100) + ElementValue := make([]byte, (i+1)*100) + g.Content = &ElementValue[0] + g.Length = len(ElementValue) grads = append(grads, g) } diff --git a/python/paddle/v2/optimizer.py b/python/paddle/v2/optimizer.py index 1ef2dceca910e806bddf17c95d1c345a144d9e31..8124e219ba499333ecdf4b34ff5352e281aaa016 100644 --- a/python/paddle/v2/optimizer.py +++ b/python/paddle/v2/optimizer.py @@ -5,6 +5,8 @@ import paddle.trainer_config_helpers.optimizers as v1_optimizers """ Optimizers(update equation) for SGD method. +TODO(zhihong) : create new optimizer with proto config, add new optimizer here + TODO(yuyang18): Complete comments. """