Commit 1f217f0a authored by dzhwinter

"add c testing, python testing TODO"

Parent 5128714c
...
@@ -121,14 +121,7 @@ func paddle_begin_init_params(client C.paddle_pserver_client) C.int {
 //export paddle_init_param
 func paddle_init_param(client C.paddle_pserver_client, param C.paddle_parameter, param_config unsafe.Pointer, config_len C.int) C.int {
-	et := pserver.ElementType(param.element_type)
-	name := C.GoString(param.name)
-	pc := pserver.ParameterWithConfig{
-		Param:  pserver.Parameter{Name: name, ElementType: et, Content: param.content, Length: param.content_len},
-		Config: cArrayToSlice(param_config, int(config_len)),
-	}
-	c := get(client)
-	err := c.InitParam(pc)
 	if err != nil {
 		if err.Error() == pserver.AlreadyInitialized {
......
import OptimizerConfig_pb2 as pb

config = pb.OptimizerConfig()
config.clip_norm = 0.1
config.lr_policy = pb.OptimizerConfig.Const
config.optimizer = pb.OptimizerConfig.SGD
config.sgd.momentum = 0.0
config.sgd.decay = 0.0
config.sgd.nesterov = False
config.const_lr.learning_rate = 0.1
s = config.SerializeToString()
# SerializeToString() returns the binary wire format, so write in binary mode.
with open("optimizer.pb.txt", "wb") as f:
    f.write(s)
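
As a quick sanity check (not part of this commit), the serialized bytes can be parsed back with the same generated module, which is essentially what the pserver side does once the C client hands it the raw config bytes. A minimal sketch, assuming the OptimizerConfig_pb2 module above:

import OptimizerConfig_pb2 as pb

# Read the file back in binary mode and parse the wire-format bytes.
with open("optimizer.pb.txt", "rb") as f:
    data = f.read()

parsed = pb.OptimizerConfig()
parsed.ParseFromString(data)
assert parsed.optimizer == pb.OptimizerConfig.SGD
assert abs(parsed.const_lr.learning_rate - 0.1) < 1e-9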
...
@@ -45,9 +45,20 @@ void getParams(paddle_pserver_client c) {
 	}
 }
 int main() {
 	char addr[] = "localhost:3000";
 	paddle_pserver_client c = paddle_new_pserver_client(addr, 1);
+	char *config_proto = NULL; /* getline() allocates and grows this buffer */
+	size_t config_proto_len = 0;
+	ssize_t nread;
+	FILE *fp = fopen("optimizer.pb.txt", "r");
+	if (!fp) {
+		fail();
+	}
+	/* after the loop, config_proto holds the last line read */
+	while ((nread = getline(&config_proto, &config_proto_len, fp)) != -1) {
+		printf("%s", config_proto);
+	}
+	fclose(fp);
 retry:
 	if (paddle_begin_init_params(c)) {
 		paddle_parameter param;
...
@@ -59,7 +70,7 @@ retry:
 		param.name = name_a;
 		param.content = content_a;
 		param.content_len = 2000;
-		int error = paddle_init_param(c, param, NULL, 0);
+		int error = paddle_init_param(c, param, config_proto, config_proto_len);
 		if (error != 0) {
 			goto retry;
 		}
......
...
@@ -22,6 +22,8 @@ def main():
     # create optimizer
     optimizer = paddle.optimizer.Momentum(momentum=0)
+    # TODO(zhihong): replace optimizer with new OptimizerConfig
+
     trainer = paddle.trainer.SGD(cost=cost,
                                  parameters=parameters,
                                  update_equation=optimizer,
......
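
For reference, a hedged sketch of what that TODO might eventually look like, built only from the OptimizerConfig fields shown in the generator snippet earlier; how the serialized bytes would be wired into paddle.trainer.SGD is left open by this commit, so everything below is hypothetical:

import OptimizerConfig_pb2 as pb

# Hypothetical: express paddle.optimizer.Momentum(momentum=0) as a proto config.
config = pb.OptimizerConfig()
config.optimizer = pb.OptimizerConfig.SGD
config.sgd.momentum = 0.0  # mirrors Momentum(momentum=0) in the trainer above
config.lr_policy = pb.OptimizerConfig.Const
config.const_lr.learning_rate = 0.1
serialized = config.SerializeToString()  # bytes a proto-configured optimizer would consume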
...
@@ -75,7 +75,9 @@ func TestClientFull(t *testing.T) {
 		var p pserver.Parameter
 		p.Name = "p_" + strconv.Itoa(i)
 		p.ElementType = pserver.Float32
-		p.Content = make([]byte, (i+1)*100)
+		ElementValue := make([]byte, (i+1)*100)
+		p.Content = &ElementValue[0]
+		p.Length = len(ElementValue)
 		err := c.InitParam(pserver.ParameterWithConfig{Param: p})
 		if err != nil {
 			t.Fatal(err)
...
@@ -92,7 +94,9 @@ func TestClientFull(t *testing.T) {
 		var g pserver.Gradient
 		g.Name = "p_" + strconv.Itoa(i)
 		g.ElementType = pserver.Float32
-		g.Content = make([]byte, (i+1)*100)
+		ElementValue := make([]byte, (i+1)*100)
+		g.Content = &ElementValue[0]
+		g.Length = len(ElementValue)
 		grads = append(grads, g)
 	}
......
...
@@ -5,6 +5,8 @@ import paddle.trainer_config_helpers.optimizers as v1_optimizers
 """
 Optimizers(update equation) for SGD method.

+TODO(zhihong): create new optimizer with proto config, add new optimizer here
+
 TODO(yuyang18): Complete comments.
 """
......