Commit 5128714c authored by D dzhwinter

"integrate go and optimizer library"

Parent f2e6b99a
......@@ -123,9 +123,8 @@ func paddle_begin_init_params(client C.paddle_pserver_client) C.int {
 func paddle_init_param(client C.paddle_pserver_client, param C.paddle_parameter, param_config unsafe.Pointer, config_len C.int) C.int {
 	et := pserver.ElementType(param.element_type)
 	name := C.GoString(param.name)
-	content := cArrayToSlice(unsafe.Pointer(param.content), int(param.content_len))
 	pc := pserver.ParameterWithConfig{
-		Param:  pserver.Parameter{Name: name, ElementType: et, Content: content},
+		Param:  pserver.Parameter{Name: name, ElementType: et, Content: (*byte)(unsafe.Pointer(param.content)), Length: int(param.content_len)},
 		Config: cArrayToSlice(param_config, int(config_len)),
 	}
 	c := get(client)
......@@ -167,8 +166,7 @@ func paddle_send_grads(client C.paddle_pserver_client, grads **C.paddle_gradient
 		grad := *(**C.paddle_gradient)(unsafe.Pointer((uintptr(unsafe.Pointer(grads)) + uintptr(i)*unsafe.Sizeof(*grads))))
 		et := pserver.ElementType(grad.element_type)
 		name := C.GoString(grad.name)
-		content := cArrayToSlice(unsafe.Pointer(grad.content), int(grad.content_len))
-		gs = append(gs, pserver.Gradient{Name: name, ElementType: et, Content: content})
+		gs = append(gs, pserver.Gradient{Name: name, ElementType: et, Content: (*byte)(unsafe.Pointer(grad.content)), Length: int(grad.content_len)})
 	}
 	c := get(client)
......@@ -225,14 +223,14 @@ func paddle_get_params(client C.paddle_pserver_client, dst **C.paddle_parameter,
 	}
 	if unsafe.Pointer(param.content) != nullPtr {
-		if int(param.content_len) != len(p.Content) {
+		if int(param.content_len) != p.Length {
 			log.Errorf("the pre-allocated content len does not match parameter content len. Pre-allocated len: %d, returned len: %d", param.content_len, p.Length)
 			return C.PSERVER_ERROR
 		}
 	}
-	C.memcpy(unsafe.Pointer(param.content), unsafe.Pointer(&p.Content[0]), C.size_t(len(p.Content)))
-	param.content_len = C.int(len(p.Content))
+	C.memcpy(unsafe.Pointer(param.content), unsafe.Pointer(p.Content), C.size_t(p.Length))
+	param.content_len = C.int(p.Length)
 	param.element_type = C.paddle_element_type(p.ElementType)
 }
......
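For context: cArrayToSlice, which the hunks above drop from the parameter and gradient paths, turns a C buffer into a Go []byte. A minimal sketch of such a helper (a plausible reconstruction, not necessarily this repository's exact code) shows what the new Content *byte plus Length fields avoid:

// In a cgo file: C.GoBytes copies n bytes of C memory into a
// Go-managed slice. Passing the raw C pointer and length through
// pserver.Parameter instead skips this copy entirely.
func cArrayToSlice(p unsafe.Pointer, n int) []byte {
	if p == nil {
		return nil
	}
	return C.GoBytes(p, C.int(n))
}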
#include <stdlib.h>

#include "optimizer.h"

typedef int (*update_func)(void*, void*, paddle_element_type, const void*, int);
typedef void (*release_func)(void*);

typedef struct paddle_optimizer {
  update_func update;
  release_func release;
  void* optimizer;
} paddle_optimizer;

void paddle_release_optimizer(paddle_optimizer* o) {
  o->release(o->optimizer);
  free(o);
}

int paddle_update_parameter(paddle_optimizer* o,
                            void* buffer,
                            paddle_element_type element_type,
                            const void* gradient,
                            int num_bytes) {
  return o->update(o->optimizer, buffer, element_type, gradient, num_bytes);
}

typedef struct { double learning_rate; } SGD_optimizer;

int update_SGD(void* optimizer,
               void* buffer,
               paddle_element_type element_type,
               const void* gradient,
               int num_bytes) {
  SGD_optimizer* o = (SGD_optimizer*)optimizer;
  float* parameter = (float*)buffer;
  const float* grad = (const float*)gradient;
  int i;
  for (i = 0; i < (int)(num_bytes / sizeof(float)); ++i) {
    parameter[i] -= o->learning_rate * grad[i];
  }
  return 0;
}

void release_SGD(void* optimizer) {
  // The SGD_optimizer was allocated with malloc in
  // paddle_create_SGD_optimizer, so it must be freed here.
  free(optimizer);
}

paddle_optimizer* paddle_create_SGD_optimizer(double learning_rate) {
  SGD_optimizer* impl = (SGD_optimizer*)malloc(sizeof(SGD_optimizer));
  impl->learning_rate = learning_rate;
  paddle_optimizer* opt = (paddle_optimizer*)malloc(sizeof(paddle_optimizer));
  opt->update = update_SGD;
  opt->release = release_SGD;
  opt->optimizer = impl;
  return opt;
}
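Taken on its own, the C API above can be exercised directly from Go through cgo. A minimal sketch, assuming optimizer.h and optimizer.c are compiled alongside the Go file (the package layout here is hypothetical):

package main

/*
#include "optimizer.h"
*/
import "C"
import (
	"fmt"
	"unsafe"
)

func main() {
	// One SGD step: parameter -= learning_rate * gradient.
	o := C.paddle_create_SGD_optimizer(C.double(0.1))
	defer C.paddle_release_optimizer(o)

	param := []float32{1, 2, 3}
	grad := []float32{1, 1, 1}
	// num_bytes is the buffer size in bytes, not the element count.
	r := C.paddle_update_parameter(o, unsafe.Pointer(&param[0]),
		C.PADDLE_ELEMENT_TYPE_FLOAT32, unsafe.Pointer(&grad[0]),
		C.int(len(param)*4))
	fmt.Println(r, param) // 0 [0.9 1.9 2.9]
}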
package pserver
/*
#include "optimizer.h"
#include "paddle/optimizer/optimizer.h"
*/
import "C"
import (
......@@ -9,34 +9,30 @@ import (
"unsafe"
)
-type optimizerType int
-
-const (
-	sgd optimizerType = iota
-)
-
 var nullPtr = unsafe.Pointer(uintptr(0))

 type optimizer struct {
 	opt *C.struct_paddle_optimizer
 }

-func newOptimizer(t optimizerType, learning_rate float64) *optimizer {
+func newOptimizer(paramWithConfigs ParameterWithConfig) *optimizer {
 	o := &optimizer{}
-	o.opt = C.paddle_create_SGD_optimizer(C.double(learning_rate))
+	p := paramWithConfigs.Param
+	c := paramWithConfigs.Config
+	o.opt = C.paddle_create_optimizer((*C.uchar)(unsafe.Pointer(&c[0])), C.int(len(c)), unsafe.Pointer(p.Content), C.int(p.Length), nullPtr, 0)
 	return o
 }

 func (o *optimizer) UpdateParameter(p Parameter, g Gradient) error {
-	if len(p.Content) != len(g.Content) {
-		return fmt.Errorf("Name: %s, parameter and gradient length not match, parameter: %d, gradient: %d", p.Name, len(p.Content), len(g.Content))
+	if p.Length != g.Length {
+		return fmt.Errorf("Name: %s, parameter and gradient lengths do not match, parameter: %d, gradient: %d", p.Name, p.Length, g.Length)
 	}

 	if p.ElementType != g.ElementType {
 		return fmt.Errorf("Name: %s, parameter and gradient element types do not match, parameter: %v, gradient: %v", p.Name, p.ElementType, g.ElementType)
 	}

-	r := C.paddle_update_parameter(o.opt, unsafe.Pointer(&p.Content[0]), C.paddle_element_type(p.ElementType), unsafe.Pointer(&g.Content[0]), C.int(len(g.Content)))
+	r := C.paddle_update_parameter(o.opt, C.paddle_element_type(p.ElementType), unsafe.Pointer(g.Content), C.int(g.Length))
 	if r != 0 {
 		return fmt.Errorf("optimizer update returned error code: %d", r)
 	}
......
#ifndef PADDLE_PSERVER_OPTIMIZER_H
#define PADDLE_PSERVER_OPTIMIZER_H

typedef enum {
  PADDLE_ELEMENT_TYPE_INT32 = 0,
  PADDLE_ELEMENT_TYPE_UINT32 = 1,
  PADDLE_ELEMENT_TYPE_INT64 = 2,
  PADDLE_ELEMENT_TYPE_UINT64 = 3,
  PADDLE_ELEMENT_TYPE_FLOAT32 = 4,
  PADDLE_ELEMENT_TYPE_FLOAT64 = 5,
} paddle_element_type;

struct paddle_optimizer;
struct paddle_optimizer* paddle_create_SGD_optimizer(double learning_rate);
void paddle_release_optimizer(struct paddle_optimizer* o);
int paddle_update_parameter(struct paddle_optimizer* o,
                            void* buffer,
                            paddle_element_type element_type,
                            const void* gradient,
                            int num_bytes);

#endif /* PADDLE_PSERVER_OPTIMIZER_H */
......@@ -28,7 +28,8 @@ const (
 type Parameter struct {
 	Name        string
 	ElementType ElementType
-	Content     []byte
+	Content     *byte
+	Length      int
 }
// ParameterWithConfig contains the parameter and the configuration.
......@@ -44,14 +45,16 @@ type Gradient Parameter
 type Service struct {
 	initialized chan struct{}
-	mu       sync.Mutex
-	opt      *optimizer
+	mu sync.Mutex
+	// optMap maps each parameter name to the optimizer that updates it.
+	optMap   map[string]*optimizer
 	paramMap map[string]Parameter
 }

 // NewService creates a new service.
 func NewService() *Service {
-	s := &Service{opt: newOptimizer(sgd, 0.005)}
+	s := &Service{}
+	s.optMap = make(map[string]*optimizer)
 	s.paramMap = make(map[string]Parameter)
 	s.initialized = make(chan struct{})
 	return s
......@@ -74,6 +77,7 @@ func (s *Service) InitParam(paramWithConfigs ParameterWithConfig, dummy *int) er
 	// properly memory aligned; if not, make a copy to a memory-aligned
 	// region.
 	s.paramMap[paramWithConfigs.Param.Name] = paramWithConfigs.Param
+	s.optMap[paramWithConfigs.Param.Name] = newOptimizer(paramWithConfigs)
 	return nil
 }
......@@ -106,8 +110,12 @@ func (s *Service) SendGrad(g Gradient, dummy *int) error {
 	if !ok {
 		return fmt.Errorf("parameter: %s does not exist", g.Name)
 	}
+	o, ok := s.optMap[g.Name]
+	if !ok {
+		return fmt.Errorf("optimizer for parameter %s does not exist", g.Name)
+	}

-	return s.opt.UpdateParameter(p, g)
+	return o.UpdateParameter(p, g)
 }
// GetParam gets parameters from the parameter server.
......
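With these changes applied, InitParam both stores the parameter and constructs its optimizer from the per-parameter configuration, and SendGrad routes each gradient to that optimizer. A rough usage sketch; the import path, the Float32 constant name, and the placeholder config bytes are assumptions about code not shown in this diff:

package main

import "github.com/PaddlePaddle/Paddle/go/pserver"

func main() {
	buf := make([]byte, 4*100) // backing storage for 100 float32 values
	cfg := []byte{0x08, 0x01}  // placeholder standing in for a real serialized optimizer config

	p := pserver.Parameter{
		Name:        "w0",
		ElementType: pserver.Float32, // assumed constant name
		Content:     &buf[0],
		Length:      len(buf),
	}

	s := pserver.NewService()
	var dummy int
	// InitParam keeps the parameter and creates its optimizer entry.
	if err := s.InitParam(pserver.ParameterWithConfig{Param: p, Config: cfg}, &dummy); err != nil {
		panic(err)
	}
	// Later, each SendGrad call looks up the optimizer for g.Name and
	// updates the parameter memory in place through the C library.
}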