Commit 3158efe9
Authored Jun 04, 2017 by dzhwinter

"move cmake scripts too"

Parent: 62cd5c7a
Showing 3 changed files with 101 additions and 0 deletions (+101 / -0):

- paddle/CMakeLists.txt (+1 / -0)
- proto/CMakeLists.txt (+1 / -0)
- proto/OptimizerConfig.proto (+99 / -0)
paddle/CMakeLists.txt

@@ -8,6 +8,7 @@ add_subdirectory(gserver)
 add_subdirectory(pserver)
 add_subdirectory(trainer)
 add_subdirectory(scripts)
+add_subdirectory(optimizer)
 # Do not build go directory until go cmake is working smoothly.
 # if(CMAKE_Go_COMPILER)
proto/CMakeLists.txt

@@ -5,6 +5,7 @@ set(proto_filenames
     ParameterConfig.proto
     ParameterService.proto
     TrainerConfig.proto
+    OptimizerConfig.proto
     ParameterServerConfig.proto
     )
 set(PROTO_GEN)
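Adding OptimizerConfig.proto to proto_filenames lets the generation rules already present in this CMakeLists pick the new file up along with the other Paddle protos. For quick experimentation outside the CMake build, Python bindings could also be produced with the grpcio-tools protoc wrapper; this is only a sketch, and the paths and output directory below are illustrative assumptions, not part of this commit:

# Hypothetical, standalone way to generate Python bindings for the newly
# listed file; the real build uses the protoc rules in proto/CMakeLists.txt.
from grpc_tools import protoc

protoc.main([
    "protoc",              # argv[0] placeholder expected by protoc.main
    "--proto_path=proto",  # directory containing OptimizerConfig.proto
    "--python_out=.",      # writes OptimizerConfig_pb2.py into the current dir
    "OptimizerConfig.proto",
])

The resulting OptimizerConfig_pb2 module is what the usage sketch after the new .proto file below imports.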
proto/OptimizerConfig.proto (new file, mode 100644)

syntax = "proto2";

option optimize_for = LITE_RUNTIME;
package paddle;
message SGDConfig {
  // SGD
  // momentum: float >= 0. Parameter updates momentum.
  // decay: float >= 0. Learning rate decay over each update.
  // nesterov: boolean. Whether to apply Nesterov momentum.
  optional double momentum = 21 [default = 0.0];
  optional double decay = 23 [default = 0.0];
  optional bool nesterov = 24 [default = false];
}
message AdadeltaConfig {
  // Adadelta
  // It is recommended to leave it at the default value.
  // rho: float >= 0.
  // epsilon: float >= 0. Fuzz factor.
  // decay: float >= 0. Learning rate decay over each update.
  // reference : [Adadelta - an adaptive learning rate method](http://arxiv.org/abs/1212.5701)
  optional double rho = 33 [default = 0.90];
  optional double epsilon = 31 [default = 1e-5];
  optional double decay = 32 [default = 0.0];
}
message AdagradConfig {
  // Adagrad
  // epsilon: float >= 0.
  // decay: float >= 0. Learning rate decay over each update.
  // reference : [Adaptive Subgradient Methods for Online Learning and Stochastic Optimization](http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)
  optional double epsilon = 41 [default = 1e-5];
  optional double decay = 42 [default = 0.0];
}
message AdamConfig {
  // Adam
  // beta_1: float, 0 < beta < 1. Generally close to 1.
  // beta_2: float, 0 < beta < 1. Generally close to 1.
  // epsilon: float >= 0. Fuzz factor.
  // decay: float >= 0. Learning rate decay over each update.
  // reference : [Adam - A Method for Stochastic Optimization](http://arxiv.org/abs/1412.6980v8)
  optional double beta_1 = 41;
  optional double beta_2 = 42;
  optional double epsilon = 43;
  optional double decay = 44;
}
message LearningRateConfig {
  // learning rate policy
  required double learning_rate = 40 [default = 1.0];
  optional double lr_decay_a = 25;
  optional double lr_decay_b = 26;
}
message OptimizerConfig {
  // common config of optimizer
  required string optimizer_name = 1;

  // algorithm config
  enum OptimizerType {
    SGD = 1;
    Adadelta = 2;
    Adagrad = 3;
    Adam = 4;
  }
  required OptimizerType optimizer_type = 2;
  optional SGDConfig sgd = 3;
  optional AdadeltaConfig adadelta = 4;
  optional AdagradConfig adagrad = 5;
  optional AdamConfig adam = 6;

  // learning rate runtime policy config
  // lr_policy : string
  //   ConstLr = 0;
  //   LinearLr = 1;
  required string lr_policy = 11;
  required LearningRateConfig lr_config = 12;
  optional uint64 num_sample_passed = 13 [default = 0];

  // regularizer config
  enum RegularizerType {
    L1 = 1;
    L2 = 2;
    L1L2 = 3;
  }
  optional RegularizerType regularizer_type = 21;

  // common config of optimizer
  optional double clipnorm = 101;
  optional double clipvalue = 102;
}
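As a rough illustration of how this schema might be populated once Python bindings exist, the sketch below builds an SGD configuration with the "ConstLr" policy string mentioned in the lr_policy comment and round-trips it through serialization. The module name OptimizerConfig_pb2 follows protoc's default naming, and every field value here is made up for the example; none of this is taken from the commit itself.

# Illustrative sketch only. Assumes OptimizerConfig_pb2 was generated from
# proto/OptimizerConfig.proto and is importable from the current path.
import OptimizerConfig_pb2 as opt_pb

config = opt_pb.OptimizerConfig()
config.optimizer_name = "sgd_optimizer"             # required string field
config.optimizer_type = opt_pb.OptimizerConfig.SGD  # nested enum value
config.sgd.momentum = 0.9                           # fills the SGDConfig submessage
config.sgd.decay = 1e-4
config.sgd.nesterov = True
config.lr_policy = "ConstLr"                        # policy name from the comment above
config.lr_config.learning_rate = 0.01               # required LearningRateConfig
config.regularizer_type = opt_pb.OptimizerConfig.L2

# Standard protobuf serialize/parse round trip.
payload = config.SerializeToString()
restored = opt_pb.OptimizerConfig.FromString(payload)
assert restored.sgd.momentum == 0.9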