/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
syntax = "proto2";

package paddle;

// Generate lite-runtime code only; the full reflection API is not needed
// by the Paddle runtime that consumes these messages.
option optimize_for = LITE_RUNTIME;

/**
 * Configuration structures for parameters.
 */

// How a parameter's values are initialized. ParameterConfig.initial_strategy
// carries these values as a plain int32.
enum ParameterInitStrategy {
  // Gaussian init: initial_mean is the mean, initial_std the standard
  // deviation.
  PARAMETER_INIT_NORMAL = 0;
  // Uniform init over (initial_mean - initial_std,
  // initial_mean + initial_std).
  PARAMETER_INIT_UNIFORM = 1;
}

/**
 * Configuration for a hook attached to a parameter updater,
 * e.g. a 'pruning' hook.
 */
message ParameterUpdaterHookConfig {
  // Hook type, such as 'pruning'.
  required string type = 1;
  // Ratio of the parameter's elements that the hook forces to zero.
  optional double sparsity_ratio = 2 [ default = 0.6 ];
}

/**
 * Configuration of a single trainable parameter.
 */
message ParameterConfig {
  // Unique name of the parameter.
  required string name = 1;
  // Total number of elements in the parameter.
  required uint64 size = 2;
  // Per-parameter learning-rate multiplier.
  optional double learning_rate = 3 [ default = 1.0 ];
  // Momentum coefficient used by the updater.
  optional double momentum = 4 [ default = 0.0 ];
  // Mean used at initialization (see initial_strategy below).
  optional double initial_mean = 5 [ default = 0.0 ];
  // Standard deviation (or half-range) used at initialization.
  optional double initial_std = 6 [ default = 0.01 ];
  // Use L2-regularization if decay_rate is set and decay_rate_l1 is not set.
  optional double decay_rate = 7 [ default = 0.0 ];
  // Use L1-regularization if decay_rate_l1 is set.
  optional double decay_rate_l1 = 8 [ default = 0.0 ];
  // Dims of the parameter, e.g. dims[0] as height, dims[1] as width.
  repeated uint64 dims = 9;
  // The GPU device the parameter lives on.
  // Only used by ParallelNeuralNetwork; ignored otherwise.
  optional int32 device = 10 [ default = -1 ];
  // How to init the parameter; values match enum ParameterInitStrategy:
  // 0 (normal): treat initial_mean as mean, initial_std as standard deviation
  // 1 (uniform): range is (initial_mean - initial_std) to
  //              (initial_mean + initial_std)
  optional int32 initial_strategy = 11 [ default = 0 ];
  // Derive the init variance from the height of the Matrix instead of
  // initial_std.
  optional bool initial_smart = 12 [ default = false ];
  // Apply regularization every this many batches.
  optional int32 num_batches_regularization = 13 [ default = 1 ];
  // If is_sparse is true, the parameter is sparse; otherwise it is dense.
  optional bool is_sparse = 14 [ default = false ];
  // If the parameter is sparse, format should be "csc" or "csr";
  // empty means not sparse.
  optional string format = 15 [ default = "" ];
  // Whether to use sparse remote update.
  optional bool sparse_remote_update = 16 [ default = false ];
  // Gradient clipping threshold; no clipping by default (0.0).
  optional double gradient_clipping_threshold = 17 [ default = 0.0 ];
  // Static parameters are kept fixed during training.
  optional bool is_static = 18 [ default = false ];
  // para_id should NOT be set by config_parser. It is for internal use.
  optional uint64 para_id = 19;

  // Hooks to run in the parameter updater (e.g. pruning).
  repeated ParameterUpdaterHookConfig update_hooks = 20;
  // Set up conversion of the loaded matrix to CSR format.
  optional bool need_compact = 21 [ default = false ];
  // Whether to do sparse update for this parameter.
  optional bool sparse_update = 22 [ default = false ];

  // Whether this parameter is shared or not.
  optional bool is_shared = 23 [ default = false ];
  // Parameter block size; 0 means use the implementation default.
  optional uint64 parameter_block_size = 24 [ default = 0 ];
}