s920243400 / PaddleDetection (forked from PaddlePaddle / PaddleDetection)

Commit 8a42a549
Authored Dec 19, 2016 by emailweixu; committed via GitHub on Dec 19, 2016

Merge pull request #915 from reyoung/feature/add_unittest

Add unittest to cover SgdThreadUpdater's enableBufType

Parents: 80b45ad1, 7aad9f53

Showing 6 changed files with 58 additions and 2 deletions (+58 -2):

paddle/trainer/ThreadParameterUpdater.cpp                  +3  -0
paddle/trainer/tests/CMakeLists.txt                        +2  -1
paddle/trainer/tests/fake_file_list.list                   +1  -0
paddle/trainer/tests/simple_sparse_neural_network.py       +23 -0
paddle/trainer/tests/simple_sparse_neural_network_dp.py    +21 -0
paddle/trainer/tests/test_TrainerOnePass.cpp               +8  -1

paddle/trainer/ThreadParameterUpdater.cpp

@@ -55,6 +55,9 @@ void SgdThreadUpdater::init(std::vector<ParameterPtr>& parameters) {
       // not create parameter buf for PARAMETER_GRADIENT for sparse update in
       // Parameter::enableType(). But gradient parameter buf is still used
       // in SgdThreadUpdater. We need to explicitly create it.
+      //
+      // The AverageOptimizer::restore/apply method will use PARAMETER_GRADIENT
+      // as a temp buffer.
       para->enableBufType(PARAMETER_GRADIENT);
     }
   }
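
Note: the comment added above records the reasoning behind the existing call: for sparse-update parameters, Parameter::enableType() deliberately skips allocating the dense gradient buffer, yet SgdThreadUpdater (and AverageOptimizer::restore/apply, which borrows PARAMETER_GRADIENT as a temp buffer) still needs one, so the updater allocates it explicitly. A rough, framework-free Python sketch of that idea (toy classes and names, not the PaddlePaddle C++ API):

# Toy illustration only: names mirror the ideas in the comment above,
# not actual PaddlePaddle classes or signatures.
PARAMETER_VALUE = "value"
PARAMETER_GRADIENT = "gradient"


class ToyParameter(object):
    def __init__(self, size, sparse_update=False):
        self.size = size
        self.sparse_update = sparse_update
        self.bufs = {}

    def enable_type(self):
        # Mirrors Parameter::enableType(): for sparse-update parameters the
        # dense gradient buffer is deliberately not allocated here.
        self.bufs[PARAMETER_VALUE] = [0.0] * self.size
        if not self.sparse_update:
            self.bufs[PARAMETER_GRADIENT] = [0.0] * self.size

    def enable_buf_type(self, buf_type):
        # Mirrors para->enableBufType(PARAMETER_GRADIENT): allocate on demand.
        self.bufs.setdefault(buf_type, [0.0] * self.size)


class ToySgdThreadUpdater(object):
    def init(self, parameters):
        for para in parameters:
            para.enable_type()
            if para.sparse_update:
                # The updater (and an averaging optimizer that uses the
                # gradient buffer as scratch space) still needs it, so
                # create it explicitly.
                para.enable_buf_type(PARAMETER_GRADIENT)


if __name__ == "__main__":
    p = ToyParameter(size=4, sparse_update=True)
    ToySgdThreadUpdater().init([p])
    assert PARAMETER_GRADIENT in p.bufs  # buffer exists despite sparse_update
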
paddle/trainer/tests/CMakeLists.txt

@@ -27,7 +27,8 @@ add_test(NAME test_Trainer
 add_unittest_without_exec(test_TrainerOnePass
     test_TrainerOnePass.cpp)
 add_test(NAME test_TrainerOnePass
-    COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python/
+    COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d
+            ${PROJ_ROOT}/python/:${PROJ_ROOT}/paddle/trainer/tests
         ${PROJ_ROOT}/paddle/.set_port.sh -p port ${CMAKE_CURRENT_BINARY_DIR}/test_TrainerOnePass
     WORKING_DIRECTORY ${PROJ_ROOT}/paddle/)
paddle/trainer/tests/fake_file_list.list (new file, mode 100644)

do_not_matter.txt
paddle/trainer/tests/simple_sparse_neural_network.py (new file, mode 100644)

from paddle.trainer_config_helpers import *

settings(batch_size=128,
         learning_method=AdaGradOptimizer(),
         learning_rate=1e-4)

file_list = 'trainer/tests/fake_file_list.list'

define_py_data_sources2(train_list=file_list,
                        test_list=file_list,
                        module="simple_sparse_neural_network_dp",
                        obj="process")

embedding = embedding_layer(input=data_layer(name="word_ids", size=65536),
                            size=128,
                            param_attr=ParamAttr(sparse_update=True))

prediction = fc_layer(input=embedding, size=10, act=SoftmaxActivation())

outputs(classification_cost(input=prediction,
                            label=data_layer(name='label', size=10)))
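
Note: the config declares the 65536 x 128 embedding table with ParamAttr(sparse_update=True), so a training step only carries gradients for the rows whose word ids actually occur in the batch; that is the code path the new test exercises through SgdThreadUpdater. A rough, framework-free Python sketch of such a row-wise update (names and storage are illustrative, not the PaddlePaddle API):

import random

VOCAB_SIZE = 65536  # size of the "word_ids" data_layer in the config
EMBED_DIM = 128     # size of the embedding_layer in the config
BATCH_SIZE = 128    # batch_size in settings(...)

# Stand-in for the embedding table: rows materialized lazily so the sketch
# stays small; the real parameter is a dense VOCAB_SIZE x EMBED_DIM matrix.
rows = {}

def get_row(word_id):
    return rows.setdefault(word_id, [0.0] * EMBED_DIM)

def sparse_sgd_step(row_grads, lr=1e-4):
    # Only rows whose word ids appeared in the batch receive an update,
    # which is the point of ParamAttr(sparse_update=True).
    for word_id, grad in row_grads.items():
        row = get_row(word_id)
        for j, g in enumerate(grad):
            row[j] -= lr * g

# A toy batch: BATCH_SIZE random word ids, each with a dummy gradient.
batch_ids = [random.randint(0, VOCAB_SIZE - 1) for _ in range(BATCH_SIZE)]
sparse_sgd_step({wid: [1.0] * EMBED_DIM for wid in batch_ids})

# At most BATCH_SIZE of the VOCAB_SIZE rows were touched in this step.
assert len(rows) <= BATCH_SIZE
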
paddle/trainer/tests/simple_sparse_neural_network_dp.py (new file, mode 100644)

from paddle.trainer.PyDataProvider2 import provider, integer_sequence, integer_value

import random


def init_hook(settings, is_train, **kwargs):
    settings.is_train = is_train


@provider(input_types={'word_ids': integer_value(65536),
                       'label': integer_value(10)},
          min_pool_size=0,
          init_hook=init_hook)
def process(settings, filename):
    if settings.is_train:
        data_size = 2 ** 20
    else:
        data_size = 2 ** 10

    for _ in xrange(data_size):
        yield random.randint(0, 65535), random.randint(0, 9)
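
Note: each sample the provider yields is a (word_id, label) pair, with 2 ** 20 samples per pass in training mode and 2 ** 10 in test mode (the original file is Python 2, hence xrange). A quick Python 3 stand-in for the generator body, without the @provider decorator and purely for illustration, makes the sample shape easy to check:

import random

def fake_samples(is_train):
    # Mirrors the generator body above: 2**20 training samples, 2**10 test
    # samples, each a (word_id in [0, 65535], label in [0, 9]) pair.
    data_size = 2 ** 20 if is_train else 2 ** 10
    for _ in range(data_size):
        yield random.randint(0, 65535), random.randint(0, 9)

# Peek at a few test-mode samples.
gen = fake_samples(is_train=False)
for _ in range(3):
    word_id, label = next(gen)
    assert 0 <= word_id < 65536 and 0 <= label < 10
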
paddle/trainer/tests/test_TrainerOnePass.cpp

@@ -27,6 +27,9 @@ static const string& configFile1 = "trainer/tests/sample_trainer_config.conf";
 static const string& configFile2 =
     "trainer/tests/sample_trainer_config_parallel.conf";
+static const string& configFileSimpleSparse =
+    "trainer/tests/simple_sparse_neural_network.py";
+
 DECLARE_bool(use_gpu);
 DECLARE_string(config);
 DECLARE_int32(gpu_id);

@@ -298,11 +301,15 @@ TEST(checkRemoteUpdater, cpuDeltaTrainerOldUpdater) {
   checkRemoteParameterUpdaterTest(configFile1, false, false, 1, true, 10);
 }

+TEST(SgdThreadUpdater, simpleSparseNN) {
+  trainerOnePassTest(configFileSimpleSparse, false, false, 1, 0.5, true);
+}
+
 int main(int argc, char** argv) {
-  testing::InitGoogleTest(&argc, argv);
   initMain(argc, argv);
   initPython(argc, argv);
   gNumDevices = hl_get_device_count();
+  testing::InitGoogleTest(&argc, argv);

   FLAGS_num_passes = 1;          // train one pass
   FLAGS_saving_period = 100000;  // do not save parameteres