机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Commit e566b94f
Authored Dec 26, 2017 by Yang Yu

Revert C++ changes

Parent: 4450a312
Showing 2 changed files with 1 addition and 25 deletions (+1 −25)

paddle/operators/tensor_array_read_write_op.cc  +0 −11
paddle/operators/while_op.cc  +1 −14
paddle/operators/tensor_array_read_write_op.cc

@@ -136,17 +136,6 @@ class ReadFromArrayOp : public ArrayOp {
    auto &dev_ctx = *pool.Borrow(place);
    framework::CopyFrom(x_array[offset], place, dev_ctx, out_tensor);
    out_tensor->set_lod(x_array[offset].lod());
    if (Input("X") == "dynamic_rnn_0_output_array_fc_0.tmp_0_0@GRAD") {
      VLOG(10) << "Offset = " << offset;
      if (x_array[offset].numel() != 0) {
        auto d = x_array[offset].dims();
        std::ostringstream sout;
        for (int64_t i = 0; i < d[0]; ++i) {
          sout << x_array[offset].data<float>()[0 * d[1]] << ", ";
        }
        VLOG(10) << "Grad = " << sout.str();
      }
    }
  } else {
    VLOG(10) << "offset " << offset << " >= " << x_array.size();
  }
paddle/operators/while_op.cc

@@ -129,9 +129,6 @@ class WhileGradOp : public framework::OperatorBase {
        auto &og_inside =
            detail::Ref(cur_scope.Var(inside_og_name),
                        "Cannot find inside gradient %s", inside_og_name);
        VLOG(10) << "OG " << outside_og_name << " Type is "
                 << og_outside.Type().name();
        if (og_outside.Type().hash_code() ==
            typeid(framework::LoDTensor).hash_code()) {
          auto &outside_tensor = og_outside.Get<framework::LoDTensor>();

@@ -148,6 +145,7 @@ class WhileGradOp : public framework::OperatorBase {
          inside_array.resize(outside_array.size());
          for (size_t j = 0; j < inside_array.size(); ++j) {
            VLOG(10) << j << " " << outside_array[j].numel();
            if (outside_array[j].numel() != 0) {
              inside_array[j].set_lod(outside_array[j].lod());
              inside_array[j].ShareDataWith(outside_array[j]);

@@ -200,17 +198,6 @@ class WhileGradOp : public framework::OperatorBase {
          auto sum_op = framework::OpRegistry::CreateOp(
              "sum", {{"X", {pg_names[param_id], new_inside_name}}},
              {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
          VLOG(10) << "Accumulate the gradient of " << pg_names[param_id];
          if (pg_names[param_id] == "W@GRAD") {
            auto &w_g = detail::Ref(cur_scope.FindVar(new_inside_name))
                            .Get<framework::LoDTensor>();
            VLOG(10) << "W_G is" << w_g.data<float>()[0];
          } else {
            VLOG(10) << pg_names[param_id];
          }
          sum_op->Run(cur_scope, dev_place);
          cur_scope.Rename(new_inside_name, inside_grad_name);
        }
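The statements reverted in this commit are ad-hoc VLOG tracing that was only useful while chasing one specific gradient (note that the removed ReadFromArrayOp loop indexes data<float>()[0 * d[1]], i.e. the same element on every iteration). For reference, below is a minimal standalone sketch of the same glog-style verbose-logging pattern: messages at verbosity 10 are compiled in but stay silent unless the verbosity threshold is raised. This is not Paddle code; the helper name, shapes, and values are made up for illustration.

// Standalone sketch (not Paddle code) of the VLOG tracing pattern above.
#include <glog/logging.h>

#include <sstream>
#include <vector>

// Hypothetical helper: dump the first element of each row of a row-major
// rows x cols buffer, roughly mirroring the loop removed from ReadFromArrayOp.
void DumpFirstColumn(const std::vector<float> &data, int rows, int cols) {
  std::ostringstream sout;
  for (int i = 0; i < rows; ++i) {
    sout << data[i * cols] << ", ";
  }
  VLOG(10) << "Grad = " << sout.str();  // emitted only when verbosity >= 10
}

int main(int argc, char *argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;
  FLAGS_v = 10;  // enable VLOG(10); at the default of 0 the trace is silent
  DumpFirstColumn({1.f, 2.f, 3.f, 4.f}, 2, 2);
  return 0;
}

In practice the same effect is reached without recompiling by running the binary with GLOG_v=10 (or --v=10), which is why this kind of tracing is usually guarded by VLOG rather than plain LOG(INFO).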