Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
Crayon鑫
Paddle
提交
fb63cd89
P
Paddle
项目概览
Crayon鑫
/
Paddle
与 Fork 源项目一致
Fork自
PaddlePaddle / Paddle
通知
1
Star
1
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1
Issue
1
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
未验证
提交
fb63cd89
编写于
1月 10, 2019
作者:
F
flame
提交者:
GitHub
1月 10, 2019
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Add python ir graph API (#14917)
上级
7d13d207
变更
7
隐藏空白更改
内联
并排
Showing
7 changed files
with
286 additions
and
4 deletions
+286
-4
paddle/fluid/framework/details/multi_devices_graph_pass.cc
paddle/fluid/framework/details/multi_devices_graph_pass.cc
+1
-1
paddle/fluid/framework/ir/graph.h
paddle/fluid/framework/ir/graph.h
+0
-1
paddle/fluid/pybind/CMakeLists.txt
paddle/fluid/pybind/CMakeLists.txt
+1
-1
paddle/fluid/pybind/ir.cc
paddle/fluid/pybind/ir.cc
+103
-0
paddle/fluid/pybind/ir.h
paddle/fluid/pybind/ir.h
+25
-0
paddle/fluid/pybind/pybind.cc
paddle/fluid/pybind/pybind.cc
+10
-1
python/paddle/fluid/tests/unittests/test_ir_graph.py
python/paddle/fluid/tests/unittests/test_ir_graph.py
+146
-0
未找到文件。
paddle/fluid/framework/details/multi_devices_graph_pass.cc
浏览文件 @
fb63cd89
...
...
@@ -226,7 +226,7 @@ std::unique_ptr<ir::Graph> MultiDevSSAGraphBuilderBase::ApplyImpl(
* Only variables should be the leaves of graph.
*/
AddOutputToLeafOps
(
&
result
);
result
.
Erase
<
GraphOps
>
(
kGraphOps
);
result
.
Erase
(
kGraphOps
);
return
graph
;
}
...
...
paddle/fluid/framework/ir/graph.h
浏览文件 @
fb63cd89
...
...
@@ -109,7 +109,6 @@ class Graph {
attr_dels_
[
attr_name
]
=
[]()
{};
}
template
<
typename
AttrType
>
void
Erase
(
const
std
::
string
&
attr_name
)
{
PADDLE_ENFORCE
(
attrs_
.
count
(
attr_name
)
!=
0
,
"%s not set in the graph"
,
attr_name
);
...
...
paddle/fluid/pybind/CMakeLists.txt
浏览文件 @
fb63cd89
...
...
@@ -3,7 +3,7 @@ set(PYBIND_DEPS pybind python proto_desc memory executor async_executor prune fe
if
(
WITH_PYTHON
)
list
(
APPEND PYBIND_DEPS py_func_op
)
endif
()
set
(
PYBIND_SRCS pybind.cc exception.cc protobuf.cc const_value.cc recordio.cc async_executor_py.cc imperative.cc
)
set
(
PYBIND_SRCS pybind.cc exception.cc protobuf.cc const_value.cc recordio.cc async_executor_py.cc imperative.cc
ir.cc
)
if
(
WITH_PYTHON
)
if
(
WITH_AMD_GPU
)
...
...
paddle/fluid/pybind/ir.cc
0 → 100644
浏览文件 @
fb63cd89
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "paddle/fluid/pybind/ir.h"
#include <string>
#include <unordered_map>
#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/ir/node.h"
#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/var_desc.h"
#include "pybind11/stl.h"
namespace
py
=
pybind11
;
using
paddle
::
framework
::
ir
::
Graph
;
using
paddle
::
framework
::
ir
::
Node
;
using
paddle
::
framework
::
OpDesc
;
using
paddle
::
framework
::
ProgramDesc
;
using
paddle
::
framework
::
VarDesc
;
using
pybind11
::
return_value_policy
;
namespace
paddle
{
namespace
pybind
{
// Expose paddle::framework::ir::Graph to Python as `core.Graph`.
//
// The Graph is held by std::shared_ptr so Python and C++ can share
// ownership (ir::Pass::Apply round-trips the underlying pointer).
// Attribute accessors are exposed per-type (get_int/get_float/...) because
// Graph::Get<T> is a template and pybind11 needs concrete instantiations;
// the four `set` overloads allocate a new T because Graph::Set takes
// ownership of a raw heap pointer.
void BindGraph(py::module *m) {
  py::class_<Graph, std::shared_ptr<Graph>>(
      *m, "Graph",
      // NOTE: the term of art is "Static Single Assignment" (SSA).
      "The graph is a Directed Acyclic Static Single Assignment Graph, see "
      "`paddle::ir::Graph` for details.")
      .def(py::init<const ProgramDesc &>())
      .def("has", &Graph::Has)
      .def("get_int", &Graph::Get<int>)
      .def("get_float", &Graph::Get<float>)
      .def("get_double", &Graph::Get<double>)
      .def("get_string", &Graph::Get<std::string>)
      // Each `set` overload copies the Python value onto the heap; the
      // Graph's attribute map owns (and later deletes) the allocation.
      .def("set", [](Graph &self, const std::string &attr_name,
                     int attr) { return self.Set(attr_name, new int(attr)); })
      .def("set",
           [](Graph &self, const std::string &attr_name,
              const std::string &attr) {
             return self.Set(attr_name, new std::string(attr));
           })
      .def("set",
           [](Graph &self, const std::string &attr_name, float attr) {
             return self.Set(attr_name, new float(attr));
           })
      .def("set",
           [](Graph &self, const std::string &attr_name, double attr) {
             return self.Set(attr_name, new double(attr));
           })
      .def("erase", &Graph::Erase)
      // Nodes are owned by the Graph; return non-owning references so
      // Python never frees them.
      .def("nodes", &Graph::Nodes, return_value_policy::reference)
      .def("create_var_node",
           [](Graph &self, VarDesc &var_desc) {
             return self.CreateVarNode(&var_desc);
           },
           return_value_policy::reference)
      .def("create_op_node",
           [](Graph &self, OpDesc &op_desc) {
             return self.CreateOpNode(&op_desc);
           },
           return_value_policy::reference)
      .def("create_control_dep_var", &Graph::CreateControlDepVar,
           return_value_policy::reference)
      .def("create_empty_node", &Graph::CreateEmptyNode,
           return_value_policy::reference)
      .def("release_nodes", &Graph::ReleaseNodes)
      .def("remove_node",
           [](Graph &self, Node &node) { return self.RemoveNode(&node); })
      .def("retrieve_node", &Graph::RetrieveNode,
           return_value_policy::reference)
      .def("resolve_hazard", &Graph::ResolveHazard);
}
// Expose paddle::framework::ir::Node to Python as `core.Node`, plus its
// nested `Node.Type` enum (Operation / Variable).
//
// Nodes are created and owned by a Graph (see BindGraph), so no shared_ptr
// holder is used here; Python only ever observes them.
void BindNode(py::module *m) {
  py::class_<Node> node(*m, "Node");
  node.def("name", &Node::Name)
      .def("node_type", &Node::NodeType)
      .def("var", &Node::Var)
      .def("op", &Node::Op)
      .def("id", &Node::id)
      .def("is_op", &Node::IsOp)
      .def("is_var", &Node::IsVar)
      .def("is_ctrl_var", &Node::IsCtrlVar)
      // Adjacency lists are plain public members; expose them read/write so
      // Python-side passes can rewire the graph.
      .def_readwrite("inputs", &Node::inputs)
      .def_readwrite("outputs", &Node::outputs);

  // export_values() re-exports kOperation/kVariable at class scope, so both
  // Node.Type.Operation and Node.Operation spellings work from Python.
  py::enum_<Node::Type>(node, "Type")
      .value("Operation", Node::Type::kOperation)
      .value("Variable", Node::Type::kVariable)
      .export_values();
}
}
// namespace pybind
}
// namespace paddle
paddle/fluid/pybind/ir.h
0 → 100644
浏览文件 @
fb63cd89
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <pybind11/pybind11.h>
#include "paddle/fluid/framework/ir/graph.h"
namespace paddle {
namespace pybind {

/// Registers the `Graph` binding (paddle::framework::ir::Graph) on module m.
/// Defined in ir.cc; called from pybind.cc when assembling the core module.
void BindGraph(pybind11::module *m);

/// Registers the `Node` binding and its nested `Node.Type` enum on module m.
/// Defined in ir.cc; called from pybind.cc when assembling the core module.
void BindNode(pybind11::module *m);

}  // namespace pybind
}  // namespace paddle
paddle/fluid/pybind/pybind.cc
浏览文件 @
fb63cd89
...
...
@@ -49,6 +49,7 @@ limitations under the License. */
#include "paddle/fluid/pybind/const_value.h"
#include "paddle/fluid/pybind/exception.h"
#include "paddle/fluid/pybind/imperative.h"
#include "paddle/fluid/pybind/ir.h"
#include "paddle/fluid/pybind/protobuf.h"
#include "paddle/fluid/pybind/pybind.h" // NOLINT
#include "paddle/fluid/pybind/recordio.h"
...
...
@@ -775,7 +776,12 @@ All parameter, weight, gradient are variables in Paddle.
})
.
def
(
"set_int"
,
[](
ir
::
Pass
&
self
,
const
std
::
string
&
name
,
int
val
)
{
self
.
Set
<
const
int
>
(
name
,
new
int
(
val
));
})
.
def
(
"type"
,
&
ir
::
Pass
::
Type
);
.
def
(
"type"
,
&
ir
::
Pass
::
Type
)
.
def
(
"apply"
,
[](
ir
::
Pass
&
self
,
std
::
shared_ptr
<
ir
::
Graph
>
graph
)
{
std
::
unique_ptr
<
ir
::
Graph
>
origin_graph
(
graph
.
get
());
auto
optim_graph
=
self
.
Apply
(
std
::
move
(
origin_graph
));
graph
.
reset
(
optim_graph
.
release
());
});
py
::
class_
<
ir
::
PassBuilder
,
std
::
shared_ptr
<
ir
::
PassBuilder
>>
pb
(
m
,
"PassBuilder"
);
...
...
@@ -1042,6 +1048,9 @@ All parameter, weight, gradient are variables in Paddle.
BindRecordIOWriter
(
&
m
);
BindAsyncExecutor
(
&
m
);
BindGraph
(
&
m
);
BindNode
(
&
m
);
}
}
// namespace pybind
}
// namespace paddle
python/paddle/fluid/tests/unittests/test_ir_graph.py
0 → 100644
浏览文件 @
fb63cd89
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import
os
import
unittest
import
six
from
paddle
import
fluid
class TestIRGraph(unittest.TestCase):
    """
    TODO(fc500110): `resolve_hazard` api will be tested when it can be used.
    """

    def test_nodes(self):
        # The graph built by build_graph() has exactly the two input
        # variables, the sum op, and its output variable.
        graph = build_graph()
        self.assertTrue(
            {node.name()
             for node in graph.nodes()} == {"x1", "x2", "out", "sum"})

    def test_has_set_get(self):
        # Typed attribute round-trip: set int/float/string, read each back
        # through the matching typed getter.
        graph = build_graph()
        for attr_name in ["int", "float", "string"]:
            self.assertFalse(graph.has(attr_name))
        graph.set("int", 1)
        graph.set("float", 0.5)
        graph.set("string", "string")
        for attr_name in ["int", "float", "string"]:
            self.assertTrue(graph.has(attr_name))
        self.assertTrue(graph.get_int("int") == 1)
        self.assertTrue(graph.get_float("float") == 0.5)
        self.assertTrue(graph.get_string("string") == "string")

    def test_erase(self):
        # erase() must remove a previously set attribute.
        graph = build_graph()
        graph.set("test", 0)
        self.assertTrue(graph.has("test"))
        graph.erase("test")
        self.assertFalse(graph.has("test"))

    def test_create_var_node(self):
        # A node created from a VarDesc must report Type.Variable.
        prog = fluid.core.ProgramDesc()
        block = prog.block(0)
        shape = [10, 20]
        x1 = block.var(six.b("x1"))
        x1.set_type(fluid.core.VarDesc.VarType.LOD_TENSOR)
        x1.set_shape(shape)
        graph = fluid.core.Graph(prog)
        node = graph.create_var_node(x1)
        self.assertTrue(node.node_type() == fluid.core.Node.Type.Variable)

    def test_create_op_node(self):
        # A node created from an OpDesc must report Type.Operation.
        prog = fluid.core.ProgramDesc()
        block = prog.block(0)
        sum_op_desc = block.append_op()
        graph = fluid.core.Graph(prog)
        node = graph.create_op_node(sum_op_desc)
        self.assertTrue(node.node_type() == fluid.core.Node.Type.Operation)

    def test_create_control_dep_var(self):
        # NOTE(review): assumes the control-dep var is named
        # "__control_var@<current node count>" — verify against
        # Graph::CreateControlDepVar if the naming scheme changes.
        graph = build_graph()
        name = "__control_var@{}".format(len(graph.nodes()))
        node = graph.create_control_dep_var()
        self.assertTrue(node.name() == name)

    def test_create_empty_node(self):
        # Empty nodes take an explicit name and type.
        prog = fluid.core.ProgramDesc()
        graph = fluid.core.Graph(prog)
        n1 = graph.create_empty_node('x', fluid.core.Node.Type.Operation)
        self.assertTrue(n1.name() == 'x')
        n2 = graph.create_empty_node('y', fluid.core.Node.Type.Variable)
        self.assertTrue(n2.name() == 'y')

    def test_release_nodes(self):
        # release_nodes() hands ownership of all nodes to the caller and
        # leaves the graph empty.
        graph = build_graph()
        nodes = graph.release_nodes()
        self.assertTrue(len(graph.nodes()) == 0)
        self.assertTrue({node.name()
                         for node in nodes} == {"x1", "x2", "out", "sum"})

    def test_remove_node(self):
        graph = build_graph()
        nodes = graph.nodes()
        # Find the "sum" op node; `node` keeps the loop's last binding.
        for node in nodes:
            if node.name() == "sum":
                break
        self.assertTrue({node.name()
                         for node in nodes} == {"x1", "x2", "out", "sum"})
        nodes.remove(node)
        self.assertTrue({node.name() for node in nodes} == {"x1", "x2", "out"})

    def test_retrieve_node(self):
        # Every node retrieved by id must be a member of graph.nodes().
        graph = build_graph()
        nodes = []
        for i in range(len(graph.nodes())):
            nodes.append(graph.retrieve_node(i))

        for node in nodes:
            self.assertTrue(node in graph.nodes())

    def resolve_hazard(self):
        # Intentionally not named test_*: resolve_hazard cannot be exercised
        # yet (see the class docstring TODO).
        pass
def build_graph():
    # Build a minimal single-block program -- out = sum(x1, x2) -- and wrap
    # it in an ir::Graph for the tests above.
    prog = fluid.core.ProgramDesc()
    block = prog.block(0)

    shape = [10, 20]

    # prepare input/output
    x1 = block.var(six.b("x1"))
    x1.set_type(fluid.core.VarDesc.VarType.LOD_TENSOR)
    x1.set_shape(shape)
    x2 = block.var(six.b("x2"))
    x2.set_type(fluid.core.VarDesc.VarType.LOD_TENSOR)
    x2.set_shape(shape)

    # NOTE(review): `out` deliberately gets no explicit shape here;
    # sum_op_desc.infer_shape(block) below derives it from the inputs.
    out = block.var(six.b("out"))
    out.set_type(fluid.core.VarDesc.VarType.LOD_TENSOR)

    sum_op_desc = block.append_op()
    sum_op_desc.set_type("sum")
    sum_op_desc.set_input("X", ["x1", "x2"])
    sum_op_desc.set_output("Out", ["out"])

    sum_op_desc.check_attrs()
    sum_op_desc.infer_shape(block)
    graph = fluid.core.Graph(prog)
    return graph
# Allow running this test file directly: `python test_ir_graph.py`.
if __name__ == "__main__":
    unittest.main()
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录