BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit ca743508
Authored Jan 25, 2022 by jim19930609
Parent: 3723caba

Adjusted CMakeFiles to support compilation for final state auto generated codes
Showing 6 changed files with 97 additions and 47 deletions (+97 / -47)
Changed files:
  paddle/fluid/eager/api/generated/eager_generated/backwards/CMakeLists.txt    +2  -0
  paddle/fluid/eager/api/generated/eager_generated/forwards/CMakeLists.txt     +2  -0
  paddle/fluid/eager/auto_code_generator/CMakeLists.txt                         +1  -1
  paddle/fluid/eager/auto_code_generator/final_state_generator/CMakeLists.txt  +5  -4
  paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py    +41 -39
  paddle/fluid/eager/auto_code_generator/generate_file_structures.py           +46 -3
paddle/fluid/eager/api/generated/eager_generated/backwards/CMakeLists.txt

 cc_library(scale_node SRCS scale_node.cc DEPS global_utils pten pten_api grad_node_info)
+#cc_library(final_dygraph_node SRCS nodes.cc DEPS ${eager_deps})
+#add_dependencies(final_dygraph_node eager_final_state_codegen)
paddle/fluid/eager/api/generated/eager_generated/forwards/CMakeLists.txt

 cc_library(eager_scale SRCS scale.cc DEPS pten_api pten autograd_meta scale_node)
+#cc_library(final_dygraph_function SRCS dygraph_functions.cc DEPS ${eager_deps})
+#add_dependencies(final_dygraph_function eager_final_state_codegen)
paddle/fluid/eager/auto_code_generator/CMakeLists.txt

-add_subdirectory(final_state_generator)
+#add_subdirectory(final_state_generator)
 set(EAGER_GENERETOR_DEPS ${GLOB_OP_LIB} ${GLOB_OPERATOR_DEPS} pybind proto_desc executor layer tracer engine imperative_profiler imperative_flag)
 ...
paddle/fluid/eager/auto_code_generator/final_state_generator/CMakeLists.txt

@@ -2,13 +2,14 @@ set(api_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/api.yaml")
 set(backward_yaml_path "${PADDLE_SOURCE_DIR}/python/paddle/utils/code_gen/backward.yaml")
 set(tmp_forwards_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.cc")
 set(tmp_forwards_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/tmp_dygraph_functions.h")
-set(tmp_nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_node.cc")
-set(tmp_nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_node.h")
+set(tmp_nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_nodes.cc")
+set(tmp_nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/tmp_nodes.h")
 set(forwards_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.cc")
 set(forwards_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.h")
-set(nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/node.cc")
-set(nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/node.h")
+set(nodes_cc_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.cc")
+set(nodes_h_path "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h")
+message("Final State Eager CodeGen")
 add_custom_target(eager_final_state_codegen
     COMMAND "${PYTHON_EXECUTABLE}" "${PADDLE_SOURCE_DIR}/paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py"
     "--api_yaml_path=${api_yaml_path}"
 ...
paddle/fluid/eager/auto_code_generator/final_state_generator/eager_gen.py
@@ -72,6 +72,16 @@ def GetConstReference(string):
     return ret


+def RemoveConstAndReference(string):
+    ret = string
+    if string.startswith("const "):
+        ret = ret[6:]
+    if string.endswith("&"):
+        ret = ret[:-1]
+
+    return ret
+
+
 def GetAutoGradMetaName(string):
     return f"{string}_autograd_meta"
@@ -439,7 +449,7 @@ def GenerateNodeDeclaration(fwd_api_name, backward_fwd_input_map,
   {} {} = {};
 """
         attribute_members_str += ATTRIBUTE_MEMBER_TEMPLATE.format(
-            GetConstReference(atype), saved_attr_name, default_val)
+            RemoveConstAndReference(atype), saved_attr_name, default_val)
     # End: SetAttributes & Attribute Members

     NODE_DECLARATION_TEMPLATE = """
@@ -490,17 +500,13 @@ def GenerateNodeDefinition(fwd_api_name, bwd_api_name, backward_fwd_input_map,
     for name, (_, is_fwd_input,
                grad_api_position), in backward_fwd_input_map.items():
         tensor_wrapper_name = GetSavedName(name)
-        if is_fwd_input:
-            grad_api_args[
-                grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, true) )"
-        else:
-            grad_api_args[
-                grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, false) )"
+        grad_api_args[
+            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( egr::EagerUtils::RecoverTensorWrapper(&this->{tensor_wrapper_name}, nullptr) )"

     for _, (_, fwd_position,
             grad_api_position) in backward_grad_input_map.items():
-        grad_api_args[
-            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( *grads[{fwd_position}] )"
+        grad_api_args[
+            grad_api_position] = f"egr::EagerUtils::SyncToPtenTensors( grads[{fwd_position}] )"

     for name, _, _, grad_api_position in backward_attrs_list:
         saved_attribute_name = GetSavedName(name)
@@ -508,35 +514,28 @@ def GenerateNodeDefinition(fwd_api_name, bwd_api_name, backward_fwd_input_map,
     grad_api_args_str = ", ".join(grad_api_args)

     # Construct grad_api returns
-    num_outputs = len(backward_grad_output_map.keys())
-    returns_list = ["" for i in range(num_outputs)]
+    num_bwd_outputs = len(backward_grad_output_map.keys())
+    returns_str = f"std::vector<std::vector<egr::EagerTensor>> returns({num_bwd_outputs});\n"
     for _, (ttype, fwd_position,
             grad_api_position) in backward_grad_output_map.items():
         # Infer Grad API Return Type
-        if num_outputs == 1:
+        if num_bwd_outputs == 1:
             # Single tensor output, return as is
             if IsPlainTensorType(ttype):
-                returns_list[0] = "{grad_api_returns}"
+                returns_str += "returns[0] = { egr::EagerUtils::CreateEagerTensorFromTensor(grad_api_returns) };\n"
             else:
                 assert IsVectorTensorType(ttype)
-                returns_list[0] = "grad_api_returns"
+                returns_str += "returns[0] = egr::EagerUtils::CreateEagerTensorFromTensor(grad_api_returns);\n"
         else:
             # Rearrange output order accordingly
-            if IsPlainTensorType(ttype):
-                returns_list[
-                    fwd_position] = f"{{ grad_api_returns[{grad_api_position}] }}"
-            else:
-                assert IsVectorTensorType(ttype)
-                returns_list[
-                    fwd_position] = f"grad_api_returns[{grad_api_position}]"
-
-    returns_str = ", ".join(returns_list)
-    returns_str = f"{{ {returns_str} }}"
+            returns_str += f"returns[{fwd_position}] = egr::EagerUtils::CreateEagerTensorFromTensor( grad_api_returns[{grad_api_position}] );\n"
+
+    returns_str += f"return returns;\n"

     FUNCTION_TEMPLATE = """
 std::vector<std::vector<egr::EagerTensor>> GradNode{}::operator()(const std::vector<std::vector<egr::EagerTensor>>& grads) {{
     // Call grad_api function
-    auto grad_api_returns = {}({});
-    return {};
+    auto grad_api_returns = paddle::experimental::{}({});
+    {}
 }}
 """
@@ -566,12 +565,12 @@ def GenerateNodeCreationCodes(fwd_api_name, bwd_api_name,
     for name, (ttype, pos) in forward_inputs_position_map.items():
         input_autograd_meta_name = GetAutoGradMetaName(name)
         if IsPlainTensorType(ttype):
-            input_autograd_meta = f"    egr::EagerTensor* {input_autograd_meta_name} = egr::EagerUtils::nullable_autograd_meta({name});"
+            input_autograd_meta = f"    egr::AutogradMeta* {input_autograd_meta_name} = egr::EagerUtils::nullable_autograd_meta({name});"
         else:
             assert IsVectorTensorType(ttype)
             input_autograd_meta_vec_name = GetAutoGradMetaVectorName(name)
-            input_autograd_meta = f"    std::vector<egr::EagerTensor*> {input_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({name});\n"
-            input_autograd_meta += f"    std::vector<egr::EagerTensor*>* {input_autograd_meta_name} = &{input_autograd_meta_vec_name};"
+            input_autograd_meta = f"    std::vector<egr::AutogradMeta*> {input_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({name});\n"
+            input_autograd_meta += f"    std::vector<egr::AutogradMeta*>* {input_autograd_meta_name} = &{input_autograd_meta_vec_name};"

         inputs_autograd_meta_list.append(input_autograd_meta)
         compute_require_grad_args_list.append(input_autograd_meta_name)
@@ -587,19 +586,19 @@ def GenerateNodeCreationCodes(fwd_api_name, bwd_api_name,
         output_autograd_meta_vec_name = GetAutoGradMetaVectorName(name)
         if num_fwd_outputs == 1:
             if IsPlainTensorType(rtype):
-                output_autograd_meta = f"    egr::EagerTensor* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(outputs);"
+                output_autograd_meta = f"    egr::AutogradMeta* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(&outputs);"
             else:
                 assert IsVectorTensorType(rtype)
-                output_autograd_meta = f"    std::vector<egr::EagerTensor*> {output_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta({outputs});\n"
-                output_autograd_meta += f"    std::vector<egr::EagerTensor*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
+                output_autograd_meta = f"    std::vector<egr::AutogradMeta*> {output_autograd_meta_vec_name} = egr::EagerUtils::autograd_meta(&{outputs});\n"
+                output_autograd_meta += f"    std::vector<egr::AutogradMeta*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
         else:
             # Tuple api_result
             if IsPlainTensorType(rtype):
-                outputs_autograd_meta = f"    egr::EagerTensor* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(outputs[{pos}]);"
+                outputs_autograd_meta = f"    egr::AutogradMeta* {output_autograd_meta_name} = egr::EagerUtils::autograd_meta(&outputs[{pos}]);"
             else:
                 assert IsVectorTensorType(rtype)
-                output_autograd_meta = f"    std::vector<egr::EagerTensor*> {output_autograd_meta_vec_name} = egr::EagerUtils::nullable_autograd_meta(outputs[{pos}]);\n"
-                output_autograd_meta += f"    std::vector<egr::EagerTensor*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"
+                output_autograd_meta = f"    std::vector<egr::AutogradMeta*> {output_autograd_meta_vec_name} = egr::EagerUtils::autograd_meta(&outputs[{pos}]);\n"
+                output_autograd_meta += f"    std::vector<egr::AutogradMeta*>* {output_autograd_meta_name} = &{output_autograd_meta_vec_name};"

         outputs_autograd_meta_list.append(output_autograd_meta)
         pass_stop_gradient_args_list.append(output_autograd_meta_name)
@@ -622,8 +621,11 @@ def GenerateNodeCreationCodes(fwd_api_name, bwd_api_name,
     # SetTensorWrappers
     set_tensor_wrappers_list = []
-    for name, (_, _, _) in backward_fwd_input_map.items():
-        set_tensor_wrappers = f"        grad_node->SetTensorWrapper{name}({name});"
+    for name, (_, is_fwd_input, _) in backward_fwd_input_map.items():
+        if is_fwd_input:
+            set_tensor_wrappers = f"        grad_node->SetTensorWrapper{name}({name}, true);"
+        else:
+            set_tensor_wrappers = f"        grad_node->SetTensorWrapper{name}({name}, false);"
         set_tensor_wrappers_list.append(set_tensor_wrappers)
     set_tensor_wrappers_str = "\n".join(set_tensor_wrappers_list)
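
As a hedged illustration (the map contents below are invented, not from the commit), the new branch emits a boolean second argument on each SetTensorWrapper call:

    # Illustration only: lines emitted for an invented backward_fwd_input_map.
    backward_fwd_input_map = {"X": ("Tensor", True, 0), "Out": ("Tensor", False, 1)}
    for name, (_, is_fwd_input, _) in backward_fwd_input_map.items():
        if is_fwd_input:
            print(f"        grad_node->SetTensorWrapper{name}({name}, true);")
        else:
            print(f"        grad_node->SetTensorWrapper{name}({name}, false);")
    # ->         grad_node->SetTensorWrapperX(X, true);
    # ->         grad_node->SetTensorWrapperOut(Out, false);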
@@ -747,7 +749,7 @@ def GenerateForwardDefinition(fwd_api_name, bwd_api_name,
     inputs_call_args_str = ", ".join(inputs_call_list)

     # Forward Full Logic
-    forward_call_str = f"auto api_result = {fwd_api_name}({inputs_call_args_str});"
+    forward_call_str = f"auto api_result = paddle::experimental::{fwd_api_name}({inputs_call_args_str});"

     # Get return type list & outputs
     num_outputs = len(forward_outputs_position_map.keys())
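
A small illustration (the API name and argument list are invented) of the forward call string with the new namespace prefix:

    # Illustration only: forward_call_str for a hypothetical forward API "scale".
    fwd_api_name = "scale"
    inputs_call_args_str = "x, scale, bias, bias_after_scale"
    forward_call_str = f"auto api_result = paddle::experimental::{fwd_api_name}({inputs_call_args_str});"
    print(forward_call_str)
    # -> auto api_result = paddle::experimental::scale(x, scale, bias, bias_after_scale);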
@@ -814,7 +816,7 @@ def GenerateNodeCCFile(filepath, node_definition_str):
 #include "paddle/fluid/framework/op_registry.h"
 #include "paddle/fluid/eager/utils.h"
 #include "paddle/fluid/eager/api/utils/global_utils.h"
-#include "paddle/fluid/eager/api/generated/eager_generated/nodes/nodes.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h"
 """
     file_contents += node_definition_str
@@ -837,8 +839,8 @@ def GenerateNodeHFile(filepath, node_declaration_str):
 def GenerateForwardCCFile(filepath, forward_definition_str):
     file_contents = """
-#include "paddle/fluid/eager/api/generated/eager_generated/dygraph_forward_api.h"
-#include "paddle/fluid/eager/api/generated/eager_generated/nodes/nodes.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/forwards/dygraph_functions.h"
+#include "paddle/fluid/eager/api/generated/eager_generated/backwards/nodes.h"
 #include "paddle/fluid/eager/api/utils/global_utils.h"
 #include "paddle/fluid/eager/legacy/op_runner.h"
paddle/fluid/eager/auto_code_generator/generate_file_structures.py
@@ -15,9 +15,45 @@
 import sys
 import os

-if __name__ == "__main__":
-    assert len(sys.argv) == 2
-    eager_dir = sys.argv[1]
+
+def GenerateFileStructureForFinalDygraph(eager_dir):
+    """
+    paddle/fluid/eager
+    |- generated
+    |  |- CMakeLists.txt
+    |  |  "add_subdirectory(forwards), add_subdirectory(backwards)"
+    |
+    |  |- forwards
+    |     |- "dygraph_functions.cc"
+    |     |- "dygraph_functions.h"
+    |
+    |  |- backwards
+    |     |- "nodes.cc"
+    |     |- "nodes.h"
+    """
+    # Directory Generation
+    generated_dir = os.path.join(eager_dir, "api/generated/eager_generated")
+    forwards_dir = os.path.join(generated_dir, "forwards")
+    nodes_dir = os.path.join(generated_dir, "backwards")
+    dirs = [generated_dir, forwards_dir, nodes_dir]
+    for directory in dirs:
+        if not os.path.exists(directory):
+            os.mkdir(directory)
+
+    # Empty files
+    dygraph_forward_api_h_path = os.path.join(generated_dir, "dygraph_functions.h")
+    empty_files = [dygraph_forward_api_h_path]
+    empty_files.append(os.path.join(forwards_dir, "dygraph_functions.cc"))
+    empty_files.append(os.path.join(nodes_dir, "nodes.cc"))
+    empty_files.append(os.path.join(nodes_dir, "nodes.h"))
+
+    for path in empty_files:
+        if not os.path.exists(path):
+            open(path, 'a').close()
+
+
+def GenerateFileStructureForIntermediateDygraph(eager_dir):
     """
     paddle/fluid/eager
     |- generated
@@ -79,3 +115,10 @@ if __name__ == "__main__":
     with open(generated_level_cmakelist_path, "w") as f:
         f.write("add_subdirectory(forwards)\nadd_subdirectory(nodes)")
+
+
+if __name__ == "__main__":
+    assert len(sys.argv) == 2
+    eager_dir = sys.argv[1]
+
+    GenerateFileStructureForIntermediateDygraph(eager_dir)
+    GenerateFileStructureForFinalDygraph(eager_dir)