Unverified commit 5e878ecc, authored by Jiabin Yang, committed by GitHub

optimize log (#46349)

Parent 812e4b47
@@ -31,12 +31,16 @@ static void CopyOrAddTensor(paddle::experimental::Tensor* tensor,
                             const paddle::experimental::Tensor& t,
                             bool is_fake_empty) {
   if (is_fake_empty) {
+    VLOG(3) << "Move Tensor ptr: " << t.impl();
     *tensor = t;
   } else {
     if (!tensor->defined() || !tensor->initialized()) {
       // Simply copy tensor->impl
+      VLOG(3) << "Move Tensor ptr: " << t.impl();
       *tensor = t;
     } else {
+      VLOG(3) << "Add Tensor ptr: " << t.impl()
+              << " with Tensor ptr: " << tensor->impl();
       // Accumulation
       if (LIKELY(t.is_dense_tensor())) {
         if (LIKELY(tensor->is_dense_tensor())) {
......
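For context, CopyOrAddTensor implements the usual gradient-accumulation fast path: the first gradient arriving at a slot is moved (a cheap impl-pointer copy), and subsequent gradients are added in place; the new VLOG(3) lines make the two cases visible. Below is a minimal standalone sketch of that decision with stand-in types; Tensor, defined(), and the float-buffer accumulation here are illustrative assumptions, not paddle::experimental::Tensor's API.

#include <iostream>
#include <memory>
#include <vector>

// Illustrative stand-in for paddle::experimental::Tensor: a shared buffer
// plus a "defined" check (NOT Paddle's actual API).
struct Tensor {
  std::shared_ptr<std::vector<float>> data;  // plays the role of impl()
  bool defined() const { return data != nullptr; }
};

// First gradient for a slot is moved (pointer copy); later gradients are
// accumulated element-wise -- the two cases the new "Move Tensor ptr" /
// "Add Tensor ptr" logs distinguish.
void CopyOrAddTensor(Tensor* dst, const Tensor& src) {
  if (!dst->defined()) {
    std::cout << "Move Tensor ptr: " << src.data.get() << "\n";
    *dst = src;  // share the underlying buffer instead of copying it
  } else {
    std::cout << "Add Tensor ptr: " << src.data.get()
              << " with Tensor ptr: " << dst->data.get() << "\n";
    for (std::size_t i = 0; i < dst->data->size(); ++i) {
      (*dst->data)[i] += (*src.data)[i];  // accumulation path
    }
  }
}

int main() {
  Tensor a{std::make_shared<std::vector<float>>(3, 1.0f)};
  Tensor b{std::make_shared<std::vector<float>>(3, 2.0f)};
  Tensor acc;                // empty slot: nothing accumulated yet
  CopyOrAddTensor(&acc, a);  // logs "Move Tensor ptr: ..."
  CopyOrAddTensor(&acc, b);  // logs "Add Tensor ptr: ... with Tensor ptr: ..."
  std::cout << (*acc.data)[0] << "\n";  // prints 3
}

Note that in the sketch the "move" case makes the destination share the source's buffer, which is precisely why logging the underlying pointer is useful when debugging aliasing between gradient tensors.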
@@ -1414,13 +1414,13 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
         var_str = f"\n{indent} std::string input_str = \"\";"
         var_str += f"\n{indent} std::string output_str = \"\";"
         for name, (ttype, pos) in forward_inputs_position_map.items():
-            var_str += f"\n{indent} const char* TENSOR_{name.upper()}_TEMPLATE = \"({name}, [%s]), \";"
+            var_str += f"\n{indent} const char* TENSOR_{name.upper()}_TEMPLATE = \" \\n( {name} , [%s]), \";"
             var_str += f"\n{indent} std::string input_{name}_str = paddle::string::Sprintf(TENSOR_{name.upper()}_TEMPLATE, egr::EagerUtils::TensorStr({name}));"
             var_str += f"\n{indent} input_str += input_{name}_str; "
         before_log_str = BEFORE_LOG_PRINT_TEMPLATE.format(var_str)
         for name, (ttype, pos) in forward_outputs_position_map.items():
-            var_str += f"\n{indent} const char* TENSOR_{name.upper()}_TEMPLATE = \"({name}, [%s]), \";"
+            var_str += f"\n{indent} const char* TENSOR_{name.upper()}_TEMPLATE = \" \\n( {name} , [%s]), \";"
             var_str += f"\n{indent} std::string output_{name}_str = paddle::string::Sprintf(TENSOR_{name.upper()}_TEMPLATE, egr::EagerUtils::TensorStr({name}));"
             var_str += f"\n{indent} output_str += output_{name}_str; "
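The template change above (and the matching ones in DygraphNodeGenerator below) only affects how the generated C++ formats its debug string: each ( name , [tensor-info]) entry now starts on its own line instead of running together on one line. The following is a standalone sketch of the emitted pattern, with snprintf standing in for paddle::string::Sprintf and a hard-coded description standing in for egr::EagerUtils::TensorStr; both stand-ins are assumptions for illustration.

#include <cstdio>
#include <iostream>
#include <string>

// Stand-in for egr::EagerUtils::TensorStr(x): a short tensor description.
std::string TensorStr() { return "DenseTensor, shape=[2, 3]"; }

// Stand-in for paddle::string::Sprintf with a single %s argument.
std::string Sprintf(const char* tmpl, const std::string& arg) {
  char buf[256];
  std::snprintf(buf, sizeof(buf), tmpl, arg.c_str());
  return buf;
}

int main() {
  // Old template: entries run together on one line.
  const char* TENSOR_X_OLD = "(x, [%s]), ";
  // New template: each entry starts on its own line.
  const char* TENSOR_X_NEW = " \n( x , [%s]), ";

  std::string input_str;
  input_str += Sprintf(TENSOR_X_OLD, TensorStr());
  std::cout << "old:" << input_str << "\n";

  input_str.clear();
  input_str += Sprintf(TENSOR_X_NEW, TensorStr());
  std::cout << "new:" << input_str << "\n";
}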
@@ -1930,14 +1930,14 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase):
         for name, (ttype, fwd_position,
                    grad_api_position) in backward_grad_inputs_map.items():
             new_name = self.TransformToNextGradName(name)
-            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \"({new_name}, [%s]), \";"
+            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \" \\n( {new_name} , [%s]), \";"
             var_str += f"\n{indent} std::string input_{new_name}_str = paddle::string::Sprintf(TENSOR_{new_name.upper()}_TEMPLATE, egr::EagerUtils::TensorStr({new_name}));"
             var_str += f"\n{indent} input_str += input_{new_name}_str; "
         for name, (backward_input_type, is_fwd_input,
                    grad_api_position), in backward_forward_inputs_map.items():
             new_name = self.TransformToNextGradName(name)
-            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \"({new_name}, [%s]), \";"
+            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \" \\n( {new_name} , [%s]), \";"
             var_str += f"\n{indent} std::string input_{new_name}_str = paddle::string::Sprintf(TENSOR_{new_name.upper()}_TEMPLATE, egr::EagerUtils::TensorStr({new_name}));"
             var_str += f"\n{indent} input_str += input_{new_name}_str; "
@@ -1946,7 +1946,7 @@ class DygraphNodeGenerator(DygraphFunctionGeneratorBase):
         for name, (ttype, fwd_position,
                    grad_api_position) in backward_grad_outputs_map.items():
             new_name = self.TransformToNextGradName(name)
-            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \"({new_name}, [%s]), \";"
+            var_str += f"\n{indent} const char* TENSOR_{new_name.upper()}_TEMPLATE = \" \\n ( {new_name} , [%s]), \";"
             var_str += f"\n{indent} std::string output_{new_name}_str = paddle::string::Sprintf(TENSOR_{new_name.upper()}_TEMPLATE, egr::EagerUtils::TensorStr({new_name}));"
             var_str += f"\n{indent} output_str += output_{new_name}_str; "
......
@@ -226,7 +226,6 @@ std::vector<paddle::experimental::Tensor> RunBackward(
   while (!queue.empty()) {
     GradNodeBase* node = queue.front();
     VLOG(3) << "Preparing GradNode:" << node->name() << " addr:" << node;
-    VLOG(4) << EagerUtils::GradNodeStr(*node);
     paddle::platform::RecordEvent node_record_event(
         std::string((*node).name()),
         paddle::platform::TracerEventType::Operator,
@@ -338,7 +337,7 @@ std::vector<paddle::experimental::Tensor> RunBackward(
       node_input_buffers_dict[next_node] = std::move(grad_tensor_holder);
     }
-    VLOG(3) << "Sum grad inputs for edge slot: " << edge_rank.first
+    VLOG(3) << "Sum or Move grad inputs for edge slot: " << edge_rank.first
             << ", rank: " << edge_rank.second;
     node_input_buffers_dict[next_node]->add(edge_rank.first,
......
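Around this log line, RunBackward routes each produced gradient into the GradTensorHolder of the downstream node, keyed by the edge's (slot, rank); the reworded message reflects that the holder's add() either moves the first gradient into an empty cell or sums into an occupied one. A compressed sketch of that routing behavior follows; GradTensorHolder, add(), and the float cells are illustrative simplifications, not Paddle's implementation.

#include <iostream>
#include <map>
#include <utility>

// Stand-in gradient holder: one cell per (slot, rank) pair.
struct GradTensorHolder {
  std::map<std::pair<int, int>, float> cells;

  // Matches the "Sum or Move" wording: the first gradient routed to a
  // cell initializes it (move); later gradients are summed into it.
  void add(int slot, int rank, float grad) {
    auto key = std::make_pair(slot, rank);
    auto it = cells.find(key);
    if (it == cells.end()) {
      std::cout << "Move grad into slot " << slot << ", rank " << rank << "\n";
      cells[key] = grad;
    } else {
      std::cout << "Sum grad into slot " << slot << ", rank " << rank << "\n";
      it->second += grad;
    }
  }
};

int main() {
  GradTensorHolder holder;
  holder.add(0, 0, 1.5f);  // first edge into this cell: move
  holder.add(0, 0, 2.0f);  // second edge into the same cell: sum
  std::cout << holder.cells[{0, 0}] << "\n";  // prints 3.5
}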
@@ -557,6 +557,9 @@ class GeneralGrad {
     } else {
       copied_next_node = orig_next_node->Copy();
       orig_to_copied_node_map_[orig_next_node.get()] = copied_next_node;
+      VLOG(3) << "Copied Node: " << orig_next_node->name()
+              << " ptr: " << orig_next_node
+              << " to ptr: " << copied_next_node;
       copied_grad_nodes_.push_back(copied_next_node);
     }
......
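This hunk instruments GeneralGrad's copy-on-first-visit memoization: each original grad node is copied at most once, and orig_to_copied_node_map_ lets later visits reuse the copy; the new log pairs the original and copied pointers. A minimal sketch of the pattern is below; GradNode and the node name "MatmulGradNode" are hypothetical stand-ins.

#include <iostream>
#include <map>
#include <memory>
#include <string>

// Minimal stand-in for GradNodeBase: just a name and a Copy() method.
struct GradNode {
  std::string name;
  std::shared_ptr<GradNode> Copy() const {
    return std::make_shared<GradNode>(*this);
  }
};

int main() {
  // Maps each original node to its single copy, so revisiting a node
  // during graph pruning reuses the copy instead of duplicating it.
  std::map<GradNode*, std::shared_ptr<GradNode>> orig_to_copied_node_map;

  auto orig_next_node = std::make_shared<GradNode>(GradNode{"MatmulGradNode"});

  auto it = orig_to_copied_node_map.find(orig_next_node.get());
  std::shared_ptr<GradNode> copied_next_node;
  if (it != orig_to_copied_node_map.end()) {
    copied_next_node = it->second;  // already copied: reuse it
  } else {
    copied_next_node = orig_next_node->Copy();
    orig_to_copied_node_map[orig_next_node.get()] = copied_next_node;
    // The new log makes this mapping visible: original ptr -> copied ptr.
    std::cout << "Copied Node: " << copied_next_node->name
              << " ptr: " << orig_next_node.get()
              << " to ptr: " << copied_next_node.get() << "\n";
  }
}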