Unverified · Commit 5745a63f, authored by Leo Chen, committed by GitHub

add log for memory stats (#54083)

* add log for memory stats

* fix string_split in einsum
Parent ae360000
@@ -22,6 +22,7 @@ limitations under the License. */
 #include "paddle/fluid/platform/errors.h"
 #include "paddle/fluid/platform/macros.h"
 #include "paddle/phi/common/thread_data_registry.h"
+#include "paddle/utils/string/string_helper.h"

 namespace paddle {
 namespace memory {
@@ -74,6 +75,13 @@ class Stat : public StatBase {
         thread_data_registry.GetMutableCurrentThreadData();
     thread_local_stat->current += increment;
+    VLOG(8) << string::split_string(
+                   phi::enforce::demangle(typeid(*thread_local_stat).name()),
+                   "::")
+                   .back()
+            << ": Update current_value with " << increment
+            << ", after update, current value = " << GetCurrentValue();
     if (thread_local_stat->current > thread_local_stat->peak) {
       thread_local_stat->peak = thread_local_stat->current;
       int64_t current_value = GetCurrentValue();
@@ -81,8 +89,13 @@ class Stat : public StatBase {
       while (prev_value < current_value &&
              !peak_value_.compare_exchange_weak(prev_value, current_value)) {
       }
-      VLOG(8) << "Update peak_value, after update, peak_value = "
-              << peak_value_.load() << " , current value = " << current_value;
+      VLOG(8) << string::split_string(
+                     phi::enforce::demangle(typeid(*thread_local_stat).name()),
+                     "::")
+                     .back()
+              << ": Update current_value with " << increment
+              << ", after update, peak_value = " << peak_value_.load()
+              << " , current value = " << current_value;
     }
   }
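Both VLOG(8) lines added above prefix the message with the stat's own class name, obtained by demangling the runtime type of the thread-local stat and keeping the last "::"-separated component. Below is a minimal standalone sketch of that tag computation; abi::__cxa_demangle (GCC/Clang) stands in for phi::enforce::demangle, the local Split() mirrors paddle::string::split_string, and DemoMemoryStat is a hypothetical placeholder type, not a Paddle class.

```cpp
#include <cxxabi.h>

#include <cstdlib>
#include <iostream>
#include <string>
#include <typeinfo>
#include <vector>

namespace demo {

// Hypothetical stand-in for a thread-local memory stat type.
struct DemoMemoryStat {};

// Mirrors the behavior of paddle::string::split_string for this sketch.
std::vector<std::string> Split(const std::string& s, const std::string& delim) {
  std::vector<std::string> parts;
  size_t pre = 0, pos = 0;
  while ((pos = s.find(delim, pre)) != std::string::npos) {
    parts.push_back(s.substr(pre, pos - pre));
    pre = pos + delim.size();  // skip the whole delimiter
  }
  parts.push_back(s.substr(pre));  // keep the trailing piece
  return parts;
}

// Stand-in for phi::enforce::demangle, built on the GCC/Clang ABI helper.
std::string Demangle(const char* mangled) {
  int status = 0;
  char* raw = abi::__cxa_demangle(mangled, nullptr, nullptr, &status);
  std::string result = (status == 0 && raw != nullptr) ? raw : mangled;
  std::free(raw);
  return result;
}

}  // namespace demo

int main() {
  demo::DemoMemoryStat stat;
  // Demangle the implementation-defined typeid name to "demo::DemoMemoryStat",
  // then keep only the last "::" component as the log tag.
  std::string full_name = demo::Demangle(typeid(stat).name());
  std::cout << demo::Split(full_name, "::").back() << std::endl;  // DemoMemoryStat
  return 0;
}
```

Built with g++ or clang++, this prints DemoMemoryStat, which is the kind of short tag the new log lines attach to each memory-stat update.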
@@ -152,7 +152,7 @@ void EinsumGradKernel(const Context& dev_ctx,
   if (x.size() == 1) {  // Unary
     auto splits = paddle::string::split_string(equation, "->");
     auto left = splits[0];
-    right = splits[1].substr(1);
+    right = splits[1];
     auto new_equation = right + "->" + gather_labels_except_reduction(left);
     auto new_operands = std::vector<const DenseTensor*>();
     new_operands.push_back(&out_grad);
@@ -171,8 +171,7 @@ void EinsumGradKernel(const Context& dev_ctx,
     auto splits = paddle::string::split_string(equation, "->");
     auto left = splits[0];
     auto ops = paddle::string::split_string(left, ",");
-    right = splits[1].substr(1);
+    right = splits[1];
     auto equation_for_A =
         ops[1] + "," + right + "->" + gather_labels_except_reduction(ops[0]);
     auto equation_for_B =
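The dropped .substr(1) calls are a direct consequence of the split_string fix further down in this diff: the old implementation advanced pre_pos by only one character past the match, so splitting an equation such as "ij->i" on the two-character delimiter "->" returned ">i" as the right-hand side, and callers stripped the stray '>' with substr(1). The sketch below reproduces that artifact; old_split is an illustrative helper mirroring the pre-fix loop, not a Paddle API.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Sketch of the *old* split_string loop: with a two-character delimiter like
// "->", only the '-' is skipped, leaving '>' at the front of the next token.
std::vector<std::string> old_split(const std::string& s, const std::string& d) {
  std::vector<std::string> out;
  size_t pre = 0, pos = 0;
  while ((pos = s.find(d, pre)) != std::string::npos) {
    out.push_back(s.substr(pre, pos - pre));
    pre = pos + 1;  // bug: skips one character instead of d.size()
  }
  std::string tail = s.substr(pre);
  if (!tail.empty()) out.push_back(tail);  // old code also dropped empty tails
  return out;
}

int main() {
  auto splits = old_split("ij->i", "->");
  std::cout << splits[1] << '\n';            // ">i": the leftover delimiter char
  std::cout << splits[1].substr(1) << '\n';  // "i": the old substr(1) workaround
  return 0;
}
```

With the fixed split_string, splits[1] is already "i", so the workaround is removed in both branches of EinsumGradKernel.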
@@ -326,19 +326,16 @@ inline static void ParseEinsumEquation(
     std::vector<int>* output_dims,
     std::string* right,
     std::vector<std::string>* input_strs) {
-  VLOG(5) << "Start ParseEinsumEquation";
+  VLOG(5) << "Start ParseEinsumEquation " << equation;
   auto results = paddle::string::split_string(equation, "->");
   auto left = results[0];
   ReplaceEllipsis(left);
-  *right = results[1].substr(1);
+  *right = results[1];
   ReplaceEllipsis(*right);
   auto op_labels = paddle::string::split_string(left, ",");
-  // split_string("i,") -> ["i"], we push back a "".
+  // split_string("i,") -> ["i", ""], we push back a "".
   // split_string("->") -> [], we push back a "".
-  if (op_labels.size() == 0)
-    op_labels.push_back("");
-  else if (left[left.size() - 1] == ',')
-    op_labels.push_back("");
+  if (op_labels.size() == 0) op_labels.push_back("");
   std::for_each(op_labels.begin(), op_labels.end(), ReplaceEllipsis);
   GlobalInfo(op_labels, *right, labeltype, all_labels);
   InferLabelShape(op_labels, inputs, labelshape, ellipsis_dims, broadcast_dims);
@@ -127,12 +127,10 @@ std::vector<T> split_string(const std::string& str, const std::string& delim) {
   while ((pos = str.find(delim, pre_pos)) != std::string::npos) {
     tmp_str.assign(str, pre_pos, pos - pre_pos);
     res_list.push_back(tmp_str);
-    pre_pos = pos + 1;
+    pre_pos = pos + delim.size();
   }
   tmp_str.assign(str, pre_pos, str.length() - pre_pos);
-  if (!tmp_str.empty()) {
-    res_list.push_back(tmp_str);
-  }
+  res_list.push_back(tmp_str);
   return res_list;
 }
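For reference, here is a standalone, non-template copy of the fixed loop, using only the standard library. It demonstrates the two properties the einsum changes above rely on: pre_pos now skips the whole delimiter, and the trailing token is pushed even when it is empty, so splitting "i," on "," yields {"i", ""}, exactly as the updated comment in ParseEinsumEquation states.

```cpp
#include <cassert>
#include <string>
#include <vector>

// Standalone, non-template copy of the fixed split_string loop shown above.
std::vector<std::string> split_string(const std::string& str,
                                      const std::string& delim) {
  std::vector<std::string> res_list;
  std::string tmp_str;
  size_t pre_pos = 0, pos = 0;
  while ((pos = str.find(delim, pre_pos)) != std::string::npos) {
    tmp_str.assign(str, pre_pos, pos - pre_pos);
    res_list.push_back(tmp_str);
    pre_pos = pos + delim.size();  // skip the whole delimiter, not one char
  }
  tmp_str.assign(str, pre_pos, str.length() - pre_pos);
  res_list.push_back(tmp_str);  // trailing token is kept even when empty
  return res_list;
}

int main() {
  // Trailing delimiter: matches the updated comment in ParseEinsumEquation.
  auto r1 = split_string("i,", ",");
  assert(r1.size() == 2 && r1[0] == "i" && r1[1] == "");

  // Multi-character delimiter: no part of "->" leaks into the next token.
  auto r2 = split_string("ij,jk->ik", "->");
  assert(r2.size() == 2 && r2[0] == "ij,jk" && r2[1] == "ik");
  return 0;
}
```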