Unverified commit 117e951b authored by co63oc, committed by GitHub

Fix typos (#53912)

Parent e916e80c
...
@@ -443,11 +443,11 @@ inline void RunProgramAPI(
       VLOG(4) << "don't require any grad, set this scope can reused";
       VLOG(4) << "is_test: " << is_test
               << ", require_any_grad: " << require_any_grad;
-      global_inner_scope->SetCanReuesd(true);
+      global_inner_scope->SetCanReused(true);
       details::GcScope(global_inner_scope);
     } else {
       VLOG(4) << "not test, set this scope can not reused";
-      global_inner_scope->SetCanReuesd(false);
+      global_inner_scope->SetCanReused(false);
     }
   }
...
@@ -582,7 +582,7 @@ inline void RunProgramGradAPI(
         *backward_global_block,
         global_inner_scope);
     VLOG(4) << "after backward gc all vars";
-    global_inner_scope->SetCanReuesd(true);
+    global_inner_scope->SetCanReused(true);
     details::GcScope(global_inner_scope);
   }
 }
...
@@ -599,9 +599,9 @@ class GradNodeRunProgram : public egr::GradNodeBase {
       // Normally out_scope_vec.size() == 1. for safty, we add for-loop here.
       for (size_t i = 0; i < out_scope_vec->size(); ++i) {
         paddle::framework::Scope *global_inner_scope = out_scope_vec->at(i);
-        global_inner_scope->SetCanReuesd(true);
+        global_inner_scope->SetCanReused(true);
         details::GcScope(global_inner_scope);
-        VLOG(4) << "global_inner_scope SetCanReuesd";
+        VLOG(4) << "global_inner_scope SetCanReused";
       }
     }
   }
...
@@ -122,9 +122,9 @@ class Scope {
   std::string Rename(const std::string& origin_name) const;
   // only for dygraph_to_static
-  bool CanReuesd() const { return can_reused_; }
+  bool CanReused() const { return can_reused_; }
-  void SetCanReuesd(bool can_reused) { can_reused_ = can_reused; }
+  void SetCanReused(bool can_reused) { can_reused_ = can_reused; }
  protected:
   struct KeyHasher {
......
...
@@ -1210,7 +1210,7 @@ All parameter, weight, gradient are variables in Paddle.
           Delete all sub-scopes of the current scope.
           )DOC")
       .def("_kids", &Scope::kids)
-      .def_property("_can_reuesd", &Scope::CanReuesd, &Scope::SetCanReuesd);
+      .def_property("_can_reused", &Scope::CanReused, &Scope::SetCanReused);
   m.def(
       "Scope",
...
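Side note, not part of the commit: after this rename, the pybind property reachable from Python is `_can_reused`, with both a getter and a setter. A minimal sketch of exercising it, assuming a Paddle build of this era where the internal `paddle.fluid.core` module is importable:

    from paddle.fluid import core  # internal module; import path is an assumption

    # Create a bare C++ Scope and toggle the reuse flag through the
    # renamed pybind property.
    scope = core.Scope()
    scope._can_reused = True    # calls Scope::SetCanReused(true)
    print(scope._can_reused)    # calls Scope::CanReused() -> True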
...
@@ -259,7 +259,7 @@ class PartialProgramLayer:
                 return scope
             else:
                 for scope in self._scope_cache[program_id]:
-                    if scope._can_reuesd:
+                    if scope._can_reused:
                         return scope
                 scope = core.Scope()
                 self._scope_cache[program_id].append(scope)
...
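For context, the branch above implements a reuse-or-create cache keyed by program id: a cached scope is handed out only when its reuse flag is set, otherwise a fresh scope is created and remembered. A hedged standalone sketch of the same pattern (`FakeScope` and `get_scope` are illustrative names, not Paddle APIs):

    class FakeScope:
        # Stand-in for core.Scope carrying only the reuse flag.
        def __init__(self):
            self._can_reused = False

    _scope_cache = {}  # program_id -> list of scopes

    def get_scope(program_id):
        # Reuse a cached scope only when its flag says it is safe;
        # otherwise create a fresh one and cache it for later.
        for scope in _scope_cache.setdefault(program_id, []):
            if scope._can_reused:
                return scope
        scope = FakeScope()
        _scope_cache[program_id].append(scope)
        return scope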
...
@@ -70,7 +70,7 @@ def pairwise_distance(x, y, p=2.0, epsilon=1e-6, keepdim=False, name=None):
     """
     if in_dygraph_mode():
         sub = _C_ops.subtract(x, y)
-        # p_norm op has not uesd epsilon, so change it to the following.
+        # p_norm op has not used epsilon, so change it to the following.
         if epsilon != 0.0:
             epsilon = paddle.fluid.dygraph.base.to_variable(
                 [epsilon], dtype=sub.dtype
...
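The epsilon handling above exists because the underlying p_norm op ignores epsilon, so it is added to the elementwise difference before the norm is taken. A usage sketch of the public API this code backs, assuming a Paddle release that ships paddle.nn.functional.pairwise_distance:

    import paddle

    x = paddle.to_tensor([[1.0, 3.0], [3.0, 5.0]])
    y = paddle.to_tensor([[5.0, 6.0], [7.0, 8.0]])
    # Row-wise L2 distance; epsilon keeps the norm away from exact zero.
    dist = paddle.nn.functional.pairwise_distance(x, y, p=2.0, epsilon=1e-6)
    print(dist.shape)  # [2]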
...
@@ -127,7 +127,7 @@ def batch_norm(
     """
     Applies Batch Normalization as described in the paper Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift .
-    nn.functional.batch_norm is uesd for nn.BatchNorm1D, nn.BatchNorm2D, nn.BatchNorm3D. Please use above API for BatchNorm.
+    nn.functional.batch_norm is used for nn.BatchNorm1D, nn.BatchNorm2D, nn.BatchNorm3D. Please use above API for BatchNorm.
     Parameters:
         x(Tesnor): input value. It's data type should be float32, float64.
...
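Since the fixed docstring notes that nn.functional.batch_norm backs the nn.BatchNorm1D/2D/3D layers, here is a hedged inference-mode usage sketch (shapes and values are illustrative, not from the commit):

    import paddle

    x = paddle.rand([2, 3, 8])        # (N, C, L) input with C = 3 channels
    running_mean = paddle.zeros([3])
    running_var = paddle.ones([3])
    weight = paddle.ones([3])         # per-channel scale (gamma)
    bias = paddle.zeros([3])          # per-channel shift (beta)
    # training=False normalizes with the provided running statistics,
    # matching what nn.BatchNorm1D does in eval mode.
    out = paddle.nn.functional.batch_norm(
        x, running_mean, running_var, weight, bias, training=False
    )
    print(out.shape)  # [2, 3, 8]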