From 117e951b5eecd181b874db4bcdbb3f7977b5d690 Mon Sep 17 00:00:00 2001
From: co63oc
Date: Thu, 18 May 2023 14:25:34 +0800
Subject: [PATCH] Fix typos (#53912)

---
 paddle/fluid/eager/to_static/run_program_op_node.h | 10 +++++-----
 paddle/fluid/framework/scope.h                     |  4 ++--
 paddle/fluid/pybind/pybind.cc                      |  2 +-
 python/paddle/jit/dy2static/partial_program.py     |  2 +-
 python/paddle/nn/functional/distance.py            |  2 +-
 python/paddle/nn/functional/norm.py                |  2 +-
 6 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/paddle/fluid/eager/to_static/run_program_op_node.h b/paddle/fluid/eager/to_static/run_program_op_node.h
index b4deb4e4ac3..548af27a8ac 100644
--- a/paddle/fluid/eager/to_static/run_program_op_node.h
+++ b/paddle/fluid/eager/to_static/run_program_op_node.h
@@ -443,11 +443,11 @@ inline void RunProgramAPI(
       VLOG(4) << "don't require any grad, set this scope can reused";
       VLOG(4) << "is_test: " << is_test
               << ", require_any_grad: " << require_any_grad;
-      global_inner_scope->SetCanReuesd(true);
+      global_inner_scope->SetCanReused(true);
       details::GcScope(global_inner_scope);
     } else {
       VLOG(4) << "not test, set this scope can not reused";
-      global_inner_scope->SetCanReuesd(false);
+      global_inner_scope->SetCanReused(false);
     }
   }
 
@@ -582,7 +582,7 @@ inline void RunProgramGradAPI(
                      *backward_global_block,
                      global_inner_scope);
     VLOG(4) << "after backward gc all vars";
-    global_inner_scope->SetCanReuesd(true);
+    global_inner_scope->SetCanReused(true);
     details::GcScope(global_inner_scope);
   }
 }
@@ -599,9 +599,9 @@ class GradNodeRunProgram : public egr::GradNodeBase {
       // Normally out_scope_vec.size() == 1. for safty, we add for-loop here.
       for (size_t i = 0; i < out_scope_vec->size(); ++i) {
         paddle::framework::Scope *global_inner_scope = out_scope_vec->at(i);
-        global_inner_scope->SetCanReuesd(true);
+        global_inner_scope->SetCanReused(true);
         details::GcScope(global_inner_scope);
-        VLOG(4) << "global_inner_scope SetCanReuesd";
+        VLOG(4) << "global_inner_scope SetCanReused";
       }
     }
   }
diff --git a/paddle/fluid/framework/scope.h b/paddle/fluid/framework/scope.h
index b87a2948780..b0b418c85a3 100644
--- a/paddle/fluid/framework/scope.h
+++ b/paddle/fluid/framework/scope.h
@@ -122,9 +122,9 @@ class Scope {
   std::string Rename(const std::string& origin_name) const;
 
   // only for dygraph_to_static
-  bool CanReuesd() const { return can_reused_; }
+  bool CanReused() const { return can_reused_; }
 
-  void SetCanReuesd(bool can_reused) { can_reused_ = can_reused; }
+  void SetCanReused(bool can_reused) { can_reused_ = can_reused; }
 
 protected:
   struct KeyHasher {
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index 0a01e9e52f5..05eccd45c8f 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -1210,7 +1210,7 @@ All parameter, weight, gradient are variables in Paddle.
           Delete all sub-scopes of the current scope.
)DOC") .def("_kids", &Scope::kids) - .def_property("_can_reuesd", &Scope::CanReuesd, &Scope::SetCanReuesd); + .def_property("_can_reused", &Scope::CanReused, &Scope::SetCanReused); m.def( "Scope", diff --git a/python/paddle/jit/dy2static/partial_program.py b/python/paddle/jit/dy2static/partial_program.py index ad2e62b9e04..042977988d4 100644 --- a/python/paddle/jit/dy2static/partial_program.py +++ b/python/paddle/jit/dy2static/partial_program.py @@ -259,7 +259,7 @@ class PartialProgramLayer: return scope else: for scope in self._scope_cache[program_id]: - if scope._can_reuesd: + if scope._can_reused: return scope scope = core.Scope() self._scope_cache[program_id].append(scope) diff --git a/python/paddle/nn/functional/distance.py b/python/paddle/nn/functional/distance.py index e8e209be18a..067a49816dd 100644 --- a/python/paddle/nn/functional/distance.py +++ b/python/paddle/nn/functional/distance.py @@ -70,7 +70,7 @@ def pairwise_distance(x, y, p=2.0, epsilon=1e-6, keepdim=False, name=None): """ if in_dygraph_mode(): sub = _C_ops.subtract(x, y) - # p_norm op has not uesd epsilon, so change it to the following. + # p_norm op has not used epsilon, so change it to the following. if epsilon != 0.0: epsilon = paddle.fluid.dygraph.base.to_variable( [epsilon], dtype=sub.dtype diff --git a/python/paddle/nn/functional/norm.py b/python/paddle/nn/functional/norm.py index 5cf69fb42b6..e1c3d985a68 100644 --- a/python/paddle/nn/functional/norm.py +++ b/python/paddle/nn/functional/norm.py @@ -127,7 +127,7 @@ def batch_norm( """ Applies Batch Normalization as described in the paper Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift . - nn.functional.batch_norm is uesd for nn.BatchNorm1D, nn.BatchNorm2D, nn.BatchNorm3D. Please use above API for BatchNorm. + nn.functional.batch_norm is used for nn.BatchNorm1D, nn.BatchNorm2D, nn.BatchNorm3D. Please use above API for BatchNorm. Parameters: x(Tesnor): input value. It's data type should be float32, float64. -- GitLab