PaddlePaddle / Paddle

Commit 39a1ab69 (unverified)
Authored Mar 08, 2023 by chenxujun; committed via GitHub on Mar 08, 2023
Fix typos (#51338)
Parent: 079f41c8
Showing 7 changed files with 12 additions and 12 deletions.
paddle/fluid/distributed/collective/process_group_nccl.cc (+1, -1)
paddle/fluid/distributed/collective/reducer.cc (+3, -3)
paddle/fluid/distributed/fleet_executor/compute_interceptor.cc (+1, -1)
paddle/fluid/distributed/index_dataset/index_wrapper.cc (+2, -2)
paddle/fluid/distributed/ps.proto (+1, -1)
paddle/fluid/distributed/ps/service/communicator/communicator.cc (+2, -2)
paddle/fluid/distributed/ps/service/communicator/communicator.h (+2, -2)
paddle/fluid/distributed/collective/process_group_nccl.cc

@@ -129,7 +129,7 @@ ncclComm_t ProcessGroupNCCL::NCCLComm(const Place& place) const {
       iter,
       place_to_comm_ctx_.end(),
       phi::errors::NotFound(
-          "Cannot find the NCCL commmunicator in this process group."));
+          "Cannot find the NCCL communicator in this process group."));
   return iter->second->nccl_comm();
 }
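The hunk above only fixes the spelling inside the NotFound message; the surrounding code is the usual look-up-then-enforce shape visible in the hunk itself: find the cached NCCL communicator for a place, fail with a descriptive error if it is missing, otherwise return it. Below is a minimal standalone sketch of that shape, assuming a plain std::map and an ordinary exception standing in for PADDLE_ENFORCE_NE and phi::errors::NotFound; CommCtx, CtxMap, and FindComm are hypothetical names used only for illustration, not Paddle APIs.

    #include <map>
    #include <memory>
    #include <stdexcept>
    #include <string>

    // Hypothetical stand-in for the per-place communicator context.
    struct CommCtx {
      int nccl_comm_handle = 0;  // placeholder for the real ncclComm_t
    };

    using CtxMap = std::map<std::string, std::shared_ptr<CommCtx>>;

    // Mirrors the enforce-then-return shape of ProcessGroupNCCL::NCCLComm:
    // look up the communicator for `key` and fail loudly if it is absent.
    int FindComm(const CtxMap& place_to_comm_ctx, const std::string& key) {
      auto iter = place_to_comm_ctx.find(key);
      if (iter == place_to_comm_ctx.end()) {  // stands in for PADDLE_ENFORCE_NE
        throw std::runtime_error(
            "Cannot find the NCCL communicator in this process group.");
      }
      return iter->second->nccl_comm_handle;
    }

    int main() {
      CtxMap ctxs;
      ctxs["gpu:0"] = std::make_shared<CommCtx>();
      return FindComm(ctxs, "gpu:0");  // an unknown key would throw instead
    }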
paddle/fluid/distributed/collective/reducer.cc

@@ -803,7 +803,7 @@ void EagerReducer::MarkVarReady(const size_t var_index,
       "parameters participate in the backward calculation "
       "again at a later time (e.g. after the forward function, "
       "the loss calculation uses the unused "
-      "paramters of the forward and trigger backward), "
+      "parameters of the forward and trigger backward), "
       "its gradient will be wrong.";
   PADDLE_ENFORCE_EQ(has_marked_unused_vars_,

@@ -868,7 +868,7 @@ void EagerReducer::MarkVarReady(const size_t var_index,
           "parameters without generating gradients during training. "
           "For example, if is_sparese=True is used in Embedding, "
           "the current step of this parameter cannot generate gradient "
-          "because of stop_gradient/detatch, where error will occur.",
+          "because of stop_gradient/detach, where error will occur.",
           var_index,
           tensors_[var_index].name()));

@@ -996,7 +996,7 @@ void EagerReducer::ProcessUnusedDenseVars() {
       // NOTE(haohongxiang): Calling SetFakeEmpty here is to make sure that
       // gradient accumulation can continue normally after clear_gradients()
-      // especiall in cases including complex control flow.
+      // especially in cases including complex control flow.
       std::static_pointer_cast<egr::GradNodeAccumulation>(
           GetGradNodeFromTensor(&tensors_[var_index]))
           ->SetFakeEmpty(false);
paddle/fluid/distributed/fleet_executor/compute_interceptor.cc

@@ -192,7 +192,7 @@ void ComputeInterceptor::RunOps() {
                     microbatch_scopes_.size(),
                     platform::errors::InvalidArgument(
                         "Step out of range. There are %ld "
-                        "microbatch_scopes, but recevice scope index %ld",
+                        "microbatch_scopes, but receive scope index %ld",
                         microbatch_scopes_.size(),
                         cur_scope_id_));
 }
paddle/fluid/distributed/index_dataset/index_wrapper.cc

@@ -52,7 +52,7 @@ int TreeIndex::Load(const std::string filename) {
         platform::errors::InvalidArgument(
             "Read from file: %s failed. Valid Format is "
             "an integer representing the length of the following string, "
-            "and the string itself.We got an iteger[% d], "
+            "and the string itself.We got an integer[% d], "
            "but the following string's length is [%d].",
             filename,
             num,

@@ -75,7 +75,7 @@ int TreeIndex::Load(const std::string filename) {
     //     PADDLE_ENFORCE_NE(node.id(), 0,
     //                       platform::errors::InvalidArgument(
-    //                           "Node'id should not be equel to zero."));
+    //                           "Node'id should not be equal to zero."));
     if (node.is_leaf()) {
       id_codes_map_[node.id()] = code;
     }
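For context, the corrected message describes the record layout TreeIndex::Load expects from the file: an integer giving the length of the following string, then the string itself, with the error firing when the two disagree. The snippet below is a minimal standalone sketch of reading and validating one such length-prefixed record with plain iostreams; ReadRecord, the 32-bit length assumption, and the file name are hypothetical, and this is not Paddle's actual loader.

    #include <cstdint>
    #include <fstream>
    #include <stdexcept>
    #include <string>

    // Read one length-prefixed record: a 32-bit length, then that many bytes.
    // Throws when the payload is shorter than the declared length, which is
    // the mismatch the InvalidArgument message above reports.
    std::string ReadRecord(std::ifstream& in) {
      int32_t len = 0;
      in.read(reinterpret_cast<char*>(&len), sizeof(len));
      if (!in || len < 0) {
        throw std::runtime_error("Failed to read a valid record length.");
      }
      std::string payload(static_cast<size_t>(len), '\0');
      in.read(&payload[0], len);
      if (in.gcount() != len) {
        throw std::runtime_error(
            "Declared length does not match the following string's length.");
      }
      return payload;
    }

    int main() {
      std::ifstream in("tree_index.bin", std::ios::binary);  // hypothetical file
      if (in) ReadRecord(in);
      return 0;
    }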
paddle/fluid/distributed/ps.proto

@@ -81,7 +81,7 @@ message ServerServiceParameter {
   optional string server_class = 1 [ default = "DownpourBrpcPsServer" ];
   optional string client_class = 2 [ default = "DownpourBrpcPsClient" ];
   optional string service_class = 3 [ default = "DownpourPsService" ];
-  optional uint32 start_server_port = 4 [ default = 0 ];  //will find a avaliable port from it
+  optional uint32 start_server_port = 4 [ default = 0 ];  //will find a available port from it
   optional uint32 server_thread_num = 5 [ default = 12 ];
 }
paddle/fluid/distributed/ps/service/communicator/communicator.cc

@@ -174,7 +174,7 @@ void Communicator::RpcSendDenseParam(const std::vector<std::string> &varnames,
     float *w = tensor->mutable_data<float>(place);
     paddle::distributed::Region reg(w, tensor->numel());
     regions.emplace_back(reg);
-    VLOG(1) << "rpc_send_dense_param Var " << t << " talbe_id " << table_id
+    VLOG(1) << "rpc_send_dense_param Var " << t << " table_id " << table_id
             << " Temp_data[0] " << w[0] << " Temp_data[-1] "
             << w[tensor->numel() - 1];
   }

@@ -1514,7 +1514,7 @@ void FLCommunicator::InitBrpcClient(
   if (_worker_ptr.get() == nullptr) {
     VLOG(0) << "fl-ps > FLCommunicator::InitBrpcClient get _worker_ptr";
     _worker_ptr =
-        fleet->worker_ptr_;  // FleetWrapper::InitWorker must be excuted
+        fleet->worker_ptr_;  // FleetWrapper::InitWorker must be executed
                              // before, but no need for Coordinator
   }
   if (coordinator_client_ptr_ == nullptr) {
paddle/fluid/distributed/ps/service/communicator/communicator.h

@@ -277,7 +277,7 @@ class Communicator {
   virtual void RpcRecvSparse(const std::string &varname,
                              int table_id,
                              Scope *scope);
-  // 7. send gloabl step
+  // 7. send global step
   virtual void SendGlobalStep(const CommContext &ctx,
                               int batches,
                               Scope *send_scope);

@@ -572,7 +572,7 @@ class SyncCommunicator : public HalfAsyncCommunicator {
       : HalfAsyncCommunicator(envs) {}

   void InitEnvs() {
-    // enfore to recv after send
+    // enforce to recv after send
     independent_recv_ = false;
     min_send_grad_num_before_recv_ = 0;
     max_merge_var_num_ = std::stoi(envs.at("communicator_max_merge_var_num"));