Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
PaddlePaddle
Paddle
提交
b0911390
P
Paddle
项目概览
PaddlePaddle
/
Paddle
1 年多 前同步成功
通知
2302
Star
20931
Fork
5422
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
1423
列表
看板
标记
里程碑
合并请求
543
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
P
Paddle
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
1,423
Issue
1,423
列表
看板
标记
里程碑
合并请求
543
合并请求
543
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
提交
b0911390
编写于
4月 16, 2019
作者:
D
dongdaxiang
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
add nccl wrapper for python API
上级
fff795e5
变更
6
显示空白变更内容
内联
并排
Showing
6 changed files
with
177 additions
and
92 deletions
+177
-92
paddle/fluid/framework/fleet/CMakeLists.txt
paddle/fluid/framework/fleet/CMakeLists.txt
+2
-0
paddle/fluid/framework/fleet/nccl_wrapper.cc
paddle/fluid/framework/fleet/nccl_wrapper.cc
+66
-0
paddle/fluid/framework/fleet/nccl_wrapper.h
paddle/fluid/framework/fleet/nccl_wrapper.h
+27
-92
paddle/fluid/pybind/nccl_wrapper_py.cc
paddle/fluid/pybind/nccl_wrapper_py.cc
+52
-0
paddle/fluid/pybind/nccl_wrapper_py.h
paddle/fluid/pybind/nccl_wrapper_py.h
+28
-0
paddle/fluid/pybind/pybind.cc
paddle/fluid/pybind/pybind.cc
+2
-0
未找到文件。
paddle/fluid/framework/fleet/CMakeLists.txt
浏览文件 @
b0911390
...
@@ -3,3 +3,5 @@ if(WITH_PSLIB)
...
@@ -3,3 +3,5 @@ if(WITH_PSLIB)
else()
  cc_library(fleet_wrapper SRCS fleet_wrapper.cc DEPS framework_proto variable_helper scope)
endif(WITH_PSLIB)
# nccl_wrapper is registered unconditionally (outside the WITH_PSLIB branch).
cc_library(nccl_wrapper SRCS nccl_wrapper.cc DEPS framework_proto variable_helper scope)
paddle/fluid/framework/fleet/nccl_wrapper.cc
0 → 100644
浏览文件 @
b0911390
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "paddle/fluid/framework/fleet/nccl_wrapper.h"
#include <utility>
#include "paddle/fluid/framework/data_feed.h"
#include "paddle/fluid/framework/scope.h"
namespace
paddle
{
namespace
framework
{
std
::
shared_ptr
<
NCCLWrapper
>
NCCLWrapper
::
s_instance_
=
NULL
;
bool
NCCLWrapper
::
is_initialized_
=
false
;
void
NCCLWrapper
::
InitNCCL
()
{
platform
::
dynload
::
ncclCommInitRank
(
&
(
nccl_info_
.
comm_
),
nccl_info_
.
global_ranks_
,
nccl_info_
.
nccl_id_
,
nccl_info_
.
my_global_rank_
);
return
;
}
// Installs the NCCL unique id carried by `nccl_info` into this wrapper.
// Intended flow: the root rank produces the id (GetNCCLId), shares it with
// the other ranks, and each of them stores it here before InitNCCL().
void NCCLWrapper::SetNCCLId(const NCCLInfo& nccl_info) {
  nccl_info_.nccl_id_ = nccl_info.nccl_id_;
}
// Generates a fresh NCCL unique id into nccl_info_ and returns a copy of
// the wrapper's NCCLInfo carrying it. PADDLE_ENFORCE aborts on NCCL error.
NCCLInfo NCCLWrapper::GetNCCLId() {
  PADDLE_ENFORCE(
      platform::dynload::ncclGetUniqueId(&(nccl_info_.nccl_id_)));
  return nccl_info_;
}
// Records this process's rank layout and prepares its CUDA device/stream.
//
// local_rank  - device index on this node; also passed to cudaSetDevice.
// global_rank - this process's rank within the whole job.
// ranks       - total number of participating ranks.
void NCCLWrapper::SetRankInfo(const int local_rank, const int global_rank,
                              const int ranks) {
  nccl_info_.local_rank_ = local_rank;
  nccl_info_.my_global_rank_ = global_rank;
  nccl_info_.global_ranks_ = ranks;
  // Bind to the GPU and create the stream later used by SyncVar.
  PADDLE_ENFORCE(cudaSetDevice(local_rank));
  PADDLE_ENFORCE(cudaStreamCreate(&(nccl_info_.stream_)));
}
void
NCCLWrapper
::
SyncVar
(
const
int
root_rank
,
const
Scope
&
scope
,
const
std
::
vector
<
std
::
string
>&
var_names
)
{
for
(
auto
&
name
:
var_names
)
{
auto
var
=
scope
.
FindVar
(
name
);
LoDTensor
*
tensor
=
var
->
GetMutable
<
LoDTensor
>
();
int32_t
total_size
=
tensor
->
numel
();
platform
::
dynload
::
ncclBcast
(
reinterpret_cast
<
void
*>
(
tensor
->
data
<
float
>
()),
total_size
,
ncclFloat
,
root_rank
,
nccl_info_
.
comm_
,
nccl_info_
.
stream_
);
cudaStreamSynchronize
(
nccl_info_
.
stream_
);
}
}
}
// end namespace framework
}
// end namespace paddle
paddle/fluid/framework/fleet/nccl_wrapper.h
浏览文件 @
b0911390
...
@@ -29,114 +29,49 @@ limitations under the License. */
...
@@ -29,114 +29,49 @@ limitations under the License. */
namespace paddle {
namespace framework {

// Plain holder for per-process NCCL state: the rank layout, the unique id
// shared across ranks, the communicator, and the CUDA stream used for
// collectives. All members are filled in by NCCLWrapper.
class NCCLInfo {
 public:
  NCCLInfo() {}
  virtual ~NCCLInfo() {}

 public:
  int local_rank_;        // device index on this node
  int global_ranks_;      // total number of ranks in the job
  int my_global_rank_;    // this process's global rank
  ncclUniqueId nccl_id_;  // id generated on root, installed on every rank
  ncclComm_t comm_;       // communicator created by InitNCCL
  cudaStream_t stream_;   // stream used by SyncVar broadcasts
};
class
NCCLWrapper
{
class
NCCLWrapper
{
public:
public:
virtual
~
NCCLWrapper
()
{}
virtual
~
NCCLWrapper
()
{}
NCCLWrapper
()
{}
NCCLWrapper
()
{}
// Pull sparse variables from server in Sync mode
void
InitNCCL
();
// Param<in>: scope, table_id, var_names, fea_keys
void
SetNCCLId
(
const
NCCLInfo
&
nccl_info
);
// Param<out>: fea_values
NCCLInfo
GetNCCLId
();
void
PullSparseVarsSync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
void
SetRankInfo
(
const
int
local_rank
,
const
int
global_rank
,
const
std
::
vector
<
std
::
string
>&
var_names
,
const
int
ranks
);
std
::
vector
<
uint64_t
>*
fea_keys
,
void
SyncVar
(
const
int
root_rank
,
const
Scope
&
scope
,
std
::
vector
<
std
::
vector
<
float
>>*
fea_values
,
int
fea_dim
);
void
PullDenseVarsSync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
std
::
string
>&
var_names
);
void
PullDenseVarsAsync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
std
::
string
>&
var_names
,
std
::
vector
<::
std
::
future
<
int32_t
>>*
pull_dense_status
);
void
PushDenseParamSync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
std
::
string
>&
var_names
);
const
std
::
vector
<
std
::
string
>&
var_names
);
// Push dense variables to server in async mode
static
std
::
shared_ptr
<
NCCLWrapper
>
GetInstance
()
{
// Param<in>: scope, table_id, var_names,
// Param<out>: push_sparse_status
void
PushDenseVarsAsync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
std
::
string
>&
var_names
,
std
::
vector
<::
std
::
future
<
int32_t
>>*
push_sparse_status
);
void
PushDenseVarsSync
(
Scope
*
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
std
::
string
>&
var_names
);
// Push sparse variables with labels to server in Async mode
// This is specially designed for click/show stats in server
// Param<in>: scope, table_id, var_grad_names,
// fea_keys, fea_labels, sparse_grad_names
// Param<out>: push_values, push_sparse_status
void
PushSparseVarsWithLabelAsync
(
const
Scope
&
scope
,
const
uint64_t
table_id
,
const
std
::
vector
<
uint64_t
>&
fea_keys
,
const
std
::
vector
<
float
>&
fea_labels
,
const
std
::
vector
<
std
::
string
>&
sparse_key_names
,
const
std
::
vector
<
std
::
string
>&
sparse_grad_names
,
const
int
emb_dim
,
std
::
vector
<
std
::
vector
<
float
>>*
push_values
,
std
::
vector
<::
std
::
future
<
int32_t
>>*
push_sparse_status
);
// Push sparse variables to server in Async mode
// Param<In>: scope, table_id, fea_keys, sparse_grad_names
// Param<Out>: push_values, push_sparse_status
/*
void PushSparseVarsAsync(
const Scope& scope,
const uint64_t table_id,
const std::vector<uint64_t>& fea_keys,
const std::vector<std::string>& sparse_grad_names,
std::vector<std::vector<float>>* push_values,
std::vector<::std::future<int32_t>>* push_sparse_status);
*/
void
InitServer
(
const
std
::
string
&
dist_desc
,
int
index
);
void
InitWorker
(
const
std
::
string
&
dist_desc
,
const
std
::
vector
<
uint64_t
>&
host_sign_list
,
int
node_num
,
int
index
);
void
StopServer
();
uint64_t
RunServer
();
void
GatherServers
(
const
std
::
vector
<
uint64_t
>&
host_sign_list
,
int
node_num
);
// gather client ip
void
GatherClients
(
const
std
::
vector
<
uint64_t
>&
host_sign_list
);
// get client info
std
::
vector
<
uint64_t
>
GetClientsInfo
();
// create client to client connection
void
CreateClient2ClientConnection
();
// register client to client communication
typedef
std
::
function
<
int32_t
(
int
,
int
,
const
std
::
string
&
)
>
MsgHandlerFunc
;
int
RegisterClientToClientMsgHandler
(
int
msg_type
,
MsgHandlerFunc
handler
);
// send client to client message
std
::
future
<
int32_t
>
SendClientToClientMsg
(
int
msg_type
,
int
to_client_id
,
const
std
::
string
&
msg
);
template
<
typename
T
>
void
Serialize
(
const
std
::
vector
<
T
*>&
t
,
std
::
string
*
str
);
template
<
typename
T
>
void
Deserialize
(
std
::
vector
<
T
>*
t
,
const
std
::
string
&
str
);
static
std
::
shared_ptr
<
FleetWrapper
>
GetInstance
()
{
if
(
NULL
==
s_instance_
)
{
if
(
NULL
==
s_instance_
)
{
s_instance_
.
reset
(
new
paddle
::
framework
::
Fleet
Wrapper
());
s_instance_
.
reset
(
new
paddle
::
framework
::
NCCL
Wrapper
());
}
}
return
s_instance_
;
return
s_instance_
;
}
}
#ifdef PADDLE_WITH_PSLIB
public:
static
std
::
shared_ptr
<
paddle
::
distributed
::
PSlib
>
pslib_ptr_
;
NCCLInfo
nccl_info_
;
#endif
private:
private:
static
std
::
shared_ptr
<
FleetWrapper
>
s_instance_
;
static
std
::
shared_ptr
<
NCCLWrapper
>
s_instance_
;
#ifdef PADDLE_WITH_PSLIB
std
::
map
<
uint64_t
,
std
::
vector
<
paddle
::
ps
::
Region
>>
_regions
;
#endif
protected:
protected:
static
bool
is_initialized_
;
static
bool
is_initialized_
;
DISABLE_COPY_AND_ASSIGN
(
Fleet
Wrapper
);
DISABLE_COPY_AND_ASSIGN
(
NCCL
Wrapper
);
};
};
}
// end namespace framework
}
// end namespace framework
...
...
paddle/fluid/pybind/nccl_wrapper_py.cc
0 → 100644
浏览文件 @
b0911390
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <fcntl.h>
#ifdef _POSIX_C_SOURCE
#undef _POSIX_C_SOURCE
#endif
#ifdef _XOPEN_SOURCE
#undef _XOPEN_SOURCE
#endif
#include <string>
#include <vector>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/async_executor.h"
#include "paddle/fluid/framework/data_feed.h"
#include "paddle/fluid/framework/data_feed.pb.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/inference/io.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/variant.h"
#include "paddle/fluid/pybind/nccl_wrapper_py.h"
namespace py = pybind11;
namespace pd = paddle::framework;

namespace paddle {
namespace pybind {

// Exposes framework::NCCLWrapper to Python as the "Nccl" class, mapping
// each binding method name onto the corresponding C++ member function.
void BindNCCLWrapper(py::module* m) {
  py::class_<framework::NCCLWrapper>(*m, "Nccl")
      .def(py::init())
      .def("init_nccl", &framework::NCCLWrapper::InitNCCL)
      .def("set_nccl_id", &framework::NCCLWrapper::SetNCCLId)
      .def("set_rank_info", &framework::NCCLWrapper::SetRankInfo)
      .def("sync_var", &framework::NCCLWrapper::SyncVar);
}  // end BindNCCLWrapper

}  // end namespace pybind
}  // end namespace paddle
paddle/fluid/pybind/nccl_wrapper_py.h
0 → 100644
浏览文件 @
b0911390
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "pybind11/pybind11.h"
#include "pybind11/stl.h"
namespace py = pybind11;

namespace paddle {
namespace pybind {

// Registers the NCCLWrapper ("Nccl") Python bindings on module `m`.
// Implemented in nccl_wrapper_py.cc; called from pybind.cc.
void BindNCCLWrapper(py::module* m);

}  // namespace pybind
}  // namespace paddle
paddle/fluid/pybind/pybind.cc
浏览文件 @
b0911390
...
@@ -58,6 +58,7 @@ limitations under the License. */
...
@@ -58,6 +58,7 @@ limitations under the License. */
#include "paddle/fluid/pybind/imperative.h"
#include "paddle/fluid/pybind/imperative.h"
#include "paddle/fluid/pybind/inference_api.h"
#include "paddle/fluid/pybind/inference_api.h"
#include "paddle/fluid/pybind/ir.h"
#include "paddle/fluid/pybind/ir.h"
#include "paddle/fluid/pybind/nccl_wrapper_py.h"
#include "paddle/fluid/pybind/protobuf.h"
#include "paddle/fluid/pybind/protobuf.h"
#include "paddle/fluid/pybind/pybind.h" // NOLINT
#include "paddle/fluid/pybind/pybind.h" // NOLINT
#include "paddle/fluid/pybind/reader_py.h"
#include "paddle/fluid/pybind/reader_py.h"
...
@@ -1405,6 +1406,7 @@ All parameter, weight, gradient are variables in Paddle.
...
@@ -1405,6 +1406,7 @@ All parameter, weight, gradient are variables in Paddle.
BindRecordIOWriter
(
&
m
);
BindRecordIOWriter
(
&
m
);
BindAsyncExecutor
(
&
m
);
BindAsyncExecutor
(
&
m
);
BindFleetWrapper
(
&
m
);
BindFleetWrapper
(
&
m
);
BindNCCLWrapper
(
&
m
);
BindGraph
(
&
m
);
BindGraph
(
&
m
);
BindNode
(
&
m
);
BindNode
(
&
m
);
BindInferenceApi
(
&
m
);
BindInferenceApi
(
&
m
);
...
...
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录