Unverified commit c958ba74, authored by 小飞猪, committed by GitHub

[xdoctest][task 248-249,266-267,269] reformat example code with google style in `incubate/distributed/fleet/*`, `incubate/nn/layer/*` (#56772)

* [Doctest]fix No.248-249,266-267,269, test=docs_preview

* fix style

* fix

* add env:DISTRIBUTED
Parent e9364a38
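Every docstring below is rewritten with the same pattern: the plain example block becomes an interactive doctest session, `# doctest: +REQUIRES(env:DISTRIBUTED)` (or `env:GPU` for the fused layers) declares the environment the example needs, and examples that reference variables not defined in the snippet additionally get `# doctest: +SKIP('dependency on custom variables')`. A minimal sketch of the resulting format, taken from the first `FleetUtil` example in the diff below:

    >>> # doctest: +REQUIRES(env:DISTRIBUTED)
    >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
    >>> fleet_util = FleetUtil()
    >>> fleet_util.rank0_print("my log")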
@@ -46,9 +46,10 @@ class FleetUtil:
     Examples:
         .. code-block:: python

-            from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-            fleet_util = FleetUtil()
-            fleet_util.rank0_print("my log")
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+            >>> fleet_util = FleetUtil()
+            >>> fleet_util.rank0_print("my log")

     """
@@ -81,9 +82,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_print("my log")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_print("my log")

         """
         if fleet.worker_index() != 0:
@@ -101,9 +103,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_info("my log info")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_info("my log info")

         """
         if fleet.worker_index() != 0:
@@ -120,9 +123,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_error("my log error")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_error("my log error")

         """
         if fleet.worker_index() != 0:
@@ -148,9 +152,11 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.set_zero(myvar.name, myscope)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.set_zero(myvar.name, myscope)

         """
         param = scope.var(var_name).get_tensor()
@@ -176,23 +182,27 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.print_global_auc(myscope, stat_pos=stat_pos.name,
-                                            stat_neg=stat_neg.name)
-
-                # below is part of model
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                                                  label=label, curve='ROC',\
-                                                  num_thresholds=4096)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.print_global_auc(myscope, stat_pos=stat_pos.name,
+                ...                             stat_neg=stat_neg.name)
+
+                >>> # below is part of model
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")
+                >>> binary_predict = fluid.layers.concat(input=[
+                ...     paddle.subtract(
+                ...         fluid.layers.ceil(similarity_norm),
+                ...         similarity_norm),
+                ...     similarity_norm],
+                ...     axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos,
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,
+                ...                                   label=label,curve='ROC',
+                ...                                   num_thresholds=4096)

         """
         auc_value = self.get_global_auc(scope, stat_pos, stat_neg)
@@ -218,11 +228,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                auc_value, _ = fleet_util.get_global_auc(myscope,
-                                                         stat_pos=stat_pos,
-                                                         stat_neg=stat_neg)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> auc_value, _ = fleet_util.get_global_auc(myscope,
+                ...                                          stat_pos=stat_pos,
+                ...                                          stat_neg=stat_neg)

         """
         if scope.find_var(stat_pos) is None or scope.find_var(stat_neg) is None:
@@ -288,9 +300,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.load_fleet_model("hdfs:/my/model/path", table_id=1)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.load_fleet_model_one_table(1, path="hdfs:/my/model/path")

         """
         fleet.load_one_table(table_id, path)
@@ -306,12 +319,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-
-                fleet_util.load_fleet_model("hdfs:/my/model/path")
-                fleet_util.load_fleet_model("hdfs:/my/model/path", mode=0)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.load_fleet_model("hdfs:/my/model/path")
+                >>> fleet_util.load_fleet_model("hdfs:/my/model/path", mode=0)

         """
         fleet.init_server(path, mode=mode)
@@ -328,9 +342,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_fleet_model("hdfs:/my/model/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_fleet_model("hdfs:/my/model/path")

         """
         fleet.save_persistables(None, path, mode=mode)
@@ -406,15 +421,15 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_model_donefile(output_path="hdfs:/my/output",
-                                                model_path="hdfs:/my/model",
-                                                day=20190723,
-                                                pass_id=66,
-                                                xbox_base_key=int(time.time()),
-                                                hadoop_fs_name="hdfs://xxx",
-                                                hadoop_fs_ugi="user,passwd")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_model_donefile(output_path="hdfs:/my/output",
+                ...                                 day=20190723,
+                ...                                 pass_id=66,
+                ...                                 xbox_base_key=int(time.time()),
+                ...                                 hadoop_fs_name="hdfs://xxx",
+                ...                                 hadoop_fs_ugi="user,passwd")

         """
         day = str(day)
@@ -508,19 +523,18 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_xbox_donefile(
-                    output_path="hdfs:/my/output/",
-                    model_path="hdfs:/my/output/20190722/01",
-                    day=20190722,
-                    pass_id=1,
-                    xbox_base_key=int(time.time()),
-                    data_path="hdfs:/my/data/",
-                    hadoop_fs_name="hdfs://xxx",
-                    hadoop_fs_ugi="user,passwd",
-                    monitor_data={}
-                )
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_xbox_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     xbox_base_key=int(time.time()),
+                ...     data_path="hdfs:/my/data/",
+                ...     hadoop_fs_name="hdfs://xxx",
+                ...     hadoop_fs_ugi="user,passwd",
+                ...     monitor_data={})

         """
         day = str(day)
@@ -627,16 +641,16 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_cache_donefile(
-                    output_path="hdfs:/my/output/",
-                    day=20190722,
-                    pass_id=1,
-                    key_num=123456,
-                    hadoop_fs_name="hdfs://xxx",
-                    hadoop_fs_ugi="user,passwd",
-                )
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_cache_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     key_num=123456,
+                ...     hadoop_fs_name="hdfs://xxx",
+                ...     hadoop_fs_ugi="user,passwd")

         """
         day = str(day)
@@ -686,9 +700,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.load_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.load_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -711,9 +726,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -735,9 +751,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_batch_model("hdfs:/my/path", 20190722)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_batch_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -759,9 +776,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_batch_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_delta_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -783,9 +801,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_xbox_base_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_xbox_base_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -813,9 +832,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_cache_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_cache_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -848,9 +868,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_cache_base_model("hdfs:/my/path", 20190722)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_cache_base_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -875,9 +896,11 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.pull_all_dense_params(my_scope, my_program)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.pull_all_dense_params(my_scope, my_program)

         """
         fleet._role_maker._barrier_worker()
@@ -950,18 +973,20 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_paddle_inference_model(exe,
-                                                       join_scope,
-                                                       join_program,
-                                                       feeded_vars,
-                                                       target_vars,
-                                                       "hdfs:/my/output/path/",
-                                                       day=20190727,
-                                                       pass_id=6,
-                                                       hadoop_fs_name="xxx",
-                                                       hadoop_fs_ugi="xxx,xxx")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_paddle_inference_model(exe,
+                ...                                        join_scope,
+                ...                                        join_program,
+                ...                                        feeded_vars,
+                ...                                        target_vars,
+                ...                                        "hdfs:/my/output/path/",
+                ...                                        day=20190727,
+                ...                                        pass_id=6,
+                ...                                        hadoop_fs_name="xxx",
+                ...                                        hadoop_fs_ugi="xxx,xxx")

         """
         day = str(day)
         pass_id = str(pass_id)
@@ -1044,38 +1069,40 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_paddle_params(exe,
-                                              join_scope,
-                                              join_program,
-                                              "paddle_dense.model.0",
-                                              "hdfs:/my/output/path/",
-                                              day=20190727,
-                                              pass_id=6,
-                                              hadoop_fs_name="xxx",
-                                              hadoop_fs_ugi="xxx,xxx",
-                                              var_names=join_all_var_names)
-                fleet_util.save_paddle_params(exe,
-                                              join_scope,
-                                              join_program,
-                                              "paddle_dense.model.usr.0",
-                                              "hdfs:/my/output/path/",
-                                              day=20190727,
-                                              pass_id=6,
-                                              hadoop_fs_name="xxx",
-                                              hadoop_fs_ugi="xxx,xxx",
-                                              var_names=join_user_var_names)
-                fleet_util.save_paddle_params(exe,
-                                              join_scope,
-                                              join_program,
-                                              "paddle_dense.model.item.0",
-                                              "hdfs:/my/output/path/",
-                                              day=20190727,
-                                              pass_id=6,
-                                              hadoop_fs_name="xxx",
-                                              hadoop_fs_ugi="xxx,xxx",
-                                              var_names=join_user_item_names)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_paddle_params(exe,
+                ...                               join_scope,
+                ...                               join_program,
+                ...                               "paddle_dense.model.0",
+                ...                               "hdfs:/my/output/path/",
+                ...                               day=20190727,
+                ...                               pass_id=6,
+                ...                               hadoop_fs_name="xxx",
+                ...                               hadoop_fs_ugi="xxx,xxx",
+                ...                               var_names=join_all_var_names)
+                >>> fleet_util.save_paddle_params(exe,
+                ...                               join_scope,
+                ...                               join_program,
+                ...                               "paddle_dense.model.usr.0",
+                ...                               "hdfs:/my/output/path/",
+                ...                               day=20190727,
+                ...                               pass_id=6,
+                ...                               hadoop_fs_name="xxx",
+                ...                               hadoop_fs_ugi="xxx,xxx",
+                ...                               var_names=join_user_var_names)
+                >>> fleet_util.save_paddle_params(exe,
+                ...                               join_scope,
+                ...                               join_program,
+                ...                               "paddle_dense.model.item.0",
+                ...                               "hdfs:/my/output/path/",
+                ...                               day=20190727,
+                ...                               pass_id=6,
+                ...                               hadoop_fs_name="xxx",
+                ...                               hadoop_fs_ugi="xxx,xxx",
+                ...                               var_names=join_user_item_names)

         """
         day = str(day)
@@ -1139,11 +1166,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox_base("hdfs:/my/path", 20190722,
-                                                       88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox_base("hdfs:/my/path",
+                ...                                        hadoop_fs_name="hdfs://xxx",
+                ...                                        hadoop_fs_ugi="user,passwd")

         """
         donefile_path = output_path + "/xbox_base_done.txt"
@@ -1187,10 +1216,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox("hdfs:/my/path",
+                ...                                   hadoop_fs_name="hdfs://xxx",
+                ...                                   hadoop_fs_ugi="user,passwd")

         """
         donefile_path = output_path + "/xbox_patch_done.txt"
@@ -1235,10 +1267,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_model("hdfs:/my/path",
+                ...                                    hadoop_fs_name="hdfs://xxx",
+                ...                                    hadoop_fs_ugi="user,passwd")

         """
         last_save_day = -1
@@ -1279,14 +1314,15 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                online_pass_interval = fleet_util.get_online_pass_interval(
-                    days="{20190720..20190729}",
-                    hours="{0..23}",
-                    split_interval=5,
-                    split_per_pass=2,
-                    is_data_hourly_placed=False)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> online_pass_interval = fleet_util.get_online_pass_interval(
+                ...     days="{20190720..20190729}",
+                ...     hours="{0..23}",
+                ...     split_interval=5,
+                ...     split_per_pass=2,
+                ...     is_data_hourly_placed=False)

         """
         days = os.popen("echo -n " + days).read().split(" ")
@@ -1358,35 +1394,37 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                metric_list = fleet_util.get_global_metrics(myscope,
-                                                            stat_pos.name,
-                                                            stat_neg.name,
-                                                            local_sqrerr.name,
-                                                            local_abserr.name,
-                                                            local_prob.name,
-                                                            local_q.name,
-                                                            local_pos_ins.name,
-                                                            local_total_ins.name)
-
-                # below is part of example model
-                label = paddle.static.data(name="click", shape=[-1, 1],\
-                    dtype="int64", lod_level=0)
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                                                  label=label, curve='ROC',\
-                                                  num_thresholds=4096)
-                local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins,\
-                    local_total_ins = paddle.static.ctr_metric_bundle(\
-                        similarity_norm, label)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> metric_list = fleet_util.get_global_metrics(myscope,
+                ...                                             stat_pos.name,
+                ...                                             stat_neg.name,
+                ...                                             local_sqrerr.name,
+                ...                                             local_abserr.name,
+                ...                                             local_prob.name,
+                ...                                             local_q.name,
+                ...                                             local_pos_ins.name,
+                ...                                             local_total_ins.name)
+
+                >>> # below is part of example model
+                >>> label = paddle.static.data(name="click", shape=[-1, 1],\
+                ...     dtype="int64", lod_level=0)
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(\
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")\
+                >>> binary_predict = fluid.layers.concat(input=[\
+                ...     paddle.subtract(\
+                ...     fluid.layers.ceil(similarity_norm), similarity_norm),\
+                ...     similarity_norm], axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,\
+                ...                                   label=label, curve='ROC',\
+                ...                                   num_thresholds=4096)
+                >>> local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins,\
+                ...     local_total_ins = paddle.static.ctr_metric_bundle(\
+                ...         similarity_norm, label)

         """
         if (
@@ -1558,35 +1596,37 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.print_global_metrics(myscope,
-                                                stat_pos.name,
-                                                stat_neg.name,
-                                                local_sqrerr.name,
-                                                local_abserr.name,
-                                                local_prob.name,
-                                                local_q.name,
-                                                local_pos_ins.name,
-                                                local_total_ins.name)
-
-                # below is part of model
-                label = paddle.static.data(name="click", shape=[-1, 1],\
-                    dtype="int64", lod_level=0)
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                                                  label=label, curve='ROC',\
-                                                  num_thresholds=4096)
-                local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins, \
-                    local_total_ins = paddle.static.ctr_metric_bundle(\
-                        similarity_norm, label)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.print_global_metrics(myscope,
+                ...                                 stat_pos.name,
+                ...                                 stat_neg.name,
+                ...                                 local_sqrerr.name,
+                ...                                 local_abserr.name,
+                ...                                 local_prob.name,
+                ...                                 local_q.name,
+                ...                                 local_pos_ins.name,
+                ...                                 local_total_ins.name)
+
+                >>> # below is part of model
+                >>> label = paddle.static.data(name="click", shape=[-1, 1],\
+                ...     dtype="int64", lod_level=0)
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(\
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")\
+                >>> binary_predict = fluid.layers.concat(input=[\
+                ...     paddle.subtract(\
+                ...     fluid.layers.ceil(similarity_norm), similarity_norm),\
+                ...     similarity_norm], axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,\
+                ...                                   label=label, curve='ROC',\
+                ...                                   num_thresholds=4096)
+                >>> local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins, \
+                ...     local_total_ins = paddle.static.ctr_metric_bundle(\
+                ...         similarity_norm, label)

         """
         if (
@@ -1722,12 +1762,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                program_path = "./program.pbtxt"
-                is_text = True
-                output_dir = "/tmp/"
-                fleet_util.parse_program_proto(program_path, is_text, output_dir)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> program_path = "./program.pbtxt"
+                >>> is_text = True
+                >>> output_dir = "/tmp/"
+                >>> fleet_util.parse_program_proto(program_path, is_text, output_dir)

         """
         program = self.load_program(prog_path, is_text)
         utils.parse_program(program, output_dir)
@@ -1740,9 +1781,10 @@ class GPUPSUtil(FleetUtil):
     Examples:
         .. code-block:: python

-            from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-            fleet_util = GPUPSUtil()
-            fleet_util.rank0_print("my log")
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+            >>> fleet_util = GPUPSUtil()
+            >>> fleet_util.rank0_print("my log")

     """
     def __init__(self, fs_client=None):
@@ -1766,9 +1808,10 @@ class GPUPSUtil(FleetUtil):
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                fleet_util = GPUPSUtil()
-                fleet_util.init(20190722, 88, 88, "./afs.conf")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.init(20190722, 88, 88, "./afs.conf")

         """
         self._afs.init(fs_name, fs_user, fs_passwd, fs_conf)
@@ -1785,11 +1828,12 @@ class GPUPSUtil(FleetUtil):
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)

         """
         self._afs = fs_client
@@ -1809,13 +1853,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox_base("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox_base("hdfs:/my/path")

         """
         donefile_path = output_path + "/xbox_base_done.txt"
@@ -1851,13 +1896,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox("hdfs:/my/path")

         """
         donefile_path = output_path + "/xbox_patch_done.txt"
@@ -1894,13 +1940,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_model("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_model("hdfs:/my/path")

         """
         last_save_day = -1
@@ -1942,16 +1989,16 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_model_donefile(output_path="hdfs:/my/output",
-                                                model_path="hdfs:/my/model",
-                                                day=20190723,
-                                                pass_id=66,
-                                                xbox_base_key=int(time.time()))
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_model_donefile(output_path="hdfs:/my/output",
+                ...                                 day=20190723,
+                ...                                 pass_id=66,
+                ...                                 xbox_base_key=int(time.time()))

         """
         day = str(day)
@@ -2041,19 +2088,19 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_xbox_donefile(
-                    output_path="hdfs:/my/output/",
-                    model_path="hdfs:/my/output/20190722/01",
-                    day=20190722,
-                    pass_id=1,
-                    xbox_base_key=int(time.time()),
-                    data_path="hdfs:/my/data/",
-                    monitor_data={})
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_xbox_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     xbox_base_key=int(time.time()),
+                ...     data_path="hdfs:/my/data/",
+                ...     monitor_data={})

         """
         day = str(day)
@@ -2154,16 +2201,17 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_cache_donefile(
-                    output_path="hdfs:/my/output/",
-                    day=20190722,
-                    pass_id=1,
-                    key_num=123456)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_cache_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     key_num=123456)

         """
         day = str(day)
...
@@ -55,11 +55,13 @@ class HashName(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import RoundRobin
+
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]
+            >>> rr = HashName(pserver_endpoints)
+            >>> rr.dispatch(vars)

    """
@@ -95,11 +97,13 @@ class RoundRobin(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
-
-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import RoundRobin
+
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]
+            >>> rr = RoundRobin(pserver_endpoints)
+            >>> rr.dispatch(vars)

    """
...
@@ -46,15 +46,17 @@ class FusedEcMoe(Layer):
     Examples:
         .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe
-
-            x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
-            gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
-            moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
-            y = moe(x, gate)
-            print(y.shape) # [10, 128, 1024]
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe
+            >>> x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
+            >>> gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
+            >>> moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
+            >>> y = moe(x, gate)
+            >>> print(y.shape)
+            [10, 128, 1024]

    """
    def __init__(
...
@@ -56,14 +56,16 @@ class FusedLinear(Layer):
     Examples:
         .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn import FusedLinear
-
-            x = paddle.randn([3, 4])
-            linear = FusedLinear(4, 5)
-            y = linear(x)
-            print(y.shape) # [3, 5]
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn import FusedLinear
+            >>> x = paddle.randn([3, 4])
+            >>> linear = FusedLinear(4, 5)
+            >>> y = linear(x)
+            >>> print(y.shape)
+            [3, 5]

    """
    def __init__(
...
@@ -55,21 +55,25 @@ class ListenAndServ:
     Examples:
         .. code-block:: python

-            import paddle.fluid as fluid
-            import paddle
-            with fluid.program_guard(main):
-                serv = layers.ListenAndServ(
-                    "127.0.0.1:6170", ["X"], optimizer_mode=False)
-                with serv.do():
-                    x = paddle.static.data(
-                        shape=[32, 32],
-                        dtype='float32',
-                        name="X")
-                    paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
-                    paddle.scale(x=x, scale=10.0, out=out_var)
-
-            exe = fluid.Executor(place)
-            exe.run(main)
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.nn.layer.io import ListenAndServ
+            >>> import paddle
+            >>> paddle.enable_static()
+            >>> place = paddle.CPUPlace()
+            >>> main = paddle.static.Program()
+            >>> with paddle.static.program_guard(main):
+            ...     serv = ListenAndServ(
+            ...         "127.0.0.1:6170", ["X"], optimizer_mode=False)
+            ...     with serv.do():
+            ...         x = paddle.static.data(
+            ...             shape=[32, 32],
+            ...             dtype='float32',
+            ...             name="X")
+            ...         paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
+            ...         paddle.scale(x=x, scale=10.0)

+            >>> exe = paddle.static.Executor(place)
+            >>> exe.run(main)

    """
    def __init__(self, endpoint, inputs, fan_in=1, optimizer_mode=True):
@@ -115,7 +119,9 @@ class ListenAndServ:
         return parent_block

     def complete_op(self):
-        from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+        from paddle.incubate.distributed.fleet.parameter_server.mode import (
+            DistributedMode,
+        )
         main_program = self.helper.main_program
         current_block = main_program.current_block()
...