diff --git a/python/paddle/incubate/distributed/fleet/fleet_util.py b/python/paddle/incubate/distributed/fleet/fleet_util.py
index f809a536ca241e4a3047db4295cf190eb62bb349..34dc040c45ab3c38e990bf8d6d36bbe35cf9afb9 100644
--- a/python/paddle/incubate/distributed/fleet/fleet_util.py
+++ b/python/paddle/incubate/distributed/fleet/fleet_util.py
@@ -46,9 +46,10 @@ class FleetUtil:
     Examples:
         .. code-block:: python

-            from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-            fleet_util = FleetUtil()
-            fleet_util.rank0_print("my log")
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+            >>> fleet_util = FleetUtil()
+            >>> fleet_util.rank0_print("my log")

     """
@@ -81,9 +82,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_print("my log")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_print("my log")

         """
         if fleet.worker_index() != 0:
@@ -101,9 +103,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_info("my log info")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_info("my log info")

         """
         if fleet.worker_index() != 0:
@@ -120,9 +123,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.rank0_error("my log error")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.rank0_error("my log error")

         """
         if fleet.worker_index() != 0:
@@ -148,9 +152,11 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.set_zero(myvar.name, myscope)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.set_zero(myvar.name, myscope)

         """
         param = scope.var(var_name).get_tensor()
@@ -176,23 +182,27 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.print_global_auc(myscope, stat_pos=stat_pos.name,
-                    stat_neg=stat_neg.name)
-
-                # below is part of model
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                    label=label, curve='ROC',\
-                    num_thresholds=4096)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.print_global_auc(myscope, stat_pos=stat_pos.name,
+                ...     stat_neg=stat_neg.name)
+
+                >>> # below is part of model
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")
+                >>> binary_predict = fluid.layers.concat(input=[
+                ...     paddle.subtract(
+                ...         fluid.layers.ceil(similarity_norm),
+                ...         similarity_norm),
+                ...     similarity_norm],
+                ...     axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos,
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,
+                ...     label=label, curve='ROC',
+                ...     num_thresholds=4096)

         """
         auc_value = self.get_global_auc(scope, stat_pos, stat_neg)
@@ -218,11 +228,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                auc_value, _ = fleet_util.get_global_auc(myscope,
-                    stat_pos=stat_pos,
-                    stat_neg=stat_neg)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> auc_value, _ = fleet_util.get_global_auc(myscope,
+                ...     stat_pos=stat_pos,
+                ...     stat_neg=stat_neg)

         """
         if scope.find_var(stat_pos) is None or scope.find_var(stat_neg) is None:
@@ -288,9 +300,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.load_fleet_model("hdfs:/my/model/path", table_id=1)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.load_fleet_model_one_table(1, path="hdfs:/my/model/path")

         """
         fleet.load_one_table(table_id, path)
@@ -306,12 +319,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()

-                fleet_util.load_fleet_model("hdfs:/my/model/path")
+                >>> fleet_util.load_fleet_model("hdfs:/my/model/path")

-                fleet_util.load_fleet_model("hdfs:/my/model/path", mode=0)
+                >>> fleet_util.load_fleet_model("hdfs:/my/model/path", mode=0)

         """
         fleet.init_server(path, mode=mode)
@@ -328,9 +342,10 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_fleet_model("hdfs:/my/model/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_fleet_model("hdfs:/my/model/path")

         """
         fleet.save_persistables(None, path, mode=mode)
@@ -406,15 +421,15 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_model_donefile(output_path="hdfs:/my/output",
-                    model_path="hdfs:/my/model",
-                    day=20190723,
-                    pass_id=66,
-                    xbox_base_key=int(time.time()),
-                    hadoop_fs_name="hdfs://xxx",
-                    hadoop_fs_ugi="user,passwd")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_model_donefile(output_path="hdfs:/my/output",
+                ...     day=20190723,
+                ...     pass_id=66,
+                ...     xbox_base_key=int(time.time()),
+                ...     hadoop_fs_name="hdfs://xxx",
+                ...     hadoop_fs_ugi="user,passwd")

         """
         day = str(day)
@@ -508,19 +523,18 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_xbox_donefile(
-                    output_path="hdfs:/my/output/",
-                    model_path="hdfs:/my/output/20190722/01",
-                    day=20190722,
-                    pass_id=1,
-                    xbox_base_key=int(time.time()),
-                    data_path="hdfs:/my/data/",
-                    hadoop_fs_name="hdfs://xxx",
-                    hadoop_fs_ugi="user,passwd",
-                    monitor_data={}
-                    )
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_xbox_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     xbox_base_key=int(time.time()),
+                ...     data_path="hdfs:/my/data/",
+                ...     hadoop_fs_name="hdfs://xxx",
+                ...     hadoop_fs_ugi="user,passwd",
+                ...     monitor_data={})

         """
         day = str(day)
@@ -627,16 +641,16 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.write_cache_donefile(
-                    output_path="hdfs:/my/output/",
-                    day=20190722,
-                    pass_id=1,
-                    key_num=123456,
-                    hadoop_fs_name="hdfs://xxx",
-                    hadoop_fs_ugi="user,passwd",
-                    )
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.write_cache_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     key_num=123456,
+                ...     hadoop_fs_name="hdfs://xxx",
+                ...     hadoop_fs_ugi="user,passwd")

         """
         day = str(day)
@@ -686,9 +700,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.load_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.load_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -711,9 +726,10 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -735,9 +751,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_batch_model("hdfs:/my/path", 20190722)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_batch_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -759,9 +776,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_batch_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_delta_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -783,9 +801,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_xbox_base_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_xbox_base_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -813,9 +832,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_cache_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_cache_model("hdfs:/my/path", 20190722, 88)

         """
         day = str(day)
@@ -848,9 +868,10 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_cache_base_model("hdfs:/my/path", 20190722)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_cache_base_model("hdfs:/my/path", 20190722)

         """
         day = str(day)
@@ -875,9 +896,11 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.pull_all_dense_params(my_scope, my_program)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.pull_all_dense_params(my_scope, my_program)

         """
         fleet._role_maker._barrier_worker()
@@ -950,18 +973,20 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_paddle_inference_model(exe,
-                    join_scope,
-                    join_program,
-                    feeded_vars,
-                    target_vars,
-                    "hdfs:/my/output/path/",
-                    day=20190727,
-                    pass_id=6,
-                    hadoop_fs_name="xxx",
-                    hadoop_fs_ugi="xxx,xxx")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_paddle_inference_model(exe,
+                ...     join_scope,
+                ...     join_program,
+                ...     feeded_vars,
+                ...     target_vars,
+                ...     "hdfs:/my/output/path/",
+                ...     day=20190727,
+                ...     pass_id=6,
+                ...     hadoop_fs_name="xxx",
+                ...     hadoop_fs_ugi="xxx,xxx")

         """
         day = str(day)
         pass_id = str(pass_id)
@@ -1044,38 +1069,40 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.save_paddle_params(exe,
-                    join_scope,
-                    join_program,
-                    "paddle_dense.model.0",
-                    "hdfs:/my/output/path/",
-                    day=20190727,
-                    pass_id=6,
-                    hadoop_fs_name="xxx",
-                    hadoop_fs_ugi="xxx,xxx",
-                    var_names=join_all_var_names)
-                fleet_util.save_paddle_params(exe,
-                    join_scope,
-                    join_program,
-                    "paddle_dense.model.usr.0",
-                    "hdfs:/my/output/path/",
-                    day=20190727,
-                    pass_id=6,
-                    hadoop_fs_name="xxx",
-                    hadoop_fs_ugi="xxx,xxx",
-                    var_names=join_user_var_names)
-                fleet_util.save_paddle_params(exe,
-                    join_scope,
-                    join_program,
-                    "paddle_dense.model.item.0",
-                    "hdfs:/my/output/path/",
-                    day=20190727,
-                    pass_id=6,
-                    hadoop_fs_name="xxx",
-                    hadoop_fs_ugi="xxx,xxx",
-                    var_names=join_user_item_names)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.save_paddle_params(exe,
+                ...     join_scope,
+                ...     join_program,
+                ...     "paddle_dense.model.0",
+                ...     "hdfs:/my/output/path/",
+                ...     day=20190727,
+                ...     pass_id=6,
+                ...     hadoop_fs_name="xxx",
+                ...     hadoop_fs_ugi="xxx,xxx",
+                ...     var_names=join_all_var_names)
+                >>> fleet_util.save_paddle_params(exe,
+                ...     join_scope,
+                ...     join_program,
+                ...     "paddle_dense.model.usr.0",
+                ...     "hdfs:/my/output/path/",
+                ...     day=20190727,
+                ...     pass_id=6,
+                ...     hadoop_fs_name="xxx",
+                ...     hadoop_fs_ugi="xxx,xxx",
+                ...     var_names=join_user_var_names)
+                >>> fleet_util.save_paddle_params(exe,
+                ...     join_scope,
+                ...     join_program,
+                ...     "paddle_dense.model.item.0",
+                ...     "hdfs:/my/output/path/",
+                ...     day=20190727,
+                ...     pass_id=6,
+                ...     hadoop_fs_name="xxx",
+                ...     hadoop_fs_ugi="xxx,xxx",
+                ...     var_names=join_user_item_names)

         """
         day = str(day)
@@ -1139,11 +1166,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox_base("hdfs:/my/path", 20190722,
-                        88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox_base("hdfs:/my/path",
+                ...         hadoop_fs_name="hdfs://xxx",
+                ...         hadoop_fs_ugi="user,passwd")

         """
         donefile_path = output_path + "/xbox_base_done.txt"
@@ -1187,10 +1216,13 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox("hdfs:/my/path",
+                ...         hadoop_fs_name="hdfs://xxx",
+                ...         hadoop_fs_ugi="user,passwd")

         """
         donefile_path = output_path + "/xbox_patch_done.txt"
@@ -1235,10 +1267,13 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_model("hdfs:/my/path", 20190722, 88)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_model("hdfs:/my/path",
+                ...         hadoop_fs_name="hdfs://xxx",
+                ...         hadoop_fs_ugi="user,passwd")

         """
         last_save_day = -1
@@ -1279,14 +1314,15 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                online_pass_interval = fleet_util.get_online_pass_interval(
-                    days="{20190720..20190729}",
-                    hours="{0..23}",
-                    split_interval=5,
-                    split_per_pass=2,
-                    is_data_hourly_placed=False)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> online_pass_interval = fleet_util.get_online_pass_interval(
+                ...     days="{20190720..20190729}",
+                ...     hours="{0..23}",
+                ...     split_interval=5,
+                ...     split_per_pass=2,
+                ...     is_data_hourly_placed=False)

         """
         days = os.popen("echo -n " + days).read().split(" ")
@@ -1358,35 +1394,37 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                metric_list = fleet_util.get_global_metrics(myscope,
-                    stat_pos.name,
-                    stat_neg.name,
-                    local_sqrerr.name,
-                    local_abserr.name,
-                    local_prob.name,
-                    local_q.name,
-                    local_pos_ins.name,
-                    local_total_ins.name)
-
-                # below is part of example model
-                label = paddle.static.data(name="click", shape=[-1, 1],\
-                    dtype="int64", lod_level=0)
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                    label=label, curve='ROC',\
-                    num_thresholds=4096)
-                local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins,\
-                    local_total_ins = paddle.static.ctr_metric_bundle(\
-                    similarity_norm, label)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> metric_list = fleet_util.get_global_metrics(myscope,
+                ...     stat_pos.name,
+                ...     stat_neg.name,
+                ...     local_sqrerr.name,
+                ...     local_abserr.name,
+                ...     local_prob.name,
+                ...     local_q.name,
+                ...     local_pos_ins.name,
+                ...     local_total_ins.name)
+
+                >>> # below is part of example model
+                >>> label = paddle.static.data(name="click", shape=[-1, 1],\
+                ...     dtype="int64", lod_level=0)
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(\
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")
+                >>> binary_predict = fluid.layers.concat(input=[\
+                ...     paddle.subtract(\
+                ...     fluid.layers.ceil(similarity_norm), similarity_norm),\
+                ...     similarity_norm], axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,\
+                ...     label=label, curve='ROC',\
+                ...     num_thresholds=4096)
+                >>> local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins,\
+                ...     local_total_ins = paddle.static.ctr_metric_bundle(\
+                ...     similarity_norm, label)

         """
         if (
@@ -1558,35 +1596,37 @@ class FleetUtil:
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                fleet_util.print_global_metrics(myscope,
-                    stat_pos.name,
-                    stat_neg.name,
-                    local_sqrerr.name,
-                    local_abserr.name,
-                    local_prob.name,
-                    local_q.name,
-                    local_pos_ins.name,
-                    local_total_ins.name)
-
-                # below is part of model
-                label = paddle.static.data(name="click", shape=[-1, 1],\
-                    dtype="int64", lod_level=0)
-                emb = my_slot_net(slots, label) # emb can be fc layer of size 1
-                similarity_norm = fluid.layers.sigmoid(paddle.clip(\
-                    emb, min=-15.0, max=15.0), name="similarity_norm")\
-                binary_predict = fluid.layers.concat(input=[\
-                    paddle.subtract(\
-                    fluid.layers.ceil(similarity_norm), similarity_norm),\
-                    similarity_norm], axis=1)
-                auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
-                    stat_neg] = paddle.static.auc(input=binary_predict,\
-                    label=label, curve='ROC',\
-                    num_thresholds=4096)
-                local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins, \
-                    local_total_ins = paddle.static.ctr_metric_bundle(\
-                    similarity_norm, label)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> # doctest: +SKIP('dependency on custom variables')
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> fleet_util.print_global_metrics(myscope,
+                ...     stat_pos.name,
+                ...     stat_neg.name,
+                ...     local_sqrerr.name,
+                ...     local_abserr.name,
+                ...     local_prob.name,
+                ...     local_q.name,
+                ...     local_pos_ins.name,
+                ...     local_total_ins.name)
+
+                >>> # below is part of model
+                >>> label = paddle.static.data(name="click", shape=[-1, 1],\
+                ...     dtype="int64", lod_level=0)
+                >>> emb = my_slot_net(slots, label) # emb can be fc layer of size 1
+                >>> similarity_norm = fluid.layers.sigmoid(paddle.clip(\
+                ...     emb, min=-15.0, max=15.0), name="similarity_norm")
+                >>> binary_predict = fluid.layers.concat(input=[\
+                ...     paddle.subtract(\
+                ...     fluid.layers.ceil(similarity_norm), similarity_norm),\
+                ...     similarity_norm], axis=1)
+                >>> auc, batch_auc, [batch_stat_pos, batch_stat_neg, stat_pos, \
+                ...     stat_neg] = paddle.static.auc(input=binary_predict,\
+                ...     label=label, curve='ROC',\
+                ...     num_thresholds=4096)
+                >>> local_sqrerr, local_abserr, local_prob, local_q, local_pos_ins, \
+                ...     local_total_ins = paddle.static.ctr_metric_bundle(\
+                ...     similarity_norm, label)

         """
         if (
@@ -1722,12 +1762,13 @@ class FleetUtil:
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
-                fleet_util = FleetUtil()
-                program_path = "./program.pbtxt"
-                is_text = True
-                output_dir = "/tmp/"
-                fleet_util.parse_program_proto(program_path, is_text, output_dir)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import FleetUtil
+                >>> fleet_util = FleetUtil()
+                >>> program_path = "./program.pbtxt"
+                >>> is_text = True
+                >>> output_dir = "/tmp/"
+                >>> fleet_util.parse_program_proto(program_path, is_text, output_dir)

         """
         program = self.load_program(prog_path, is_text)
         utils.parse_program(program, output_dir)
@@ -1740,9 +1781,10 @@ class GPUPSUtil(FleetUtil):
     Examples:
         .. code-block:: python

-            from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-            fleet_util = GPUPSUtil()
-            fleet_util.rank0_print("my log")
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+            >>> fleet_util = GPUPSUtil()
+            >>> fleet_util.rank0_print("my log")

     """

     def __init__(self, fs_client=None):
@@ -1766,9 +1808,10 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                fleet_util = GPUPSUtil()
-                fleet_util.init(20190722, 88, 88, "./afs.conf")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.init(20190722, 88, 88, "./afs.conf")

         """
         self._afs.init(fs_name, fs_user, fs_passwd, fs_conf)
@@ -1785,11 +1828,12 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)

         """
         self._afs = fs_client
@@ -1809,13 +1853,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox_base("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox_base("hdfs:/my/path")

         """
         donefile_path = output_path + "/xbox_base_done.txt"
@@ -1851,13 +1896,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
            .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_xbox("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_xbox("hdfs:/my/path")

         """
         donefile_path = output_path + "/xbox_patch_done.txt"
@@ -1894,13 +1940,14 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                last_save_day, last_save_pass, last_path, xbox_base_key = \
-                    fleet_util.get_last_save_model("hdfs:/my/path")
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> last_save_day, last_save_pass, last_path, xbox_base_key = \
+                ...     fleet_util.get_last_save_model("hdfs:/my/path")

         """
         last_save_day = -1
@@ -1942,16 +1989,16 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_model_donefile(output_path="hdfs:/my/output",
-                    model_path="hdfs:/my/model",
-                    day=20190723,
-                    pass_id=66,
-                    xbox_base_key=int(time.time()))
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_model_donefile(output_path="hdfs:/my/output",
+                ...     day=20190723,
+                ...     pass_id=66,
+                ...     xbox_base_key=int(time.time()))

         """
         day = str(day)
@@ -2041,19 +2088,19 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_xbox_donefile(
-                    output_path="hdfs:/my/output/",
-                    model_path="hdfs:/my/output/20190722/01",
-                    day=20190722,
-                    pass_id=1,
-                    xbox_base_key=int(time.time()),
-                    data_path="hdfs:/my/data/",
-                    monitor_data={})
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_xbox_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     xbox_base_key=int(time.time()),
+                ...     data_path="hdfs:/my/data/",
+                ...     monitor_data={})

         """
         day = str(day)
@@ -2154,16 +2201,17 @@ class GPUPSUtil(FleetUtil):
         Examples:
             .. code-block:: python

-                from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
-                from paddle.distributed.fleet.utils.fs import AFSClient
-                hdfs_client = AFSClient()
-                fleet_util = GPUPSUtil()
-                fleet_util.set_fsclient(hdfs_client)
-                fleet_util.write_cache_donefile(
-                    output_path="hdfs:/my/output/",
-                    day=20190722,
-                    pass_id=1,
-                    key_num=123456)
+                >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+                >>> from paddle.incubate.distributed.fleet.fleet_util import GPUPSUtil
+                >>> from paddle.distributed.fleet.utils.fs import AFSClient
+                >>> hdfs_client = AFSClient()
+                >>> fleet_util = GPUPSUtil()
+                >>> fleet_util.set_fsclient(hdfs_client)
+                >>> fleet_util.write_cache_donefile(
+                ...     output_path="hdfs:/my/output/",
+                ...     day=20190722,
+                ...     pass_id=1,
+                ...     key_num=123456)

         """
         day = str(day)
diff --git a/python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py b/python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py
index 4155413cb59f9b4745522ef86fe56aa83e7ebb75..29f7fda393e7a0d1a89304b9f043e38d4ec902a5 100644
--- a/python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py
+++ b/python/paddle/incubate/distributed/fleet/parameter_server/ir/ps_dispatcher.py
@@ -55,11 +55,13 @@ class HashName(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import HashName

-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]
+
+            >>> rr = HashName(pserver_endpoints)
+            >>> rr.dispatch(vars)

     """
@@ -95,11 +97,13 @@ class RoundRobin(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import RoundRobin
+
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]

-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> rr = RoundRobin(pserver_endpoints)
+            >>> rr.dispatch(vars)

     """
diff --git a/python/paddle/incubate/nn/layer/fused_ec_moe.py b/python/paddle/incubate/nn/layer/fused_ec_moe.py
index 407c8753519cef332127018daaf48086f1bc920e..1ae117fce45448cc5a41a6d7fe69dde187ccbe35 100644
--- a/python/paddle/incubate/nn/layer/fused_ec_moe.py
+++ b/python/paddle/incubate/nn/layer/fused_ec_moe.py
@@ -46,15 +46,17 @@ class FusedEcMoe(Layer):
     Examples:
        .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe

-            x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
-            gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
-            moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
-            y = moe(x, gate)
-            print(y.shape) # [10, 128, 1024]
+            >>> x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
+            >>> gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
+            >>> moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
+            >>> y = moe(x, gate)
+            >>> print(y.shape)
+            [10, 128, 1024]

     """

     def __init__(
diff --git a/python/paddle/incubate/nn/layer/fused_linear.py b/python/paddle/incubate/nn/layer/fused_linear.py
index 965aedaa391e97512e1b30bfe84d3eef29a747f0..14b159b9eb896204a24423efbe487c49b7269f8d 100644
--- a/python/paddle/incubate/nn/layer/fused_linear.py
+++ b/python/paddle/incubate/nn/layer/fused_linear.py
@@ -56,14 +56,16 @@ class FusedLinear(Layer):
     Examples:
        .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn import FusedLinear
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn import FusedLinear

-            x = paddle.randn([3, 4])
-            linear = FusedLinear(4, 5)
-            y = linear(x)
-            print(y.shape) # [3, 5]
+            >>> x = paddle.randn([3, 4])
+            >>> linear = FusedLinear(4, 5)
+            >>> y = linear(x)
+            >>> print(y.shape)
+            [3, 5]

     """

     def __init__(
diff --git a/python/paddle/incubate/nn/layer/io.py b/python/paddle/incubate/nn/layer/io.py
index f464d93833b2f98fbc2e713ef02f52f3748fe986..3d8b8ec1fe2fd915e771ac406273c4faa6897c53 100644
--- a/python/paddle/incubate/nn/layer/io.py
+++ b/python/paddle/incubate/nn/layer/io.py
@@ -55,21 +55,25 @@ class ListenAndServ:
     Examples:
        .. code-block:: python

-            import paddle.fluid as fluid
-            import paddle
-            with fluid.program_guard(main):
-                serv = layers.ListenAndServ(
-                    "127.0.0.1:6170", ["X"], optimizer_mode=False)
-                with serv.do():
-                    x = paddle.static.data(
-                        shape=[32, 32],
-                        dtype='float32',
-                        name="X")
-                    paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
-                    paddle.scale(x=x, scale=10.0, out=out_var)
-
-            exe = fluid.Executor(place)
-            exe.run(main)
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.nn.layer.io import ListenAndServ
+            >>> import paddle
+            >>> paddle.enable_static()
+            >>> place = paddle.CPUPlace()
+            >>> main = paddle.static.Program()
+            >>> with paddle.static.program_guard(main):
+            ...     serv = ListenAndServ(
+            ...         "127.0.0.1:6170", ["X"], optimizer_mode=False)
+            ...     with serv.do():
+            ...         x = paddle.static.data(
+            ...             shape=[32, 32],
+            ...             dtype='float32',
+            ...             name="X")
+            ...         paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
+            ...         paddle.scale(x=x, scale=10.0)

+            >>> exe = paddle.static.Executor(place)
+            >>> exe.run(main)

     """

     def __init__(self, endpoint, inputs, fan_in=1, optimizer_mode=True):
@@ -115,7 +119,9 @@ class ListenAndServ:
         return parent_block

     def complete_op(self):
-        from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+        from paddle.incubate.distributed.fleet.parameter_server.mode import (
+            DistributedMode,
+        )

         main_program = self.helper.main_program
         current_block = main_program.current_block()