From 5ff45357813c306c2f5e26c001d838fc780400f3 Mon Sep 17 00:00:00 2001
From: Huihuang Zheng
Date: Fri, 15 May 2020 20:40:40 +0800
Subject: [PATCH] [Dy2Stat] Fix ProgramTranslator.save_inference_model API Doc
 (#24584)

As the title.
---
 .../dygraph_to_static/program_translator.py   | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py b/python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py
index db7d59096f9..4b57301388e 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/program_translator.py
@@ -559,14 +559,14 @@ class ProgramTranslator(object):
 
         Args:
             dirname (str): the directory to save the inference model.
-            feed (list[int], optional): the input variable indices of the saved
-                inference model. If None, all input variables of the
-                ProgramTranslator would be the inputs of the saved inference
-                model. Default None.
-            fetch (list[int], optional): the output variable indices of the
-                saved inference model. If None, all output variables of the
-                TracedLayer object would be the outputs of the saved inference
-                model. Default None.
+            feed (list[int], optional): the indices of the input variables of the
+                dygraph functions which will be saved as input variables in the
+                inference model. If None, all input variables of the dygraph function
+                would be the inputs of the saved inference model. Default None.
+            fetch (list[int], optional): the indices of the returned variables of the
+                dygraph functions which will be saved as output variables in the
+                inference model. If None, all output variables of the dygraph function
+                would be the outputs of the saved inference model. Default None.
         Returns:
             None
         Examples:
@@ -599,12 +599,12 @@ class ProgramTranslator(object):
                     adam.minimize(loss)
                     net.clear_gradients()
                 # Save inference model.
-                # Note that fetch=[0] means we set 'y' as the inference output.
+                # Note that fetch=[0] means we set 'z' as the inference output.
                 prog_trans = ProgramTranslator()
                 prog_trans.save_inference_model("./dy2stat_infer_model", fetch=[0])
 
-                # In this example, the inference model will be pruned based on input (x) and
-                # output (y). The pruned inference program is going to be saved in the folder
+                # In this example, the inference model will be pruned based on output (z).
+                # The pruned inference program is going to be saved in the folder
                 # "./dy2stat_infer_model" and parameters are going to be saved in separate
                 # files in the folder.
         """
--
GitLab
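
To make the feed/fetch index semantics documented above concrete, here is a minimal sketch of a train-then-save flow against the 2020-era paddle.fluid dygraph API that this patch documents. The SimpleNet layer, the tensor shapes, and the variable names (x, y, z, loss) are illustrative assumptions, not part of the patch; only ProgramTranslator.save_inference_model and its feed/fetch parameters come from the source.

    # A minimal sketch (not from the patch) of how fetch indices select
    # among the values returned by a @declarative-decorated function.
    # Assumes the 2020-era paddle.fluid dygraph API; SimpleNet, the
    # shapes, and the variable names are hypothetical.
    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid.dygraph import Linear, declarative, ProgramTranslator

    class SimpleNet(fluid.dygraph.Layer):
        def __init__(self, in_size, out_size):
            super(SimpleNet, self).__init__()
            self._linear = Linear(in_size, out_size)

        @declarative
        def forward(self, x):
            y = self._linear(x)          # intermediate variable
            z = self._linear(y)          # returned at index 0
            loss = fluid.layers.mean(z)  # returned at index 1
            return z, loss

    with fluid.dygraph.guard(fluid.CPUPlace()):
        net = SimpleNet(8, 8)
        adam = fluid.optimizer.AdamOptimizer(
            learning_rate=0.1, parameter_list=net.parameters())
        x = fluid.dygraph.to_variable(
            np.random.random((4, 8)).astype('float32'))
        for _ in range(10):
            z, loss = net(x)
            loss.backward()
            adam.minimize(loss)
            net.clear_gradients()

    # fetch=[0] refers to position 0 of forward's return tuple, i.e. 'z';
    # per the docstring, the saved program is pruned so that only 'z' is
    # computed at inference. feed is left as None, so every input of
    # forward ('x' here) remains an input of the saved model.
    prog_trans = ProgramTranslator()
    prog_trans.save_inference_model("./dy2stat_infer_model", fetch=[0])

Because feed and fetch take positional indices rather than names, the pruning is tied to the order of the decorated function's arguments and return tuple, which is why the corrected docstring comment identifies index 0 with 'z' rather than 'y'.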