diff --git a/doc/api/v2/run_logic.rst b/doc/api/v2/run_logic.rst
index 94921e1a7b9c0a95931136bfb65d2560dba8b8ee..c383e87c8c1f83e36acc06605a24923d265e198b 100644
--- a/doc/api/v2/run_logic.rst
+++ b/doc/api/v2/run_logic.rst
@@ -6,22 +6,25 @@ Parameters
 ==========
 
 .. automodule:: paddle.v2.parameters
+    :members: Parameters
     :noindex:
 
 Trainer
 =======
 
 .. automodule:: paddle.v2.trainer
+    :members: SGD
     :noindex:
 
 Event
 =====
 
 .. automodule:: paddle.v2.event
+    :members:
     :noindex:
 
 Inference
 =========
 
 .. autofunction:: paddle.v2.infer
     :noindex:
diff --git a/python/paddle/v2/inference.py b/python/paddle/v2/inference.py
index ec3c67d89548f68d705a9b5de80e28597e9829da..8d027b9e5bbdbd4297d51e79faae483d8f47e6e4 100644
--- a/python/paddle/v2/inference.py
+++ b/python/paddle/v2/inference.py
@@ -9,6 +9,16 @@ __all__ = ['infer']
 
 
 class Inference(object):
+    """
+    Inference combines a neural network's output layer(s) and its parameters
+    together to do inference.
+
+    :param output_layer: The network output layer(s) used for inference.
+    :type output_layer: paddle.v2.config_base.Layer or a sequence of
+                        paddle.v2.config_base.Layer
+    :param parameters: The parameters dictionary.
+    :type parameters: paddle.v2.parameters.Parameters
+    """
     def __init__(self, output_layer, parameters):
         topo = topology.Topology(output_layer)
         gm = api.GradientMachine.createFromConfigProto(
@@ -39,7 +49,7 @@
     def iter_infer_field(self, field, **kwargs):
         for result in self.iter_infer(**kwargs):
             yield [each_result[field] for each_result in result]
-    
+
     def infer(self, field='value', **kwargs):
         retv = None
         for result in self.iter_infer_field(field=field, **kwargs):
diff --git a/python/paddle/v2/trainer.py b/python/paddle/v2/trainer.py
index 7bd3e2c565ee00c91402e7dea36c7393fb1a9bdf..1a7b6790ac448e0b6054a94bb2cd46c91649086f 100644
--- a/python/paddle/v2/trainer.py
+++ b/python/paddle/v2/trainer.py
@@ -29,7 +29,8 @@ def default_event_handler(event):
 class SGD(object):
     """
     Simple SGD Trainer.
-    TODO(yuyang18): Complete comments
+    SGD Trainer combines a data reader, the network topology and the
+    update_equation together to train/test a neural network.
 
     :param update_equation: The optimizer object.
     :type update_equation: paddle.v2.optimizer.Optimizer
@@ -65,7 +66,9 @@
         """
         Training method. Will train num_passes of input data.
 
-        :param reader:
+        :param reader: A reader that reads and yields data items. Usually we
+                       use a batched reader to do mini-batch training.
+        :type reader: collections.Iterable
         :param num_passes: The total train passes.
         :param event_handler: Event handler. A method will be invoked when event
                               occurred.
@@ -123,6 +126,15 @@
         self.__gradient_machine__.finish()
 
     def test(self, reader, feeding=None):
+        """
+        Testing method. Will test input data.
+
+        :param reader: A reader that reads and yields data items.
+        :type reader: collections.Iterable
+        :param feeding: Feeding is a map from neural network input names to
+                        the array indices of the items that reader returns.
+        :type feeding: dict
+        """
         feeder = DataFeeder(self.__data_types__, feeding)
         evaluator = self.__gradient_machine__.makeEvaluator()
         out_args = api.Arguments.createArguments(0)
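
Reviewer note, not part of the patch: a minimal sketch of how the pieces documented above fit together, assuming the usual v2 entry points (paddle.init, paddle.layer, paddle.batch, paddle.parameters.create, paddle.optimizer). The toy network, toy_reader, event_handler, and all variable names are made up for illustration, and exact keyword arguments may differ slightly in this revision.

    import paddle.v2 as paddle

    paddle.init(use_gpu=False, trainer_count=1)

    # Toy two-class network; layer names and sizes are illustrative only.
    x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(2))
    y = paddle.layer.data(name='y', type=paddle.data_type.integer_value(2))
    prediction = paddle.layer.fc(input=x, size=2, act=paddle.activation.Softmax())
    cost = paddle.layer.classification_cost(input=prediction, label=y)

    parameters = paddle.parameters.create(cost)
    optimizer = paddle.optimizer.Momentum(learning_rate=0.1)
    trainer = paddle.trainer.SGD(cost=cost,
                                 parameters=parameters,
                                 update_equation=optimizer)


    def toy_reader():
        # Yields plain data items; index 0 maps to 'x', index 1 to 'y' (see feeding).
        for _ in range(256):
            yield [0.0, 1.0], 1
            yield [1.0, 0.0], 0


    def event_handler(event):
        # Invoked by SGD.train when an event occurs, as described in the docstring.
        if isinstance(event, paddle.event.EndPass):
            print("Pass %d finished" % event.pass_id)


    # Mini-batch training with a batched reader, as the reader docstring suggests.
    trainer.train(reader=paddle.batch(toy_reader, batch_size=32),
                  num_passes=2,
                  event_handler=event_handler,
                  feeding={'x': 0, 'y': 1})

    # Testing over the same reader; feeding maps input names to item indices.
    trainer.test(reader=paddle.batch(toy_reader, batch_size=32),
                 feeding={'x': 0, 'y': 1})

    # The Inference class documented in inference.py wraps the output layer(s)
    # and parameters; paddle.infer is the public entry point exported on top of it.
    inferer = paddle.inference.Inference(output_layer=prediction,
                                         parameters=parameters)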