From 6d58d6dc4803d202c908e6e14693470394ac2b5c Mon Sep 17 00:00:00 2001 From: Helin Wang Date: Tue, 1 May 2018 15:07:39 -0700 Subject: [PATCH] add comments --- python/paddle/fluid/inferencer.py | 6 +++++- python/paddle/fluid/params.py | 8 ++++++++ python/paddle/fluid/trainer.py | 5 +++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/python/paddle/fluid/inferencer.py b/python/paddle/fluid/inferencer.py index 276bc03109c..21277cb4938 100644 --- a/python/paddle/fluid/inferencer.py +++ b/python/paddle/fluid/inferencer.py @@ -20,9 +20,13 @@ __all__ = [ class Inferencer(object): def __init__(self, network_func, params, place=None): - self.network_func = network_func + # we need to generate a framework.Program by calling + # network_func. Reference: fluid.program_guard in test_word2vec.py + # move the default_main_program to self.program + # and run the default_startup program self.params = params self.place = place def infer(self, inputs): + # run self.program pass diff --git a/python/paddle/fluid/params.py b/python/paddle/fluid/params.py index fcdb8617a97..8d9d8f21340 100644 --- a/python/paddle/fluid/params.py +++ b/python/paddle/fluid/params.py @@ -27,7 +27,15 @@ class Params(object): self._load(path) def _load(self, path): + # reference: load_persistables in io.py pass def save(self, path): + # reference: save_persistables in io.py + pass + + def add_params(self, scope): + # take the keys from the scope, + # if a key does not already exist in self.scope, + # add the key and value into self.scope. 
pass diff --git a/python/paddle/fluid/trainer.py b/python/paddle/fluid/trainer.py index a878ed9d780..7d4c2837c10 100644 --- a/python/paddle/fluid/trainer.py +++ b/python/paddle/fluid/trainer.py @@ -34,10 +34,15 @@ class Event(Enum): class Trainer(object): def __init__(self, network_func, optimizer, params=None, place=None): + # we need to generate a framework.Program by calling + # network_func. Reference: fluid.program_guard in test_word2vec.py + # move the default_main_program to self.program + # and run the default_startup program self.network_func = network_func self.optimizer = optimizer self.params = params self.place = place + # TODO(helin): support distributed training def train(self, reader, num_epochs, event_handler): pass -- GitLab