Commit 6d58d6dc authored by H Helin Wang

add comments

Parent a2ffbd53
......@@ -20,9 +20,13 @@ __all__ = [
class Inferencer(object):
    def __init__(self, network_func, params, place=None):
        self.network_func = network_func
        # We need to generate a framework.Program by calling
        # network_func. Reference: fluid.program_guard in test_word2vec.py.
        # Move the default_main_program to self.program
        # and run the default_startup program.
        self.params = params
        self.place = place

    def infer(self, inputs):
        # Run self.program.
        pass
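
Not part of the diff: a minimal sketch of how these comments might be fleshed out, using `fluid.Program`, `fluid.program_guard`, `fluid.Executor`, and `fluid.CPUPlace` as they appear elsewhere in the repo. The idea that `network_func` returns the prediction variable and that `inputs` is a feed dict are assumptions of the sketch, not something the commit states.

```python
import paddle.fluid as fluid


class Inferencer(object):
    def __init__(self, network_func, params, place=None):
        self.network_func = network_func
        self.params = params
        self.place = place if place is not None else fluid.CPUPlace()

        # Build a fresh main/startup program pair and construct the
        # inference network inside them (cf. fluid.program_guard usage
        # in test_word2vec.py).
        self.program = fluid.Program()
        self.startup_program = fluid.Program()
        with fluid.program_guard(self.program, self.startup_program):
            # Assumption: network_func returns the prediction variable.
            self.predict_var = self.network_func()

        self.exe = fluid.Executor(self.place)
        self.exe.run(self.startup_program)
        # Restoring weights from self.params is omitted here; it would
        # use fluid.io.load_persistables (see the Params sketch below).

    def infer(self, inputs):
        # Assumption: "inputs" is a feed dict mapping input variable
        # names to numpy arrays.
        return self.exe.run(self.program,
                            feed=inputs,
                            fetch_list=[self.predict_var])
```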
......@@ -27,7 +27,15 @@ class Params(object):
        self._load(path)

    def _load(self, path):
        # Reference: load_persistables in io.py.
        pass

    def save(self, path):
        # Reference: save_persistables in io.py.
        pass

    def add_params(self, scope):
        # Take the keys from the given scope; if a key does not already
        # exist in self.scope, add the key and its value to self.scope.
        pass
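
Also not part of the diff: a rough sketch of `_load` and `save` on top of `fluid.io.load_persistables` / `fluid.io.save_persistables`. It assumes `fluid.scope_guard` and `fluid.core.Scope` are available at these names, and that the program defining the persistable variables is the current default main program; `add_params` is left as-is.

```python
import paddle.fluid as fluid


class Params(object):
    def __init__(self, path=None):
        self.scope = fluid.core.Scope()
        if path is not None:
            self._load(path)

    def _load(self, path):
        # Load persistable variables (parameters, optimizer state) from
        # "path" into self.scope. Assumes the program that defines those
        # variables is the current default main program.
        exe = fluid.Executor(fluid.CPUPlace())
        with fluid.scope_guard(self.scope):
            fluid.io.load_persistables(exe, path)

    def save(self, path):
        # Mirror image of _load: write persistable variables to "path".
        exe = fluid.Executor(fluid.CPUPlace())
        with fluid.scope_guard(self.scope):
            fluid.io.save_persistables(exe, path)
```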
......@@ -34,10 +34,15 @@ class Event(Enum):
class Trainer(object):
    def __init__(self, network_func, optimizer, params=None, place=None):
        # We need to generate a framework.Program by calling
        # network_func. Reference: fluid.program_guard in test_word2vec.py.
        # Move the default_main_program to self.program
        # and run the default_startup program on an empty scope.
        self.network_func = network_func
        self.optimizer = optimizer
        self.params = params
        self.place = place
        # TODO(helin): support distributed training

    def train(self, reader, num_epochs, event_handler):
        pass
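
Again outside the diff: a sketch of what the `Trainer` constructor could look like if it follows the comments above. It assumes `network_func` returns the loss variable and relies on `optimizer.minimize` to append the backward and parameter-update ops; the training loop itself is only stubbed, since the reader/event protocol is not defined in this commit.

```python
import paddle.fluid as fluid


class Trainer(object):
    def __init__(self, network_func, optimizer, params=None, place=None):
        self.network_func = network_func
        self.optimizer = optimizer
        self.params = params
        self.place = place if place is not None else fluid.CPUPlace()

        # Build the training program: call network_func under
        # program_guard, then let the optimizer append the backward and
        # parameter-update ops.
        self.program = fluid.Program()
        self.startup_program = fluid.Program()
        with fluid.program_guard(self.program, self.startup_program):
            # Assumption: network_func returns the loss variable.
            loss = self.network_func()
            self.optimizer.minimize(loss)

        # Run the startup program once on an empty scope so parameters
        # are initialized before training starts.
        self.scope = fluid.core.Scope()
        exe = fluid.Executor(self.place)
        with fluid.scope_guard(self.scope):
            exe.run(self.startup_program)

    def train(self, reader, num_epochs, event_handler):
        # The loop would iterate the reader, run self.program with an
        # Executor, and fire event_handler at epoch/step boundaries.
        pass
```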
......