Commit 296b7e3a authored by J JiabinYang

save model every 100k batches

Parent b102d256
@@ -135,12 +135,12 @@ def infer_during_train(args):
     Scope = fluid.Scope()
     inference_prog()
     while True:
-        time.sleep(1)
+        time.sleep(60)
         current_list = os.listdir(args.model_output_dir)
-        logger.info("current_list is : {}".format(current_list))
-        logger.info("model_file_list is : {}".format(model_file_list))
+        # logger.info("current_list is : {}".format(current_list))
+        # logger.info("model_file_list is : {}".format(model_file_list))
         if set(model_file_list) == set(current_list):
-            logger.info("they are the same")
+            logger.info("No New models created")
             pass
         else:
             increment_models = list()
......
@@ -3,6 +3,12 @@
 import numpy as np
 import preprocess
 import logging
+
+logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s')
+logger = logging.getLogger("fluid")
+logger.setLevel(logging.INFO)
+
+
 class Word2VecReader(object):
     def __init__(self, dict_path, data_path, filelist, window_size=5):
@@ -73,6 +79,8 @@ class Word2VecReader(object):
         def _reader():
             for file in self.filelist:
                 with open(self.data_path_ + "/" + file, 'r') as f:
+                    logger.info("running data in {}".format(self.data_path_ +
+                                                            "/" + file))
                     for line in f:
                         line = preprocess.text_strip(line)
                         word_ids = [
@@ -88,6 +96,8 @@ class Word2VecReader(object):
         def _reader_hs():
             for file in self.filelist:
                 with open(self.data_path_ + "/" + file, 'r') as f:
+                    logger.info("running data in {}".format(self.data_path_ +
+                                                            "/" + file))
                     for line in f:
                         line = preprocess.text_strip(line)
                         word_ids = [
......
@@ -196,18 +196,19 @@ def train_loop(args, train_program, reader, py_reader, loss, trainer_id):
                         os.getenv("CPU_NUM"))
                 logger.info("Time used: {}, Samples/Sec: {}".format(
                     elapsed, samples / elapsed))
-                if batch_id == 200 or batch_id == 100:
-                    model_dir = args.model_output_dir + '/batch-' + str(
-                        batch_id)
-                    fluid.io.save_persistables(executor=exe, dirname=model_dir)
-                    with open(model_dir + "/_success", 'w+') as f:
-                        f.write(str(batch_id))
-                # calculate infer result each 100 batches
+                # calculate infer result every 1000 batches when using --with_infer_test
                 if args.with_infer_test:
                     if batch_id % 1000 == 0 and batch_id != 0:
                         model_dir = args.model_output_dir + '/batch-' + str(
                             batch_id)
                         inference_test(global_scope(), model_dir, args)
+                if batch_id % 100000 == 0 and batch_id != 0:
+                    model_dir = args.model_output_dir + '/batch-' + str(
+                        batch_id)
+                    fluid.io.save_persistables(executor=exe, dirname=model_dir)
+                    with open(model_dir + "/_success", 'w+') as f:
+                        f.write(str(batch_id))
                 batch_id += 1
         except fluid.core.EOFException:
......
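For readers of this change, here is a minimal, self-contained sketch of the checkpointing pattern the commit switches to: save all persistable variables every fixed number of batches and write a _success marker so the polling infer_during_train watcher never reads a half-written checkpoint directory. This assumes Paddle Fluid 1.x APIs; the save_checkpoint helper, the tiny network, and the output path are illustrative, not taken from the repository.

import os
import paddle.fluid as fluid

def save_checkpoint(exe, output_dir, batch_id):
    # Save every persistable variable under output_dir/batch-<id>, then
    # write a _success marker so a watching process only picks up
    # checkpoint directories that are fully written.
    model_dir = os.path.join(output_dir, 'batch-' + str(batch_id))
    fluid.io.save_persistables(executor=exe, dirname=model_dir)
    with open(os.path.join(model_dir, "_success"), 'w+') as f:
        f.write(str(batch_id))

# Tiny program with one persistable FC layer, just to have state to save.
x = fluid.layers.data(name='x', shape=[4], dtype='float32')
y = fluid.layers.fc(input=x, size=2)
exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

SAVE_EVERY = 100000  # the interval this commit introduces
for batch_id in range(300001):
    # ... one training step would run here ...
    if batch_id % SAVE_EVERY == 0 and batch_id != 0:
        save_checkpoint(exe, '/tmp/word2vec_models', batch_id)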