Commit 0eae537d authored by liangyongxiong

fix bugs in cache mechanism

Parent: baa53b2c
@@ -55,7 +55,7 @@ class DataLoader:
        Get events data from log file.
        Returns:
-            Optional[EventsData], None or events data.
+            EventsData, indicates events data.
        """
        return self._loader.get_events_data()
@@ -618,7 +618,7 @@ class _DetailCacheManager(_BaseCacheManager):
            tag (str): The tag name.
        Returns:
-            NamedTuple, the tuple format is `collections.namedtuple('_Tensor', ['wall_time', 'event_step', 'value'])`.
+            list, the NamedTuple format is `collections.namedtuple('_Tensor', ['wall_time', 'event_step', 'value'])`.
                The value will contain the given tag data.
        """
@@ -627,13 +627,17 @@ class _DetailCacheManager(_BaseCacheManager):
            raise TrainJobNotExistError("Can not find the given train job in cache.")
        data_loader = loader_pool[train_id].data_loader
-        events_data = data_loader.get_events_data()
+        tensors = []
        try:
+            events_data = data_loader.get_events_data()
            tensors = events_data.tensors(tag)
        except KeyError:
            error_msg = "Can not find any data in this train job by given tag."
            raise ParamValueError(error_msg)
+        except AttributeError:
+            logger.debug("Train job %r has been deleted or it has not loaded data, "
+                         "and set tags to empty list.", train_id)
        return tensors
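The two except branches cover two distinct failure modes: `events_data.tensors(tag)` raises KeyError when the tag is unknown, and `events_data` is None (so `.tensors` raises AttributeError) when the train job has been deleted or has not loaded data yet. Below is a self-contained sketch of the same control flow, using stub classes in place of the real DataLoader and EventsData; the stubs and sample values are assumptions for illustration, not MindInsight APIs:

```python
import logging

logger = logging.getLogger(__name__)

class ParamValueError(Exception):
    """Stand-in for MindInsight's ParamValueError."""

class StubEventsData:
    """Stand-in for EventsData: holds a tag -> tensor-list mapping."""
    def __init__(self, data):
        self._data = data

    def tensors(self, tag):
        return self._data[tag]  # KeyError for an unknown tag

class StubLoader:
    """Stand-in for DataLoader: returns None until events data is loaded."""
    def __init__(self, events_data=None):
        self._events_data = events_data

    def get_events_data(self):
        return self._events_data

def get_tensors(data_loader, train_id, tag):
    """Mirrors the patched control flow above."""
    tensors = []
    try:
        events_data = data_loader.get_events_data()
        tensors = events_data.tensors(tag)  # AttributeError if events_data is None
    except KeyError:
        raise ParamValueError("Can not find any data in this train job by given tag.")
    except AttributeError:
        logger.debug("Train job %r has been deleted or it has not loaded data, "
                     "and set tags to empty list.", train_id)
    return tensors

# A loader that has not loaded yet now yields an empty list instead of crashing.
print(get_tensors(StubLoader(), "job-1", "loss"))  # -> []
# A loaded job with a known tag still returns its data as before.
print(get_tensors(StubLoader(StubEventsData({"loss": [0.3]})), "job-1", "loss"))  # -> [0.3]
```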
@@ -116,7 +116,17 @@ class TrainTaskManager(BaseProcessor):
                profiler_dir=basic_info.profiler_dir,
                cache_status=train_job.cache_status.value,
            )
-            plugins = self.get_plugins(train_id)
+            if train_job.cache_status == CacheStatus.CACHED:
+                plugins = self.get_plugins(train_id)
+            else:
+                plugins = dict(plugins={
+                    'graph': [],
+                    'scalar': [],
+                    'image': [],
+                    'histogram': [],
+                })
            train_job_item.update(plugins)
            train_jobs.append(train_job_item)
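The intent of this change can be sketched in isolation: plugin tags are only queried for jobs that are fully cached, while jobs that are still loading get an empty plugin shell so the job list can still be rendered. In the sketch below, the CacheStatus members other than CACHED and the helper names are assumptions for illustration, not the exact MindInsight definitions:

```python
from enum import Enum
from types import SimpleNamespace

class CacheStatus(Enum):
    """Assumed members; only CACHED is referenced in the diff above."""
    NOT_IN_CACHE = 'NOT_IN_CACHE'
    CACHING = 'CACHING'
    CACHED = 'CACHED'

def plugins_for(train_job, train_id, get_plugins):
    """Query plugin tags only for fully cached jobs; otherwise return an empty shell."""
    if train_job.cache_status == CacheStatus.CACHED:
        return get_plugins(train_id)
    return dict(plugins={
        'graph': [],
        'scalar': [],
        'image': [],
        'histogram': [],
    })

# A job that is still caching gets the empty shell regardless of what get_plugins returns.
job = SimpleNamespace(cache_status=CacheStatus.CACHING)
print(plugins_for(job, "job-1", lambda tid: {'plugins': {'scalar': ['loss']}}))
# -> {'plugins': {'graph': [], 'scalar': [], 'image': [], 'histogram': []}}
```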