Commit 833c706e authored by Shanqing Cai, committed by TensorFlower Gardener

tfdbg CLI: A few improvements and refactoring

1. Refactor the code in local_cli_wrapper.py that generates the introductory messages about the run() call and TF runtime errors into functions in a new Python file: cli/cli_shared.py.
2. Make the introductory-message-generating functions capable of handling nested lists, tuples, dicts and namedtuples as the fetches of run() calls (see the usage sketch below the commit header).
3. Use the bold font attribute to highlight suggested commands in the introductory messages.
4. Minor tweaks to the title bar color.
5. Minor doc fixes in examples/README.md.
6. Add a new font attribute, "blink", to curses_ui.py and let the error title message use that attribute.
Change: 139866031
Parent: 7dcb0597
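For orientation, here is a minimal sketch of how the new cli_shared helpers are exercised. The identifiers and the expected short description are taken from the cli_shared_test.py added in this commit; graph-mode TensorFlow of this era is assumed, and the sketch is illustrative rather than part of the change itself.

```python
# Minimal sketch; mirrors cli_shared_test.py below. Assumes graph-mode
# TensorFlow, where constant_op.constant adds nodes to the default graph.
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.framework import constant_op

a = constant_op.constant(11.0, name="a")
b = constant_op.constant(22.0, name="b")

# Nested fetches (here a list containing a list) are now flattened for display.
fetches = [a, [b]]
intro = cli_shared.get_run_start_intro(1, fetches, None, {})
print("\n".join(intro.lines))  # intro text plus the bold-highlighted "run" commands

print(cli_shared.get_run_short_description(1, fetches, None))
# -> "run #1: 2 fetches; 0 feeds"
```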
@@ -80,6 +80,17 @@ py_library(
deps = [":debugger_cli_common"],
)
py_library(
name = "cli_shared",
srcs = ["cli/cli_shared.py"],
srcs_version = "PY2AND3",
deps = [
":debugger_cli_common",
"//tensorflow/python:framework",
"//tensorflow/python:variables",
],
)
py_library(
name = "analyzer_cli",
srcs = ["cli/analyzer_cli.py"],
@@ -109,6 +120,7 @@ py_library(
srcs_version = "PY2AND3",
deps = [
":analyzer_cli",
":cli_shared",
":curses_ui",
":debug_data",
":debugger_cli_common",
@@ -298,6 +310,19 @@ py_test(
],
)
py_test(
name = "cli_shared_test",
size = "small",
srcs = [
"cli/cli_shared_test.py",
],
srcs_version = "PY2AND3",
deps = [
":cli_shared",
"//tensorflow/python:framework_test_lib",
],
)
cuda_py_test(
name = "analyzer_cli_test",
size = "small",
......
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Shared functions and classes for tfdbg command-line interface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
def _get_fetch_names(fetches):
"""Get a flattened list of the names in run() call fetches.
Args:
fetches: Fetches of the `Session.run()` call. It may be a Tensor, an
Operation or a Variable. It may also be nested lists, tuples or
dicts. See doc of `Session.run()` for more details.
Returns:
(list of str) A flattened list of fetch names from `fetches`.
"""
lines = []
if isinstance(fetches, (list, tuple)):
for fetch in fetches:
lines.extend(_get_fetch_names(fetch))
elif isinstance(fetches, dict):
for key in fetches:
lines.extend(_get_fetch_names(fetches[key]))
else:
# This ought to be a Tensor, an Operation or a Variable, for which the name
# attribute should be available. (Bottom-out condition of the recursion.)
lines.append(fetches.name)
return lines
def _recommend_command(command, description, indent=2):
"""Generate a RichTextLines object that describes a recommended command.
Args:
command: (str) The command to recommend.
description: (str) A description of what the command does.
indent: (int) How many spaces to indent in the beginning.
Returns:
(RichTextLines) Formatted text (with font attributes) for recommending the
command.
"""
indent_str = " " * indent
lines = [indent_str + command + ":", indent_str + " " + description]
font_attr_segs = {0: [(indent, indent + len(command), "bold")]}
return debugger_cli_common.RichTextLines(lines, font_attr_segs=font_attr_segs)
def get_run_start_intro(run_call_count, fetches, feed_dict, tensor_filters):
"""Generate formatted intro for run-start UI.
Args:
run_call_count: (int) Run call counter.
fetches: Fetches of the `Session.run()` call. See doc of `Session.run()`
for more details.
feed_dict: Feeds to the `Session.run()` call. See doc of `Session.run()`
for more details.
tensor_filters: (dict) A dict from tensor-filter name to tensor-filter
callable.
Returns:
(RichTextLines) Formatted intro message about the `Session.run()` call.
"""
fetch_lines = _get_fetch_names(fetches)
if not feed_dict:
feed_dict_lines = ["(Empty)"]
else:
feed_dict_lines = []
for feed_key in feed_dict:
if isinstance(feed_key, six.string_types):
feed_dict_lines.append(feed_key)
else:
feed_dict_lines.append(feed_key.name)
intro_lines = [
"======================================",
"About to enter Session run() call #%d:" % run_call_count, "",
"Fetch(es):"
]
intro_lines.extend([" " + line for line in fetch_lines])
intro_lines.extend(["", "Feed dict(s):"])
intro_lines.extend([" " + line for line in feed_dict_lines])
intro_lines.extend([
"======================================", "",
"Select one of the following commands to proceed ---->"
])
out = debugger_cli_common.RichTextLines(intro_lines)
out.extend(
_recommend_command("run",
"Execute the run() call with debug tensor-watching"))
out.extend(
_recommend_command(
"run -n", "Execute the run() call without debug tensor-watching"))
out.extend(
_recommend_command(
"run -f <filter_name>",
"Keep executing run() calls until a dumped tensor passes a given, "
"registered filter (conditional breakpoint mode)."))
more_font_attr_segs = {}
more_lines = [" Registered filter(s):"]
if tensor_filters:
filter_names = []
for filter_name in tensor_filters:
filter_names.append(filter_name)
more_lines.append(" * " + filter_name)
more_font_attr_segs[len(more_lines) - 1] = [(10, len(more_lines[-1]),
"green")]
else:
more_lines.append(" (None)")
more_lines.extend([
"",
"For more details, see help below:"
"",
])
out.extend(
debugger_cli_common.RichTextLines(
more_lines, font_attr_segs=more_font_attr_segs))
return out
def get_run_short_description(run_call_count, fetches, feed_dict):
"""Get a short description of the run() call.
Args:
run_call_count: (int) Run call counter.
fetches: Fetches of the `Session.run()` call. See doc of `Session.run()`
for more details.
feed_dict: Feeds to the `Session.run()` call. See doc of `Session.run()`
for more details.
Returns:
(str) A short description of the run() call, including information about
the fetch(es) and feed(s).
"""
description = "run #%d: " % run_call_count
if isinstance(fetches, (ops.Tensor, ops.Operation, variables.Variable)):
description += "1 fetch (%s); " % fetches.name
else:
# Could be (nested) list, tuple, dict or namedtuple.
num_fetches = len(_get_fetch_names(fetches))
if num_fetches > 1:
description += "%d fetches; " % num_fetches
else:
description += "%d fetch; " % num_fetches
if not feed_dict:
description += "0 feeds"
else:
if len(feed_dict) == 1:
for key in feed_dict:
description += "1 feed (%s)" % key.name
else:
description += "%d feeds" % len(feed_dict)
return description
def get_error_intro(tf_error):
"""Generate formatted intro for TensorFlow run-time error.
Args:
tf_error: (errors.OpError) TensorFlow run-time error object.
Returns:
(RichTextLines) Formatted intro message about the run-time OpError, with
sample commands for debugging.
"""
op_name = tf_error.op.name
intro_lines = [
"--------------------------------------",
"!!! An error occurred during the run !!!",
"",
"You may use the following commands to debug:",
]
intro_font_attr_segs = {1: [(0, len(intro_lines[1]), "blink")]}
out = debugger_cli_common.RichTextLines(
intro_lines, font_attr_segs=intro_font_attr_segs)
out.extend(
_recommend_command("ni %s" % op_name,
"Inspect information about the failing op."))
out.extend(
_recommend_command("li -r %s" % op_name,
"List inputs to the failing op, recursively."))
out.extend(
_recommend_command(
"lt", "List all tensors dumped during the failing run() call."))
more_lines = [
"",
"Op name: " + op_name,
"Error type: " + str(type(tf_error)),
"",
"Details:",
str(tf_error),
"",
"WARNING: Using client GraphDef due to the error, instead of "
"executor GraphDefs.",
"--------------------------------------",
"",
]
out.extend(debugger_cli_common.RichTextLines(more_lines))
return out
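A similarly minimal sketch of get_error_intro follows. The hand-built OpError mirrors the GetErrorIntroTest case below and is an assumption about how such an error object would be constructed outside a real failing run() call.

```python
# Minimal sketch of get_error_intro; the hand-built OpError mirrors
# GetErrorIntroTest below rather than an error raised by a real run.
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.framework import errors
from tensorflow.python.ops import variables

var_a = variables.Variable(42.0, name="a")
tf_error = errors.OpError(None, var_a.initializer, "foo description", None)

error_intro = cli_shared.get_error_intro(tf_error)
print(error_intro.lines[1])           # "!!! An error occurred during the run !!!"
print(error_intro.font_attr_segs[1])  # e.g. [(0, 40, "blink")], drawn with curses.A_BLINK
```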
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for the shared functions and classes for tfdbg CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class GetRunStartIntroAndDescriptionTest(test_util.TensorFlowTestCase):
def setUp(self):
self.const_a = constant_op.constant(11.0, name="a")
self.const_b = constant_op.constant(22.0, name="b")
self.const_c = constant_op.constant(33.0, name="c")
def tearDown(self):
ops.reset_default_graph()
def testSingleFetchNoFeeds(self):
run_start_intro = cli_shared.get_run_start_intro(12, self.const_a, None, {})
# Verify line about run() call number.
self.assertEqual("About to enter Session run() call #12:",
run_start_intro.lines[1])
# Verify line about fetch.
const_a_name_line = run_start_intro.lines[4]
self.assertEqual(self.const_a.name, const_a_name_line.strip())
# Verify line about feeds.
feeds_line = run_start_intro.lines[7]
self.assertEqual("(Empty)", feeds_line.strip())
# Verify lines about possible commands and their font attributes.
self.assertEqual("run:", run_start_intro.lines[11][2:])
self.assertEqual([(2, 5, "bold")], run_start_intro.font_attr_segs[11])
self.assertEqual("run -n:", run_start_intro.lines[13][2:])
self.assertEqual([(2, 8, "bold")], run_start_intro.font_attr_segs[13])
self.assertEqual("run -f <filter_name>:", run_start_intro.lines[15][2:])
self.assertEqual([(2, 22, "bold")], run_start_intro.font_attr_segs[15])
# Verify short description.
description = cli_shared.get_run_short_description(12, self.const_a, None)
self.assertEqual("run #12: 1 fetch (a:0); 0 feeds", description)
def testTwoFetchesListNoFeeds(self):
fetches = [self.const_a, self.const_b]
run_start_intro = cli_shared.get_run_start_intro(1, fetches, None, {})
const_a_name_line = run_start_intro.lines[4]
const_b_name_line = run_start_intro.lines[5]
self.assertEqual(self.const_a.name, const_a_name_line.strip())
self.assertEqual(self.const_b.name, const_b_name_line.strip())
feeds_line = run_start_intro.lines[8]
self.assertEqual("(Empty)", feeds_line.strip())
# Verify short description.
description = cli_shared.get_run_short_description(1, fetches, None)
self.assertEqual("run #1: 2 fetches; 0 feeds", description)
def testNestedListAsFetches(self):
fetches = [self.const_c, [self.const_a, self.const_b]]
run_start_intro = cli_shared.get_run_start_intro(1, fetches, None, {})
# Verify lines about the fetches.
self.assertEqual(self.const_c.name, run_start_intro.lines[4].strip())
self.assertEqual(self.const_a.name, run_start_intro.lines[5].strip())
self.assertEqual(self.const_b.name, run_start_intro.lines[6].strip())
# Verify short description.
description = cli_shared.get_run_short_description(1, fetches, None)
self.assertEqual("run #1: 3 fetches; 0 feeds", description)
def testNestedDictAsFetches(self):
fetches = {"c": self.const_c, "ab": {"a": self.const_a, "b": self.const_b}}
run_start_intro = cli_shared.get_run_start_intro(1, fetches, None, {})
# Verify lines about the fetches. The ordering of the dict keys is
# indeterminate.
fetch_names = set()
fetch_names.add(run_start_intro.lines[4].strip())
fetch_names.add(run_start_intro.lines[5].strip())
fetch_names.add(run_start_intro.lines[6].strip())
self.assertEqual({"a:0", "b:0", "c:0"}, fetch_names)
# Verify short description.
description = cli_shared.get_run_short_description(1, fetches, None)
self.assertEqual("run #1: 3 fetches; 0 feeds", description)
def testTwoFetchesAsTupleNoFeeds(self):
fetches = (self.const_a, self.const_b)
run_start_intro = cli_shared.get_run_start_intro(1, fetches, None, {})
const_a_name_line = run_start_intro.lines[4]
const_b_name_line = run_start_intro.lines[5]
self.assertEqual(self.const_a.name, const_a_name_line.strip())
self.assertEqual(self.const_b.name, const_b_name_line.strip())
feeds_line = run_start_intro.lines[8]
self.assertEqual("(Empty)", feeds_line.strip())
# Verify short description.
description = cli_shared.get_run_short_description(1, fetches, None)
self.assertEqual("run #1: 2 fetches; 0 feeds", description)
def testTwoFetchesAsNamedTupleNoFeeds(self):
fetches_namedtuple = namedtuple("fetches", "x y")
fetches = fetches_namedtuple(self.const_b, self.const_c)
run_start_intro = cli_shared.get_run_start_intro(1, fetches, None, {})
const_b_name_line = run_start_intro.lines[4]
const_c_name_line = run_start_intro.lines[5]
self.assertEqual(self.const_b.name, const_b_name_line.strip())
self.assertEqual(self.const_c.name, const_c_name_line.strip())
feeds_line = run_start_intro.lines[8]
self.assertEqual("(Empty)", feeds_line.strip())
# Verify short description.
description = cli_shared.get_run_short_description(1, fetches, None)
self.assertEqual("run #1: 2 fetches; 0 feeds", description)
def testWithFeedDict(self):
feed_dict = {
self.const_a: 10.0,
self.const_b: 20.0,
}
run_start_intro = cli_shared.get_run_start_intro(1, self.const_c, feed_dict,
{})
const_c_name_line = run_start_intro.lines[4]
self.assertEqual(self.const_c.name, const_c_name_line.strip())
# Verify lines about the feed dict.
feed_a_line = run_start_intro.lines[7]
feed_b_line = run_start_intro.lines[8]
self.assertEqual(self.const_a.name, feed_a_line.strip())
self.assertEqual(self.const_b.name, feed_b_line.strip())
# Verify short description.
description = cli_shared.get_run_short_description(1, self.const_c,
feed_dict)
self.assertEqual("run #1: 1 fetch (c:0); 2 feeds", description)
def testTensorFilters(self):
feed_dict = {self.const_a: 10.0}
tensor_filters = {
"filter_a": lambda x: True,
"filter_b": lambda x: False,
}
run_start_intro = cli_shared.get_run_start_intro(1, self.const_c, feed_dict,
tensor_filters)
# Verify the listed names of the tensor filters.
filter_names = set()
filter_names.add(run_start_intro.lines[18].split(" ")[-1])
filter_names.add(run_start_intro.lines[19].split(" ")[-1])
self.assertEqual({"filter_a", "filter_b"}, filter_names)
# Verify short description.
description = cli_shared.get_run_short_description(1, self.const_c,
feed_dict)
self.assertEqual("run #1: 1 fetch (c:0); 1 feed (a:0)", description)
class GetErrorIntroTest(test_util.TensorFlowTestCase):
def setUp(self):
self.var_a = variables.Variable(42.0, name="a")
def tearDown(self):
ops.reset_default_graph()
def testShapeError(self):
tf_error = errors.OpError(None, self.var_a.initializer, "foo description",
None)
error_intro = cli_shared.get_error_intro(tf_error)
self.assertEqual("!!! An error occurred during the run !!!",
error_intro.lines[1])
self.assertEqual([(0, len(error_intro.lines[1]), "blink")],
error_intro.font_attr_segs[1])
self.assertEqual(2, error_intro.lines[4].index("ni a/Assign"))
self.assertEqual([(2, 13, "bold")], error_intro.font_attr_segs[4])
self.assertEqual(2, error_intro.lines[6].index("li -r a/Assign"))
self.assertEqual([(2, 16, "bold")], error_intro.font_attr_segs[6])
self.assertEqual(2, error_intro.lines[8].index("lt"))
self.assertEqual([(2, 4, "bold")], error_intro.font_attr_segs[8])
self.assertTrue(error_intro.lines[11].startswith("Op name:"))
self.assertTrue(error_intro.lines[11].endswith("a/Assign"))
self.assertTrue(error_intro.lines[12].startswith("Error type:"))
self.assertTrue(error_intro.lines[12].endswith(str(type(tf_error))))
self.assertEqual("Details:", error_intro.lines[14])
self.assertTrue(error_intro.lines[15].startswith("foo description"))
if __name__ == "__main__":
googletest.main()
@@ -208,8 +208,10 @@ class CursesUI(object):
self._color_pairs[color_name] = curses.color_pair(color_index)
# A_BOLD is not really a "color". But place it here for convenience.
# A_BOLD or A_BLINK is not really a "color". But place it here for
# convenience.
self._color_pairs["bold"] = curses.A_BOLD
self._color_pairs["blink"] = curses.A_BLINK
# Default color pair to use when a specified color pair does not exist.
self._default_color_pair = self._color_pairs["white"]
@@ -310,7 +312,7 @@ class CursesUI(object):
"""Set an introductory message to the help output of the command registry.
Args:
help_intro: (list of str) Text lines appended to the beginning of the
help_intro: (RichTextLines) Rich text lines appended to the beginning of
the output of the command "help", as introductory information.
"""
......
@@ -419,7 +419,8 @@ class CursesTest(test_util.TensorFlowTestCase):
80,
command_sequence=[string_to_codes("help\n"), self._EXIT])
help_intro = ["This is a curses UI.", "All it can do is 'babble'.", ""]
help_intro = debugger_cli_common.RichTextLines(
["This is a curses UI.", "All it can do is 'babble'.", ""])
ui.register_command_handler(
"babble", self._babble, "babble some", prefix_aliases=["b"])
ui.set_help_intro(help_intro)
@@ -427,7 +428,7 @@ class CursesTest(test_util.TensorFlowTestCase):
self.assertEqual(1, len(ui.unwrapped_outputs))
self.assertEqual(
help_intro + ["babble", " Aliases: b", "", " babble some"],
help_intro.lines + ["babble", " Aliases: b", "", " babble some"],
ui.unwrapped_outputs[0].lines[:7])
def testCommandHistoryNavBackwardOnce(self):
......
@@ -545,18 +545,19 @@ class CommandHandlerRegistry(object):
"""
if not cmd_prefix:
# Print full help information, in sorted order of the command prefixes.
lines = []
help_info = RichTextLines([])
if self._help_intro:
# If help intro is available, show it at the beginning.
lines.extend(self._help_intro)
help_info.extend(self._help_intro)
sorted_prefixes = sorted(self._handlers)
for cmd_prefix in sorted_prefixes:
lines.extend(self._get_help_for_command_prefix(cmd_prefix))
lines = self._get_help_for_command_prefix(cmd_prefix)
lines.append("")
lines.append("")
help_info.extend(RichTextLines(lines))
return RichTextLines(lines)
return help_info
else:
return RichTextLines(self._get_help_for_command_prefix(cmd_prefix))
@@ -564,7 +565,7 @@ class CommandHandlerRegistry(object):
"""Set an introductory message to help output.
Args:
help_intro: (list of str) Text lines appended to the beginning of the
help_intro: (RichTextLines) Rich text lines appended to the
beginning of the output of the command "help", as introductory
information.
"""
......
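The change above means set_help_intro now expects a RichTextLines object rather than a list of strings. A tiny illustrative sketch of the new calling convention (the no-argument CommandHandlerRegistry constructor is an assumption; the rest follows the updated test below):

```python
# Hypothetical: the help intro is now a RichTextLines, not a list of str.
from tensorflow.python.debug.cli import debugger_cli_common

registry = debugger_cli_common.CommandHandlerRegistry()
registry.set_help_intro(
    debugger_cli_common.RichTextLines(["Introductory comments.", ""]))
```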
@@ -480,15 +480,16 @@ class CommandHandlerRegistryTest(test_util.TensorFlowTestCase):
"No operation.\nI.e., do nothing.",
prefix_aliases=["n", "NOOP"])
help_intro = ["Introductory comments.", ""]
help_intro = debugger_cli_common.RichTextLines(
["Introductory comments.", ""])
registry.set_help_intro(help_intro)
output = registry.dispatch_command("help", [])
self.assertEqual(
help_intro + ["help", " Aliases: h", "", " Print this help message.",
"", "", "noop", " Aliases: n, NOOP", "",
" No operation.", " I.e., do nothing.", "", ""],
output.lines)
self.assertEqual(help_intro.lines + [
"help", " Aliases: h", "", " Print this help message.", "", "",
"noop", " Aliases: n, NOOP", "", " No operation.",
" I.e., do nothing.", "", ""
], output.lines)
class RegexFindTest(test_util.TensorFlowTestCase):
......
@@ -382,7 +382,7 @@ run:
```none
bazel build -c opt tensorflow/python/debug:debug_tflearn_iris && \
bazel-bin/tensorflow/python/debug/debug_tflearn_iris
bazel-bin/tensorflow/python/debug/debug_tflearn_iris --debug
```
**Q**: _Does tfdbg help debug runtime errors such as shape mismatches?_
......
@@ -23,16 +23,13 @@ import shutil
import sys
import tempfile
import six
# Google-internal import(s).
from tensorflow.python.debug import debug_data
from tensorflow.python.debug.cli import analyzer_cli
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import curses_ui
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.wrappers import framework
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
_DUMP_ROOT_PREFIX = "tfdbg_"
@@ -173,61 +170,19 @@ class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
self._on_run_start_parsers["invoke_stepper"].format_help(),
prefix_aliases=["s"])
if isinstance(request.fetches, list) or isinstance(request.fetches, tuple):
fetch_lines = [fetch.name for fetch in request.fetches]
else:
fetch_lines = [repr(request.fetches)]
if not request.feed_dict:
feed_dict_lines = ["(Empty)"]
else:
feed_dict_lines = []
for feed_key in request.feed_dict:
if isinstance(feed_key, six.string_types):
feed_dict_lines.append(feed_key)
else:
feed_dict_lines.append(feed_key.name)
# TODO(cais): Refactor into its own function.
help_intro = [
"======================================",
"About to enter Session run() call #%d:" % request.run_call_count, "",
"Fetch(es):"
]
help_intro.extend([" " + line for line in fetch_lines])
help_intro.extend(["", "Feed dict(s):"])
help_intro.extend([" " + line for line in feed_dict_lines])
help_intro.extend([
"======================================", "",
"Select one of the following commands to proceed ---->", " run:",
" Execute the run() call with the debug tensor-watching",
" run -n:",
" Execute the run() call without the debug tensor-watching",
" run -f <filter_name>:",
" Keep executing run() calls until a dumped tensor passes ",
" a given, registered filter emerge. Registered filter(s):"
])
if self._tensor_filters:
filter_names = []
for filter_name in self._tensor_filters:
filter_names.append(filter_name)
help_intro.append(" * " + filter_name)
# Register tab completion for the filter names.
run_start_cli.register_tab_comp_context(["run", "r"], filter_names)
else:
help_intro.append(" (None)")
run_start_cli.register_tab_comp_context(["run", "r"],
list(self._tensor_filters.keys()))
help_intro.extend(["",
"For more details, see help below:"
"",])
run_start_cli.set_help_intro(help_intro)
run_start_cli.set_help_intro(
cli_shared.get_run_start_intro(request.run_call_count, request.fetches,
request.feed_dict, self._tensor_filters))
# Create initial screen output detailing the run.
title = "run-start: " + self._run_description
response = run_start_cli.run_ui(
init_command="help", title=title, title_color="yellow")
init_command="help", title=title, title_color="blue_on_white")
if response == debugger_cli_common.EXPLICIT_USER_EXIT:
# Explicit user "exit" command leads to sys.exit(1).
print(
@@ -262,38 +217,15 @@ class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
self._dump_root, partition_graphs=partition_graphs)
if request.tf_error:
op_name = request.tf_error.op.name
# Prepare help introduction for the TensorFlow error that occurred
# during the run.
help_intro = [
"--------------------------------------",
"!!! An error occurred during the run !!!",
"",
" * Use command \"ni %s\" to see the information about the "
"failing op." % op_name,
" * Use command \"li -r %s\" to see the inputs to the "
"failing op." % op_name,
" * Use command \"lt\" to view the dumped tensors.",
"",
"Op name: " + op_name,
"Error type: " + str(type(request.tf_error)),
"",
"Details:",
str(request.tf_error),
"",
"WARNING: Using client GraphDef due to the error, instead of "
"executor GraphDefs.",
"--------------------------------------",
"",
]
help_intro = cli_shared.get_error_intro(request.tf_error)
init_command = "help"
title_color = "red"
title_color = "red_on_white"
else:
help_intro = None
init_command = "lt"
title_color = "green"
title_color = "black_on_white"
if self._run_till_filter_pass:
if not debug_dump.find(
self._tensor_filters[self._run_till_filter_pass], first_n=1):
@@ -304,7 +236,7 @@ class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
else:
# Some dumped tensor(s) from this run passed the filter.
init_command = "lt -f %s" % self._run_till_filter_pass
title_color = "red"
title_color = "red_on_white"
self._run_till_filter_pass = None
analyzer = analyzer_cli.DebugAnalyzer(debug_dump)
@@ -369,7 +301,8 @@ class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
# completion contexts and registered command handlers.
title = "run-end: " + self._run_description
run_end_cli.set_help_intro(help_intro)
if help_intro:
run_end_cli.set_help_intro(help_intro)
run_end_cli.run_ui(
init_command=init_command, title=title, title_color=title_color)
@@ -461,18 +394,6 @@ class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
"""
self._run_call_count = run_call_count
self._run_description = "run #%d: " % self._run_call_count
if isinstance(fetches, (ops.Tensor, ops.Operation, variables.Variable)):
self._run_description += "fetch: %s; " % fetches.name
else:
# Could be list, tuple, dict or namedtuple.
self._run_description += "%d fetch(es); " % len(fetches)
if not feed_dict:
self._run_description += "0 feeds"
else:
if len(feed_dict) == 1:
self._run_description += "1 feed"
else:
self._run_description += "%d feeds" % len(feed_dict)
self._run_description = cli_shared.get_run_short_description(run_call_count,
fetches,
feed_dict)
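Finally, a hedged end-to-end sketch of the wrapper after this refactor. The direct `LocalCLIDebugWrapperSession(sess)` construction and the `tf.Session`/`tf.constant` calls are assumptions based on the graph-mode tfdbg examples of this era; running it drops into the curses UI rather than printing output.

```python
# Hypothetical usage sketch (graph-mode TensorFlow): wrap a Session so that
# run() drops into the tfdbg CLI, whose intro now comes from cli_shared.
import tensorflow as tf
from tensorflow.python.debug.wrappers import local_cli_wrapper

a = tf.constant(11.0, name="a")
b = tf.constant(22.0, name="b")

sess = tf.Session()
sess = local_cli_wrapper.LocalCLIDebugWrapperSession(sess)

# The run-start screen is built by cli_shared.get_run_start_intro, with the
# suggested "run", "run -n" and "run -f <filter_name>" commands shown in bold.
sess.run([a, b])
```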