Commit 8644bc9b authored by Dan Mané, committed by TensorFlower Gardener

Switch the implementation of tf.summary.scalar back to the ScalarSummary kernel.

I originally intended to replace all the summary ops with implementations that use the TensorSummary kernel. However, I realized this would create a lot of work without any user-visible benefit, so I decided not to.
I had already created a TensorSummary-backed wrapper for tf.summary.scalar, but now that replacing every summary implementation is a non-goal, it is better to keep using ScalarSummary for consistency and backwards compatibility.

The behavior with respect to TensorBoard and the EventAccumulator is unchanged.
Change: 136742625
Parent 9e601230
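For reference, a minimal sketch of the unchanged user-facing behavior, mirroring the new test in this change. It assumes the graph-mode API of this era (tf.constant, tf.Session, tf.summary.scalar); the 'loss' name is just an illustrative placeholder, not part of the change.

import tensorflow as tf

loss = tf.constant(0.25)                # any real numeric scalar tensor
summ = tf.summary.scalar('loss', loss)  # backed by the ScalarSummary kernel again

with tf.Session() as sess:
  summary_str = sess.run(summ)          # serialized Summary protocol buffer

summary = tf.Summary()
summary.ParseFromString(summary_str)
# Scalar values still arrive as simple_value, which is what TensorBoard's
# scalar charts and the EventAccumulator consume.
print(summary.value[0].tag, summary.value[0].simple_value)  # loss 0.25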
@@ -61,7 +61,7 @@ class SummaryOpsTest(tf.test.TestCase):
   def testScalarSummary(self):
     with self.test_session() as sess:
       const = tf.constant(10.0)
-      summ = tf.summary.scalar("foo", const)
+      summ = tf.summary.tensor_summary("foo", const)
       result = sess.run(summ)
       value = self._SummarySingleValue(result)
...
@@ -39,11 +39,11 @@ from google.protobuf import json_format as _json_format
 from tensorflow.core.framework import summary_pb2 as _summary_pb2
 from tensorflow.python.framework import dtypes as _dtypes
 from tensorflow.python.framework import ops as _ops
-from tensorflow.python.framework import tensor_shape as _tensor_shape
-from tensorflow.python.framework.dtypes import as_dtype as _as_dtype
 from tensorflow.python.ops import gen_logging_ops as _gen_logging_ops
 # exports tensor_summary
+# pylint: disable=unused-import
 from tensorflow.python.ops.summary_ops import tensor_summary
+# pylint: enable=unused-import
 from tensorflow.python.util.all_util import remove_undocumented
 from tensorflow.python.util import compat as _compat
@@ -55,7 +55,7 @@ def _collect(val, collections, default_collections):
     _ops.add_to_collection(key, val)


-def scalar(name, tensor, summary_description=None, collections=None):
+def scalar(name, tensor, collections=None):
   """Outputs a `Summary` protocol buffer containing a single scalar value.

   The generated Summary has a Tensor.proto containing the input Tensor.
@@ -63,8 +63,7 @@ def scalar(name, tensor, summary_description=None, collections=None):

   Args:
     name: A name for the generated node. Will also serve as the series name in
       TensorBoard.
-    tensor: A tensor containing a single floating point or integer value.
-    summary_description: Optional summary_description_pb2.SummaryDescription
+    tensor: A real numeric Tensor containing a single value.
     collections: Optional list of graph collections keys. The new summary op is
       added to these collections. Defaults to `[GraphKeys.SUMMARIES]`.
@@ -74,19 +73,12 @@ def scalar(name, tensor, summary_description=None, collections=None):
   Raises:
     ValueError: If tensor has the wrong shape or type.
   """
-  dtype = _as_dtype(tensor.dtype)
-  if dtype.is_quantized or not (dtype.is_integer or dtype.is_floating):
-    raise ValueError("Can't create scalar summary for type %s." % dtype)
-
-  shape = tensor.get_shape()
-  if not shape.is_compatible_with(_tensor_shape.scalar()):
-    raise ValueError("Can't create scalar summary for shape %s." % shape)
-
-  if summary_description is None:
-    summary_description = _summary_pb2.SummaryDescription()
-  summary_description.type_hint = 'scalar'
-  return tensor_summary(name, tensor, summary_description, collections)
+  with _ops.name_scope(name, None, [tensor]) as scope:
+    # pylint: disable=protected-access
+    val = _gen_logging_ops._scalar_summary(
+        tags=scope.rstrip('/'), values=tensor, name=scope)
+    _collect(val, collections, [_ops.GraphKeys.SUMMARIES])
+    return val


 def image(name, tensor, max_outputs=3, collections=None):
...
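A hypothetical usage sketch of the `collections` argument handled by `_collect` above: by default the new op is added to `GraphKeys.SUMMARIES`, while an explicit list routes it to custom collections instead. The key 'eval_summaries' and the tensor names below are made up for illustration.

import tensorflow as tf

accuracy = tf.constant(0.9)

# Goes into the default GraphKeys.SUMMARIES collection.
train_summ = tf.summary.scalar('accuracy', accuracy)

# Goes only into the custom 'eval_summaries' collection.
eval_summ = tf.summary.scalar('eval_accuracy', accuracy,
                              collections=['eval_summaries'])

print(tf.get_collection(tf.GraphKeys.SUMMARIES))  # [train_summ]
print(tf.get_collection('eval_summaries'))        # [eval_summ]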
@@ -27,56 +27,18 @@ from tensorflow.core.framework import types_pb2
 class ScalarSummaryTest(tf.test.TestCase):

-  def testDtypeErrors(self):
-
-    def _TryMakingScalarSummary(dtype):
-      base = dtype.base_dtype
-      if base == tf.bool:
-        v = False
-      elif base == tf.string:
-        v = ''
-      elif base.is_complex:
-        v = complex(0, 0)
-      else:
-        v = base.min
-      c = tf.constant(v, dtype)
-      return tf.summary.scalar('name', c)
-
-    for datatype_enum in types_pb2.DataType.values():
-      if (datatype_enum == types_pb2.DT_INVALID or
-          datatype_enum == types_pb2.DT_RESOURCE or
-          datatype_enum == types_pb2.DT_RESOURCE_REF):
-        continue
-      dtype = tf.as_dtype(datatype_enum)
-      if dtype.is_quantized:
-        # Quantized ops are funky, and not expected to work.
-        continue
-      if dtype.is_integer or dtype.is_floating:
-        _TryMakingScalarSummary(dtype)
-        # No exception should be thrown
-      else:
-        with self.assertRaises(ValueError):
-          _TryMakingScalarSummary(dtype)
-
-  def testShapeErrors(self):
-    c1 = tf.constant(0)
-    c2 = tf.zeros(5)
-    c3 = tf.zeros(5, 5)
-    tf.summary.scalar('1', c1)
-    with self.assertRaises(ValueError):
-      tf.summary.scalar('2', c2)
-    with self.assertRaises(ValueError):
-      tf.summary.scalar('3', c3)
-
-  def testTensorSummaryOpCreated(self):
-    c = tf.constant(0)
-    s = tf.summary.scalar('x', c)
-    self.assertEqual(s.op.type, 'TensorSummary')
-    self.assertEqual(s.op.inputs[0], c)
-
-    description = s.op.get_attr('description')
-    summary_description = summary_pb2.SummaryDescription()
-    json_format.Parse(description, summary_description)
-    self.assertEqual(summary_description.type_hint, 'scalar')
+  def testScalarSummary(self):
+    with self.test_session() as s:
+      i = tf.constant(3)
+      with tf.name_scope('outer'):
+        im = tf.summary.scalar('inner', i)
+      summary_str = s.run(im)
+    summary = tf.Summary()
+    summary.ParseFromString(summary_str)
+    values = summary.value
+    self.assertEqual(len(values), 1)
+    self.assertEqual(values[0].tag, 'outer/inner')
+    self.assertEqual(values[0].simple_value, 3.0)

   def testImageSummary(self):
     with self.test_session() as s:
...
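Because scalars are still serialized as simple_value, downstream readers such as the EventAccumulator are unaffected. A hedged sketch of reading them back with tf.train.summary_iterator; the events-file path below is a placeholder, not something produced by this change.

import tensorflow as tf

# Iterate over Event protos in an existing events file written by a
# summary writer; the filename is a placeholder.
for event in tf.train.summary_iterator('./logs/events.out.tfevents.example'):
  for value in event.summary.value:
    if value.HasField('simple_value'):
      print(event.step, value.tag, value.simple_value)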