Commit 8644bc9b authored by Dan Mané, committed by TensorFlower Gardener

Switch implementation of tf.summary.scalar back to the ScalarSummary kernel.

I originally intended to replace all of the summary ops with implementations that use the TensorSummary kernel. However, I realized this would be a lot of work with no user-visible benefit, so I decided not to.
Although I had already created a TensorSummary-based wrapper for tf.summary.scalar, now that replacing all summary implementations is a non-goal, it is better to keep using ScalarSummary for consistency and backwards compatibility.

The behavior with respect to TensorBoard and the EventAccumulator is unchanged.
Change: 136742625
Parent 9e601230
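For context: the ScalarSummary kernel records the value in the Summary proto's simple_value field, while the TensorSummary kernel stores the full input tensor. A minimal sketch of the two ops side by side, assuming the TF 1.x-era graph/session API (illustrative only, not part of this change):

    import tensorflow as tf

    x = tf.constant(3.0)
    scalar_summ = tf.summary.scalar('loss', x)             # ScalarSummary kernel
    tensor_summ = tf.summary.tensor_summary('loss_t', x)   # TensorSummary kernel

    with tf.Session() as sess:
      proto = tf.Summary()
      proto.ParseFromString(sess.run(scalar_summ))
      print(proto.value[0].simple_value)   # 3.0, stored as a plain float
      proto.ParseFromString(sess.run(tensor_summ))
      print(proto.value[0].tensor)         # the value arrives as a TensorProto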
@@ -61,7 +61,7 @@ class SummaryOpsTest(tf.test.TestCase):

   def testScalarSummary(self):
     with self.test_session() as sess:
       const = tf.constant(10.0)
-      summ = tf.summary.scalar("foo", const)
+      summ = tf.summary.tensor_summary("foo", const)
       result = sess.run(summ)
       value = self._SummarySingleValue(result)
......
@@ -39,11 +39,11 @@ from google.protobuf import json_format as _json_format
 from tensorflow.core.framework import summary_pb2 as _summary_pb2
 from tensorflow.python.framework import dtypes as _dtypes
 from tensorflow.python.framework import ops as _ops
-from tensorflow.python.framework import tensor_shape as _tensor_shape
-from tensorflow.python.framework.dtypes import as_dtype as _as_dtype
 from tensorflow.python.ops import gen_logging_ops as _gen_logging_ops
 # exports tensor_summary
 # pylint: disable=unused-import
 from tensorflow.python.ops.summary_ops import tensor_summary
 # pylint: enable=unused-import
 from tensorflow.python.util.all_util import remove_undocumented
 from tensorflow.python.util import compat as _compat
@@ -55,7 +55,7 @@ def _collect(val, collections, default_collections):
     _ops.add_to_collection(key, val)


-def scalar(name, tensor, summary_description=None, collections=None):
+def scalar(name, tensor, collections=None):
   """Outputs a `Summary` protocol buffer containing a single scalar value.

   The generated Summary has a Tensor.proto containing the input Tensor.
@@ -63,8 +63,7 @@ def scalar(name, tensor, summary_description=None, collections=None):
   Args:
     name: A name for the generated node. Will also serve as the series name in
       TensorBoard.
-    tensor: A tensor containing a single floating point or integer value.
-    summary_description: Optional summary_description_pb2.SummaryDescription
+    tensor: A real numeric Tensor containing a single value.
     collections: Optional list of graph collections keys. The new summary op is
       added to these collections. Defaults to `[GraphKeys.SUMMARIES]`.
@@ -74,19 +73,12 @@ def scalar(name, tensor, summary_description=None, collections=None):

   Raises:
     ValueError: If tensor has the wrong shape or type.
   """
-  dtype = _as_dtype(tensor.dtype)
-  if dtype.is_quantized or not (dtype.is_integer or dtype.is_floating):
-    raise ValueError("Can't create scalar summary for type %s." % dtype)
-  shape = tensor.get_shape()
-  if not shape.is_compatible_with(_tensor_shape.scalar()):
-    raise ValueError("Can't create scalar summary for shape %s." % shape)
-  if summary_description is None:
-    summary_description = _summary_pb2.SummaryDescription()
-  summary_description.type_hint = 'scalar'
-  return tensor_summary(name, tensor, summary_description, collections)
+  with _ops.name_scope(name, None, [tensor]) as scope:
+    # pylint: disable=protected-access
+    val = _gen_logging_ops._scalar_summary(
+        tags=scope.rstrip('/'), values=tensor, name=scope)
+    _collect(val, collections, [_ops.GraphKeys.SUMMARIES])
+  return val


 def image(name, tensor, max_outputs=3, collections=None):
......
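The new body above builds the summary inside a name scope and registers the output tensor in the SUMMARIES graph collection via _collect. A small sketch of the resulting graph-side behavior, assuming the TF 1.x API (illustrative, not part of the diff):

    import tensorflow as tf

    loss = tf.constant(0.5)
    s = tf.summary.scalar('loss', loss)

    # The op is produced by the restored ScalarSummary kernel, not TensorSummary.
    print(s.op.type)                                  # 'ScalarSummary'
    # _collect() added the output to the default SUMMARIES collection, so
    # merged-summary tooling still picks it up.
    print(tf.get_collection(tf.GraphKeys.SUMMARIES))  # [s]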
@@ -27,56 +27,18 @@ from tensorflow.core.framework import types_pb2
 class ScalarSummaryTest(tf.test.TestCase):

-  def testDtypeErrors(self):
-    def _TryMakingScalarSummary(dtype):
-      base = dtype.base_dtype
-      if base == tf.bool:
-        v = False
-      elif base == tf.string:
-        v = ''
-      elif base.is_complex:
-        v = complex(0, 0)
-      else:
-        v = base.min
-      c = tf.constant(v, dtype)
-      return tf.summary.scalar('name', c)
-
-    for datatype_enum in types_pb2.DataType.values():
-      if (datatype_enum == types_pb2.DT_INVALID or
-          datatype_enum == types_pb2.DT_RESOURCE or
-          datatype_enum == types_pb2.DT_RESOURCE_REF):
-        continue
-      dtype = tf.as_dtype(datatype_enum)
-      if dtype.is_quantized:
-        # Quantized ops are funky, and not expected to work.
-        continue
-      if dtype.is_integer or dtype.is_floating:
-        _TryMakingScalarSummary(dtype)
-        # No exception should be thrown
-      else:
-        with self.assertRaises(ValueError):
-          _TryMakingScalarSummary(dtype)
-
-  def testShapeErrors(self):
-    c1 = tf.constant(0)
-    c2 = tf.zeros(5)
-    c3 = tf.zeros(5, 5)
-    tf.summary.scalar('1', c1)
-    with self.assertRaises(ValueError):
-      tf.summary.scalar('2', c2)
-    with self.assertRaises(ValueError):
-      tf.summary.scalar('3', c3)
-
-  def testTensorSummaryOpCreated(self):
-    c = tf.constant(0)
-    s = tf.summary.scalar('x', c)
-    self.assertEqual(s.op.type, 'TensorSummary')
-    self.assertEqual(s.op.inputs[0], c)
-    description = s.op.get_attr('description')
-    summary_description = summary_pb2.SummaryDescription()
-    json_format.Parse(description, summary_description)
-    self.assertEqual(summary_description.type_hint, 'scalar')
-
+  def testScalarSummary(self):
+    with self.test_session() as s:
+      i = tf.constant(3)
+      with tf.name_scope('outer'):
+        im = tf.summary.scalar('inner', i)
+      summary_str = s.run(im)
+    summary = tf.Summary()
+    summary.ParseFromString(summary_str)
+    values = summary.value
+    self.assertEqual(len(values), 1)
+    self.assertEqual(values[0].tag, 'outer/inner')
+    self.assertEqual(values[0].simple_value, 3.0)

   def testImageSummary(self):
     with self.test_session() as s:
......
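The replacement test exercises the end-to-end path: the tag comes from the enclosing name scope and the value lands in simple_value, which is what TensorBoard's EventAccumulator reads, so its behavior is unchanged as the commit message notes. A rough sketch of that round trip; the writer class and the EventAccumulator import path are assumptions for TF releases of that era (EventAccumulator later moved into the tensorboard package), and the log directory is hypothetical:

    import tensorflow as tf
    from tensorflow.python.summary import event_accumulator

    logdir = '/tmp/scalar_summary_demo'   # hypothetical log directory
    with tf.Session() as sess:
      writer = tf.train.SummaryWriter(logdir, sess.graph)
      s = tf.summary.scalar('loss', tf.constant(0.25))
      writer.add_summary(sess.run(s), global_step=0)
      writer.close()

    acc = event_accumulator.EventAccumulator(logdir)
    acc.Reload()
    print(acc.Scalars('loss'))   # [ScalarEvent(wall_time=..., step=0, value=0.25)]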