# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Functions to build DetectionModel training optimizers."""

import tensorflow as tf

from object_detection.utils import learning_schedules


def build(optimizer_config):
  """Create optimizer based on config.

  Args:
    optimizer_config: An Optimizer proto message.

  Returns:
    An optimizer and a list of variables for summary.

  Raises:
    ValueError: when the optimizer type in the config is unsupported.
  """
  optimizer_type = optimizer_config.WhichOneof('optimizer')
  optimizer = None

  summary_vars = []
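  # Each branch below creates the learning rate tensor for its optimizer and
  # collects it in summary_vars so the caller can attach TensorBoard summaries.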
  if optimizer_type == 'rms_prop_optimizer':
    config = optimizer_config.rms_prop_optimizer
    learning_rate = _create_learning_rate(config.learning_rate)
    summary_vars.append(learning_rate)
    optimizer = tf.train.RMSPropOptimizer(
        learning_rate,
        decay=config.decay,
        momentum=config.momentum_optimizer_value,
        epsilon=config.epsilon)

  if optimizer_type == 'momentum_optimizer':
    config = optimizer_config.momentum_optimizer
    learning_rate = _create_learning_rate(config.learning_rate)
    summary_vars.append(learning_rate)
    optimizer = tf.train.MomentumOptimizer(
        learning_rate,
        momentum=config.momentum_optimizer_value)

  if optimizer_type == 'adam_optimizer':
    config = optimizer_config.adam_optimizer
    learning_rate = _create_learning_rate(config.learning_rate)
    summary_vars.append(learning_rate)
    optimizer = tf.train.AdamOptimizer(learning_rate)

  if optimizer is None:
    raise ValueError('Optimizer %s not supported.' % optimizer_type)

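  # Optionally wrap the optimizer so that exponential moving averages of the
  # model variables are maintained alongside the raw values; the averaging op
  # is created when apply_gradients is called on the wrapped optimizer.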
  if optimizer_config.use_moving_average:
    optimizer = tf.contrib.opt.MovingAverageOptimizer(
        optimizer, average_decay=optimizer_config.moving_average_decay)

  return optimizer, summary_vars


def _create_learning_rate(learning_rate_config):
  """Create optimizer learning rate based on config.

  Args:
    learning_rate_config: A LearningRate proto message.

  Returns:
    A learning rate.

  Raises:
    ValueError: when the learning rate type is unsupported, or a manual step
      schedule is empty.
  """
  learning_rate = None
  learning_rate_type = learning_rate_config.WhichOneof('learning_rate')
  if learning_rate_type == 'constant_learning_rate':
    config = learning_rate_config.constant_learning_rate
    learning_rate = tf.constant(config.learning_rate, dtype=tf.float32,
                                name='learning_rate')

  if learning_rate_type == 'exponential_decay_learning_rate':
    config = learning_rate_config.exponential_decay_learning_rate
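    # Holds the rate at burnin_learning_rate for the first burnin_steps, then
    # decays it by decay_factor every decay_steps, never dropping below
    # min_learning_rate.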
    learning_rate = learning_schedules.exponential_decay_with_burnin(
        tf.train.get_or_create_global_step(),
        config.initial_learning_rate,
        config.decay_steps,
        config.decay_factor,
        burnin_learning_rate=config.burnin_learning_rate,
        burnin_steps=config.burnin_steps,
        min_learning_rate=config.min_learning_rate,
        staircase=config.staircase)

  if learning_rate_type == 'manual_step_learning_rate':
    config = learning_rate_config.manual_step_learning_rate
    if not config.schedule:
      raise ValueError('Empty learning rate schedule.')
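    # Each schedule entry supplies a (step, learning_rate) pair; the initial
    # learning rate applies until the first boundary step is reached.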
    learning_rate_step_boundaries = [x.step for x in config.schedule]
    learning_rate_sequence = [config.initial_learning_rate]
    learning_rate_sequence += [x.learning_rate for x in config.schedule]
    learning_rate = learning_schedules.manual_stepping(
        tf.train.get_or_create_global_step(), learning_rate_step_boundaries,
        learning_rate_sequence, config.warmup)

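  # Cosine decay from learning_rate_base toward zero over total_steps, with a
  # linear warmup from warmup_learning_rate over the first warmup_steps and an
  # optional hold at the base rate for hold_base_rate_steps.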
  if learning_rate_type == 'cosine_decay_learning_rate':
    config = learning_rate_config.cosine_decay_learning_rate
    learning_rate = learning_schedules.cosine_decay_with_warmup(
        tf.train.get_or_create_global_step(),
        config.learning_rate_base,
        config.total_steps,
        config.warmup_learning_rate,
        config.warmup_steps,
        config.hold_base_rate_steps)

  if learning_rate is None:
    raise ValueError('Learning_rate %s not supported.' % learning_rate_type)

  return learning_rate
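

# A minimal usage sketch (illustrative only, not part of the library): builds
# a momentum optimizer from a text-format Optimizer proto. Assumes the
# object_detection protos have been compiled and are importable as
# object_detection.protos.optimizer_pb2, and that a TF1.x graph environment
# is in use.
if __name__ == '__main__':
  from google.protobuf import text_format

  from object_detection.protos import optimizer_pb2

  optimizer_text_proto = """
    momentum_optimizer {
      learning_rate {
        constant_learning_rate {
          learning_rate: 0.01
        }
      }
      momentum_optimizer_value: 0.9
    }
    use_moving_average: false
  """
  optimizer_proto = optimizer_pb2.Optimizer()
  text_format.Merge(optimizer_text_proto, optimizer_proto)
  momentum_optimizer, lr_summary_vars = build(optimizer_proto)
  # momentum_optimizer is a tf.train.MomentumOptimizer; lr_summary_vars holds
  # the constant learning-rate tensor created by _create_learning_rate.
  print(momentum_optimizer)
  print(lr_summary_vars)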