提交 4842fc80 编写于 作者: F Fan Yang 提交者: A. Unique TensorFlower

Use GroupNormalization layer from tf.keras.layers instead of from TF addons.

PiperOrigin-RevId: 524130269
上级 2b804387
......@@ -16,11 +16,9 @@
import tensorflow as tf
from tensorflow_addons.utils import types
@tf.keras.utils.register_keras_serializable(package='Text')
def mish(x: types.TensorLike) -> tf.Tensor:
def mish(x) -> tf.Tensor:
"""Mish activation function.
Mish: A Self Regularized Non-Monotonic Activation Function
......
......@@ -18,7 +18,6 @@ from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, Union
from absl import logging
import tensorflow as tf
import tensorflow_addons as tfa
from official.modeling import tf_utils
from official.vision.ops import spatial_transform_ops
......@@ -351,7 +350,7 @@ class PanopticFPNFusion(tf.keras.Model):
'kernel_regularizer': kernel_regularizer,
'bias_regularizer': bias_regularizer,
}
norm = tfa.layers.GroupNormalization
norm = tf.keras.layers.GroupNormalization
conv2d = tf.keras.layers.Conv2D
activation_fn = tf_utils.get_activation(activation)
if tf.keras.backend.image_data_format() == 'channels_last':
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册