Commit 214d1105 authored by Matt, committed by TensorFlower Gardener

PR #47726: updated convolutional_recurrent.py to include 1D, 2D, and 3D ConvLSTM instances.

Imported from GitHub PR https://github.com/tensorflow/tensorflow/pull/47726

Updates ConvLSTM to include the 1D & 3D cases, as suggested in [this thread](https://github.com/tensorflow/tensorflow/issues/42399).

- Implementation involves creating base N-dimensional `ConvRNN`, `ConvLSTMCell`, and `ConvLSTM` classes, which the concrete `ConvLSTM1D`, `ConvLSTM2D`, and `ConvLSTM3D` implementations inherit from (a usage sketch follows this list). Convolution dimensionality is currently capped at 3D, since that is the highest rank implemented in `keras.backend`.

- Rewrote tests to include the 1D & 3D cases. The error in ConvLSTM2D documented in [this thread](https://github.com/tensorflow/tensorflow/issues/36901) still persists.
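As a quick illustration of the new API (not part of the diff; shapes and filter counts are arbitrary), each layer expects one additional spatial axis per rank with `data_format='channels_last'`:

```python
import numpy as np
import tensorflow as tf

# Illustrative sketch only: shapes and filter counts are arbitrary.
# ConvLSTM1D: (samples, time, rows, channels)
x1 = np.random.rand(4, 10, 32, 3).astype('float32')
y1 = tf.keras.layers.ConvLSTM1D(filters=8, kernel_size=3, padding='same')(x1)
print(y1.shape)  # (4, 32, 8) -- last output of the sequence

# ConvLSTM2D (existing layer, unchanged API): (samples, time, rows, cols, channels)
x2 = np.random.rand(4, 10, 32, 32, 3).astype('float32')
y2 = tf.keras.layers.ConvLSTM2D(filters=8, kernel_size=3, padding='same')(x2)
print(y2.shape)  # (4, 32, 32, 8)

# ConvLSTM3D: (samples, time, depth, rows, cols, channels)
x3 = np.random.rand(4, 10, 16, 16, 16, 3).astype('float32')
y3 = tf.keras.layers.ConvLSTM3D(filters=8, kernel_size=3, padding='same')(x3)
print(y3.shape)  # (4, 16, 16, 16, 8)
```

Passing `return_sequences=True` keeps the time axis in each output.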
Copybara import of the project:

--
b2b92b83478dd2a821af9a83a65077e99fe5300d by Matt Lyon <matthewlyon18@gmail.com>:

updated ConvLSTM

--
960eb402f8c2d4c1f5b14703824c0307ed8bc000 by Matt Lyon <matthewlyon18@gmail.com>:

updates to coding style

--
660d02ebccbd1327d2674ff35bc0af2303c612d5 by Matt Lyon <matthewlyon18@gmail.com>:

fix docstrings

PiperOrigin-RevId: 379750075
Parent 95c2966d
path: "tensorflow.keras.layers.ConvLSTM1D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM1D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"
mtype: "<type \'property\'>"
}
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "bias_constraint"
mtype: "<type \'property\'>"
}
member {
name: "bias_initializer"
mtype: "<type \'property\'>"
}
member {
name: "bias_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "data_format"
mtype: "<type \'property\'>"
}
member {
name: "dilation_rate"
mtype: "<type \'property\'>"
}
member {
name: "dropout"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "filters"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "kernel_constraint"
mtype: "<type \'property\'>"
}
member {
name: "kernel_initializer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_size"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "padding"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_activation"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_constraint"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_dropout"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_initializer"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "states"
mtype: "<type \'property\'>"
}
member {
name: "strides"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "unit_forget_bias"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "use_bias"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'recurrent_activation\', \'use_bias\', \'kernel_initializer\', \'recurrent_initializer\', \'bias_initializer\', \'unit_forget_bias\', \'kernel_regularizer\', \'recurrent_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'recurrent_constraint\', \'bias_constraint\', \'return_sequences\', \'return_state\', \'go_backwards\', \'stateful\', \'dropout\', \'recurrent_dropout\'], varargs=None, keywords=kwargs, defaults=[\'1\', \'valid\', \'None\', \'1\', \'tanh\', \'hard_sigmoid\', \'True\', \'glorot_uniform\', \'orthogonal\', \'zeros\', \'True\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'False\', \'False\', \'False\', \'False\', \'0.0\', \'0.0\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\', \'inputs\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'mask\', \'training\', \'initial_state\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\', \'states\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
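A minimal construction sketch for the layer covered by the golden entry above (values are illustrative, not from the commit); defaults such as `strides=1`, `padding='valid'`, `activation='tanh'`, and `recurrent_activation='hard_sigmoid'` follow the `__init__` argspec:

```python
import tensorflow as tf

layer = tf.keras.layers.ConvLSTM1D(
    filters=16,
    kernel_size=5,
    padding='valid',
    return_sequences=True,  # keep the time axis in the output
    dropout=0.1,
    recurrent_dropout=0.1,
)
# Input rank 4: (samples, time, rows, channels).
out = layer(tf.random.normal([2, 6, 20, 4]))
print(out.shape)  # (2, 6, 16, 16): rows shrink to 20 - 5 + 1 with 'valid' padding
```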
path: "tensorflow.keras.layers.ConvLSTM2D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM2D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN2D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
......
path: "tensorflow.keras.layers.ConvLSTM3D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM3D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"
mtype: "<type \'property\'>"
}
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "bias_constraint"
mtype: "<type \'property\'>"
}
member {
name: "bias_initializer"
mtype: "<type \'property\'>"
}
member {
name: "bias_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "data_format"
mtype: "<type \'property\'>"
}
member {
name: "dilation_rate"
mtype: "<type \'property\'>"
}
member {
name: "dropout"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "filters"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "kernel_constraint"
mtype: "<type \'property\'>"
}
member {
name: "kernel_initializer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_size"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "padding"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_activation"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_constraint"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_dropout"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_initializer"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "states"
mtype: "<type \'property\'>"
}
member {
name: "strides"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "unit_forget_bias"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "use_bias"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'recurrent_activation\', \'use_bias\', \'kernel_initializer\', \'recurrent_initializer\', \'bias_initializer\', \'unit_forget_bias\', \'kernel_regularizer\', \'recurrent_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'recurrent_constraint\', \'bias_constraint\', \'return_sequences\', \'return_state\', \'go_backwards\', \'stateful\', \'dropout\', \'recurrent_dropout\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1, 1)\', \'valid\', \'None\', \'(1, 1, 1)\', \'tanh\', \'hard_sigmoid\', \'True\', \'glorot_uniform\', \'orthogonal\', \'zeros\', \'True\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'False\', \'False\', \'False\', \'False\', \'0.0\', \'0.0\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\', \'inputs\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'mask\', \'training\', \'initial_state\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\', \'states\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
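A corresponding sketch for the `ConvLSTM3D` entry above (illustrative shapes, not from the commit): with `return_state=True` the call returns the output followed by the two recurrent states, which is also what the tests in this change assert.

```python
import tensorflow as tf

layer = tf.keras.layers.ConvLSTM3D(
    filters=4,
    kernel_size=(3, 3, 3),  # the argspec defaults for strides/dilation_rate are (1, 1, 1)
    padding='same',
    return_state=True,
)
# Input rank 6: (samples, time, depth, rows, cols, channels).
x = tf.random.normal([1, 2, 8, 8, 8, 2])
output, state_h, state_c = layer(x)
# With padding='same' and return_sequences=False each tensor is (1, 8, 8, 8, 4).
print(output.shape, state_h.shape, state_c.shape)
```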
@@ -92,10 +92,18 @@ tf_module {
name: "Conv3DTranspose"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM1D"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM2D"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM3D"
mtype: "<type \'type\'>"
}
member {
name: "Convolution1D"
mtype: "<type \'type\'>"
......
path: "tensorflow.keras.layers.ConvLSTM1D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM1D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"
mtype: "<type \'property\'>"
}
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "bias_constraint"
mtype: "<type \'property\'>"
}
member {
name: "bias_initializer"
mtype: "<type \'property\'>"
}
member {
name: "bias_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "data_format"
mtype: "<type \'property\'>"
}
member {
name: "dilation_rate"
mtype: "<type \'property\'>"
}
member {
name: "dropout"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "filters"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "kernel_constraint"
mtype: "<type \'property\'>"
}
member {
name: "kernel_initializer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_size"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "padding"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_activation"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_constraint"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_dropout"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_initializer"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "states"
mtype: "<type \'property\'>"
}
member {
name: "strides"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "unit_forget_bias"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "use_bias"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'recurrent_activation\', \'use_bias\', \'kernel_initializer\', \'recurrent_initializer\', \'bias_initializer\', \'unit_forget_bias\', \'kernel_regularizer\', \'recurrent_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'recurrent_constraint\', \'bias_constraint\', \'return_sequences\', \'return_state\', \'go_backwards\', \'stateful\', \'dropout\', \'recurrent_dropout\'], varargs=None, keywords=kwargs, defaults=[\'1\', \'valid\', \'None\', \'1\', \'tanh\', \'hard_sigmoid\', \'True\', \'glorot_uniform\', \'orthogonal\', \'zeros\', \'True\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'False\', \'False\', \'False\', \'False\', \'0.0\', \'0.0\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\', \'inputs\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'mask\', \'training\', \'initial_state\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\', \'states\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
path: "tensorflow.keras.layers.ConvLSTM2D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM2D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN2D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
......
path: "tensorflow.keras.layers.ConvLSTM3D"
tf_class {
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM3D\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvLSTM\'>"
is_instance: "<class \'keras.layers.convolutional_recurrent.ConvRNN\'>"
is_instance: "<class \'keras.layers.recurrent.RNN\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.training.tracking.tracking.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activation"
mtype: "<type \'property\'>"
}
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "bias_constraint"
mtype: "<type \'property\'>"
}
member {
name: "bias_initializer"
mtype: "<type \'property\'>"
}
member {
name: "bias_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "data_format"
mtype: "<type \'property\'>"
}
member {
name: "dilation_rate"
mtype: "<type \'property\'>"
}
member {
name: "dropout"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "filters"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "kernel_constraint"
mtype: "<type \'property\'>"
}
member {
name: "kernel_initializer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "kernel_size"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "padding"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_activation"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_constraint"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_dropout"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_initializer"
mtype: "<type \'property\'>"
}
member {
name: "recurrent_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "states"
mtype: "<type \'property\'>"
}
member {
name: "strides"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "unit_forget_bias"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "use_bias"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'recurrent_activation\', \'use_bias\', \'kernel_initializer\', \'recurrent_initializer\', \'bias_initializer\', \'unit_forget_bias\', \'kernel_regularizer\', \'recurrent_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'recurrent_constraint\', \'bias_constraint\', \'return_sequences\', \'return_state\', \'go_backwards\', \'stateful\', \'dropout\', \'recurrent_dropout\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1, 1)\', \'valid\', \'None\', \'(1, 1, 1)\', \'tanh\', \'hard_sigmoid\', \'True\', \'glorot_uniform\', \'orthogonal\', \'zeros\', \'True\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'False\', \'False\', \'False\', \'False\', \'0.0\', \'0.0\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\', \'inputs\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'mask\', \'training\', \'initial_state\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\', \'states\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
......@@ -92,10 +92,18 @@ tf_module {
name: "Conv3DTranspose"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM1D"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM2D"
mtype: "<type \'type\'>"
}
member {
name: "ConvLSTM3D"
mtype: "<type \'type\'>"
}
member {
name: "Convolution1D"
mtype: "<type \'type\'>"
......
@@ -224,7 +224,9 @@ else:
  LSTMCellV1 = LSTMCell

# Convolutional-recurrent layers.
from keras.layers.convolutional_recurrent import ConvLSTM1D
from keras.layers.convolutional_recurrent import ConvLSTM2D
from keras.layers.convolutional_recurrent import ConvLSTM3D

# CuDNN recurrent layers.
from keras.layers.cudnn_recurrent import CuDNNLSTM
......
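A small sanity-check sketch (assuming an environment built from this change, not code from the diff): the imports above expose the layers on `keras.layers`, and the API goldens earlier register the matching `tf.keras.layers` symbols.

```python
import tensorflow as tf
from keras.layers import ConvLSTM1D, ConvLSTM2D, ConvLSTM3D  # imports added/extended above

for name in ('ConvLSTM1D', 'ConvLSTM2D', 'ConvLSTM3D'):
  # Each class should also resolve through the public tf.keras namespace.
  assert hasattr(tf.keras.layers, name), name
print('ConvLSTM layers available in both namespaces')
```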
@@ -25,7 +25,63 @@ from keras import testing_utils
@keras_parameterized.run_all_keras_modes
class ConvLSTMTest(keras_parameterized.TestCase):
class ConvLSTM1DTest(keras_parameterized.TestCase):
  @parameterized.named_parameters(
      *testing_utils.generate_combinations_with_testcase_name(
          data_format=['channels_first', 'channels_last'],
          return_sequences=[True, False]))
  def test_conv_lstm(self, data_format, return_sequences):
    num_row = 3
    filters = 3
    num_samples = 1
    input_channel = 2
    input_num_row = 5
    sequence_len = 2
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, sequence_len, input_channel,
                              input_num_row)
    else:
      inputs = np.random.rand(num_samples, sequence_len, input_num_row,
                              input_channel)
    # test for return state:
    x = keras.Input(batch_shape=inputs.shape)
    kwargs = {
        'data_format': data_format,
        'return_sequences': return_sequences,
        'return_state': True,
        'stateful': True,
        'filters': filters,
        'kernel_size': num_row,
        'padding': 'valid',
    }
    layer = keras.layers.ConvLSTM1D(**kwargs)
    layer.build(inputs.shape)
    outputs = layer(x)
    _, states = outputs[0], outputs[1:]
    self.assertEqual(len(states), 2)
    model = keras.models.Model(x, states[0])
    state = model.predict(inputs)
    self.assertAllClose(keras.backend.eval(layer.states[0]), state, atol=1e-4)
    # test for output shape:
    testing_utils.layer_test(
        keras.layers.ConvLSTM1D,
        kwargs={
            'data_format': data_format,
            'return_sequences': return_sequences,
            'filters': filters,
            'kernel_size': num_row,
            'padding': 'valid'
        },
        input_shape=inputs.shape)
@keras_parameterized.run_all_keras_modes
class ConvLSTM2DTest(keras_parameterized.TestCase):
  @parameterized.named_parameters(
      *testing_utils.generate_combinations_with_testcase_name(
@@ -66,8 +122,7 @@ class ConvLSTMTest(keras_parameterized.TestCase):
    model = keras.models.Model(x, states[0])
    state = model.predict(inputs)
    self.assertAllClose(
        keras.backend.eval(layer.states[0]), state, atol=1e-4)
    self.assertAllClose(keras.backend.eval(layer.states[0]), state, atol=1e-4)
    # test for output shape:
    testing_utils.layer_test(
@@ -109,8 +164,7 @@ class ConvLSTMTest(keras_parameterized.TestCase):
    out1 = model.predict(np.ones_like(inputs))
    # train once so that the states change
    model.train_on_batch(np.ones_like(inputs),
                         np.random.random(out1.shape))
    model.train_on_batch(np.ones_like(inputs), np.random.random(out1.shape))
    out2 = model.predict(np.ones_like(inputs))
    # if the state is not reset, output should be different
@@ -231,5 +285,65 @@ class ConvLSTMTest(keras_parameterized.TestCase):
    model.predict([x_1, x_2])
@keras_parameterized.run_all_keras_modes
class ConvLSTM3DTest(keras_parameterized.TestCase):
  @parameterized.named_parameters(
      *testing_utils.generate_combinations_with_testcase_name(
          data_format=['channels_first', 'channels_last'],
          return_sequences=[True, False]))
  def test_conv_lstm(self, data_format, return_sequences):
    num_height = 3
    num_width = 3
    num_depth = 3
    filters = 3
    num_samples = 1
    input_channel = 2
    input_height = 5
    input_width = 5
    input_depth = 5
    sequence_len = 2
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, sequence_len, input_channel,
                              input_height, input_width, input_depth)
    else:
      inputs = np.random.rand(num_samples, sequence_len, input_height,
                              input_width, input_depth, input_channel)
    # test for return state:
    x = keras.Input(batch_shape=inputs.shape)
    kwargs = {
        'data_format': data_format,
        'return_sequences': return_sequences,
        'return_state': True,
        'stateful': True,
        'filters': filters,
        'kernel_size': (num_height, num_width, num_depth),
        'padding': 'same'
    }
    layer = keras.layers.ConvLSTM3D(**kwargs)
    layer.build(inputs.shape)
    outputs = layer(x)
    _, states = outputs[0], outputs[1:]
    self.assertEqual(len(states), 2)
    model = keras.models.Model(x, states[0])
    state = model.predict(inputs)
    self.assertAllClose(keras.backend.eval(layer.states[0]), state, atol=1e-4)
    # test for output shape:
    testing_utils.layer_test(
        keras.layers.ConvLSTM3D,
        kwargs={
            'data_format': data_format,
            'return_sequences': return_sequences,
            'filters': filters,
            'kernel_size': (num_height, num_width, num_depth),
            'padding': 'valid'
        },
        input_shape=inputs.shape)
if __name__ == '__main__':
  tf.test.main()