developer-mayuan
12/21/2017 - 1:00 AM

Batch normalization followed by ReLU activation in TensorFlow.

import tensorflow as tf

# Typical values used by the TensorFlow official ResNet model.
_BATCH_NORM_DECAY = 0.997
_BATCH_NORM_EPSILON = 1e-5


def batch_norm_relu(inputs, is_training, data_format):
    """
    Performs a batch normalization followed by a ReLU.
    :param inputs: 4-D input tensor (NCHW or NHWC, per data_format)
    :param is_training: bool; True to normalize with batch statistics
    :param data_format: 'channels_first' (NCHW) or 'channels_last' (NHWC)
    :return: the normalized and activated tensor
    """
    inputs = tf.layers.batch_normalization(
        inputs=inputs, axis=1 if data_format == 'channels_first' else 3,
        momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,
        scale=True, training=is_training, fused=True
    )
    # Apply the ReLU promised by the function name and docstring.
    inputs = tf.nn.relu(inputs)
    return inputs
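
One gotcha: tf.layers.batch_normalization updates its moving mean and variance through ops it registers in tf.GraphKeys.UPDATE_OPS, so the train op must explicitly depend on them or inference-time statistics will never be refreshed. Below is a minimal sketch of that wiring; the input placeholder, shape, and loss are hypothetical stand-ins for a real model:

import tensorflow as tf

# Hypothetical input and loss, standing in for a real network.
images = tf.placeholder(tf.float32, [None, 224, 224, 3])
outputs = batch_norm_relu(images, is_training=True,
                          data_format='channels_last')
loss = tf.reduce_mean(outputs)  # stand-in for an actual loss

# Make the train step depend on the batch-norm update ops so the
# moving averages are refreshed on every training step.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_op = tf.train.GradientDescentOptimizer(0.01).minimize(loss)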