# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the Activation layer."""


from keras import activations
from keras.engine.base_layer import Layer

# isort: off
from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.Activation")
class Activation(Layer):
    """Applies an activation function to an output.

    Args:
        activation: Activation function, such as `tf.nn.relu`, or string name
            of a built-in activation function, such as "relu".

    Usage:

    >>> layer = tf.keras.layers.Activation('relu')
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]
    >>> layer = tf.keras.layers.Activation(tf.nn.relu)
    >>> output = layer([-3.0, -1.0, 0.0, 2.0])
    >>> list(output.numpy())
    [0.0, 0.0, 0.0, 2.0]

    Input shape:
        Arbitrary. Use the keyword argument `input_shape` (tuple of integers,
        does not include the batch axis) when using this layer as the first
        layer in a model.

    Output shape:
        Same shape as input.
    """

    def __init__(self, activation, **kwargs):
        super().__init__(**kwargs)
        self.supports_masking = True
        self.activation = activations.get(activation)

    def call(self, inputs):
        return self.activation(inputs)

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {"activation": activations.serialize(self.activation)}
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))
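

if __name__ == "__main__":
    # Illustrative usage sketch, not part of the upstream module: exercises
    # the Activation class defined above on a small constant tensor. Assumes
    # TensorFlow is installed and eager execution is enabled (the default).
    import tensorflow as tf

    layer = Activation("relu")
    output = layer(tf.constant([-3.0, -1.0, 0.0, 2.0]))
    print(output.numpy())  # Expected: [0. 0. 0. 2.]

    # Round-trip the layer through its config to show that the activation
    # is serialized by name via `get_config`/`from_config`.
    restored = Activation.from_config(layer.get_config())
    print(restored.get_config()["activation"])  # Expected: "relu"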