SELU activation layer for Keras with the TensorFlow backend; in practice it can work better than ReLU for self-normalizing networks.
import keras.backend as K
from keras.engine import Layer
import tensorflow as tf


class SelU(Layer):
    """Scaled Exponential Linear Unit (SELU) activation layer."""

    def __init__(self, alpha=1.6732632423543772848170429916717,
                 scale=1.0507009873554804934193349852946, **kwargs):
        super(SelU, self).__init__(**kwargs)
        # Fixed constants from the SELU derivation, cast to the backend float type.
        self.alpha = K.cast_to_floatx(alpha)
        self.scale = K.cast_to_floatx(scale)

    def call(self, inputs, **kwargs):
        # selu(x) = scale * x                      for x > 0
        # selu(x) = scale * alpha * (exp(x) - 1)   otherwise
        return self.scale * tf.where(inputs > 0.0, inputs,
                                     self.alpha * tf.exp(inputs) - self.alpha)

    def get_config(self):
        # Serialize alpha and scale so the layer can be saved and reloaded.
        config = {"alpha": float(self.alpha), "scale": float(self.scale)}
        base_config = super(SelU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
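
A minimal usage sketch, assuming the class above is importable (e.g. saved as selu.py next to the model code); the layer sizes, input shape, and loss are illustrative only, not part of the gist:

from keras.models import Sequential, load_model
from keras.layers import Dense

# Hypothetical toy model: Dense layers with no built-in activation,
# each followed by the custom SelU layer defined above.
model = Sequential([
    Dense(64, input_shape=(20,)),
    SelU(),
    Dense(64),
    SelU(),
    Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy")

# Because SelU is a custom layer, pass it via custom_objects when reloading
# a saved model:
# model = load_model("model.h5", custom_objects={"SelU": SelU})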