@hedgefair
Forked from thisismohitgupta/Selu_keras.py
Created June 12, 2017 20:40
SELU activation layer for Keras with the TensorFlow backend; in the SELU paper's experiments it often outperforms ReLU in deep fully-connected networks.
import keras.backend as K
from keras.engine import Layer
import tensorflow as tf


class SelU(Layer):
    """Scaled Exponential Linear Unit (SELU) activation layer."""

    def __init__(self, alpha=1.6732632423543772848170429916717,
                 scale=1.0507009873554804934193349852946, **kwargs):
        super(SelU, self).__init__(**kwargs)
        # Fixed constants derived in the SELU paper; they give the
        # activation its self-normalizing property.
        self.alpha = K.cast_to_floatx(alpha)
        self.scale = K.cast_to_floatx(scale)

    def call(self, inputs, **kwargs):
        # scale * x for x > 0, scale * alpha * (exp(x) - 1) otherwise.
        return self.scale * tf.where(inputs > 0.0,
                                     inputs,
                                     self.alpha * (tf.exp(inputs) - 1.0))

    def get_config(self):
        # Serialize alpha and scale so the layer survives model save/load.
        config = {'alpha': float(self.alpha), 'scale': float(self.scale)}
        base_config = super(SelU, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
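
A minimal usage sketch: SelU drops into a model like any other Keras activation layer. The architecture below (layer sizes, input shape, optimizer) is illustrative only and not part of the gist; note that the SELU paper also recommends LeCun-style weight initialization to preserve the self-normalizing property.

from keras.models import Sequential
from keras.layers import Dense

# Hypothetical two-layer classifier showing where SelU slots in.
model = Sequential()
model.add(Dense(64, input_shape=(784,)))
model.add(SelU())
model.add(Dense(10, activation='softmax'))
model.compile(optimizer='sgd', loss='categorical_crossentropy')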