@rtkclouds
Created November 7, 2023 03:58
ccore layer
import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras.layers import Dense, Conv1D, Add, Activation, Multiply

# ReZero gate: scales its input by a scalar alpha initialized to 0.
class Rezero(layers.Layer):
    def __init__(self):
        super().__init__()
        self.alpha1 = tf.Variable(0.0, trainable=True)

    def call(self, inputs, training=None):
        return self.alpha1 * inputs
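
# Minimal sanity check (a sketch; the input values are arbitrary): with alpha1
# starting at 0.0 the layer returns zeros, so a Rezero-wrapped branch begins as a no-op.
print(Rezero()(tf.ones((2, 3))))  # -> all zeros until alpha1 is trained away from 0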
# Dense variant of ReZero: a zero-initialized projection whose output is scaled by
# a learned alpha (init 0) and divided by a learned beta (init 1), so it also
# starts out producing zeros.
class CustomRezero(tf.keras.layers.Layer):
    def __init__(self, units):
        super(CustomRezero, self).__init__()
        self.units = units

    def build(self, input_shape):
        self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
                                      initializer='zeros')
        self.bias = self.add_weight(shape=(self.units,), initializer='zeros')
        self.alpha = self.add_weight(shape=(1,), initializer='zeros')
        self.beta = self.add_weight(shape=(1,), initializer='ones')

    def call(self, inputs):
        output = tf.matmul(inputs, self.kernel) + self.bias
        output = output * self.alpha / self.beta
        return output
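
# CustomRezero is defined in the gist but not used by CCore below. A quick shape
# check (a sketch with arbitrary input): output has shape (3, 8) and is all zeros at init.
print(CustomRezero(units=8)(tf.random.normal((3, 4))).shape)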
# CCore block: nl x nl frozen (non-trainable) Conv1D/Dense mixing steps with residual
# additions, a tanh * sigmoid gate, dropout, and a Rezero-scaled skip accumulation.
# hidden_dim is taken as an argument here; the original gist left it as a free variable.
def CCore(x, hidden_dim):
    nl = 5
    skip = x
    c = [None] * nl
    d = [None] * nl
    projg = Dense(hidden_dim, trainable=False)
    for i in range(nl):
        proji = Dense(hidden_dim, trainable=False, activation="tanh")
        projj = Dense(hidden_dim, trainable=False, activation="linear")
        for j in range(nl):
            # Frozen convolution whose kernel size cycles with i and j.
            xa = Conv1D(
                filters=hidden_dim,
                trainable=False,
                kernel_size=1 + ((j * i) % 15),
                padding='same',
                strides=1
            )(x)
            if i % 2 == 0:
                n = proji(xa)
                xa = Add()([n, xa])
            else:
                n = projj(xa)
                xa = Add()([n, xa])
            # Lazily create one shared projection per column (c[j]) and per row (d[i]).
            c[j] = c[j] or Dense(hidden_dim, trainable=False, activation="linear")
            d[i] = d[i] or Dense(hidden_dim, trainable=False, activation="tanh")
            n = c[j](xa)
            xa = Add()([n, xa])
            n = d[i](xa)
            xa = Add()([n, xa])
            n = projg(xa)
            xa = Add()([n, xa])
            # Gated activation, dropout, and a zero-initialized (Rezero) residual update.
            a = Activation('tanh')(xa)
            b = Activation('sigmoid')(xa)
            x = Multiply()([a, b])
            x = tf.keras.layers.Dropout(rate=.2)(x)
            x = Rezero()(x)
            skip = Add()([skip, x])
    return skip
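
# Usage sketch. The input shape, hidden_dim value, and the Dense projection that
# matches channel counts before entering CCore are assumptions, not part of the gist.
hidden_dim = 64
inp = tf.keras.Input(shape=(128, 16))   # hypothetical (timesteps, features)
h = Dense(hidden_dim)(inp)              # project to hidden_dim so the residual adds line up
out = CCore(h, hidden_dim)
model = tf.keras.Model(inp, out)
model.summary()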