@ViniTheSwan
Created July 19, 2021 09:12

import numpy as np

class Neuron:
    def __init__(self, input_size, learning_rate):
        # self.w is a 2-dimensional column vector of shape (input_size, 1)
        self.w = np.random.random((input_size, 1)) - 0.5
        self.b = np.random.random(1) - 0.5
        self.learning_rate = learning_rate

    # forward pass: a = w^T x + b
    def forward(self, x):
        a = x.T.dot(self.w) + self.b
        return a

    # squared error loss for a single sample
    def loss(self, x, y):
        a = self.forward(x)
        L = (y - a) ** 2
        return L

    # backpropagation: gradients of (y_hat - y)^2 with respect to w and b
    def backward(self, x, y, y_hat):
        dw = 2. * (y_hat - y) * x.T
        db = 2. * (y_hat - y) * 1.
        return dw, db

    # training step for one sample
    def train(self, x, y):
        # ensure that x is a 2-dimensional column vector
        x = x.reshape(-1, 1)
        a = self.forward(x)
        dw, db = self.backward(x, y, a)
        # gradient descent update
        self.w = self.w - self.learning_rate * dw.T
        self.b = self.b - self.learning_rate * db.T
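
For reference, here is a minimal usage sketch of the class above. The training data (a toy linear target y = 3x + 2), the learning rate, and the number of epochs are illustrative assumptions, not part of the original gist; with stochastic gradient descent on this data the learned parameters should approach w ≈ 3 and b ≈ 2.

    # hypothetical toy example: fit y = 3x + 2 with a single neuron
    neuron = Neuron(input_size=1, learning_rate=0.01)
    xs = np.linspace(-1, 1, 20)
    ys = 3 * xs + 2
    for epoch in range(100):
        for x_val, y_val in zip(xs, ys):
            # each call performs one stochastic gradient descent step
            neuron.train(np.array([x_val]), y_val)
    print(neuron.w, neuron.b)  # should approach w ≈ 3, b ≈ 2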