
@philippspinnler
Last active September 4, 2025 07:06
Simple Neuron
# We set a fixed starting point for random numbers so we get the same results each time
import random
random.seed(42)

# Our learning data
x_data = [1, 2, 3, 4, 5]   # Input numbers
y_data = [2, 4, 6, 8, 10]  # Output numbers (each is 2 times the input)

# Starting guesses for our formula (y = w*x + b)
w = random.random()  # This should end up close to 2
b = random.random()  # This should end up close to 0

# How quickly to adjust our guesses (small steps)
lr = 0.01
# Try to get better 1000 times
for epoch in range(1000):
    total_loss = 0
    for x, y in zip(x_data, y_data):
        # Make a guess
        y_pred = w * x + b  # prediction

        # Check how wrong we are
        loss = (y_pred - y) ** 2  # Square the error to make it positive
        total_loss += loss  # Add to total mistakes

        # Figure out how to improve
        grad_w = 2 * (y_pred - y) * x  # How to change w
        grad_b = 2 * (y_pred - y)  # How to change b
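        # Where these two formulas come from (a brief derivation, reading
        # the loss as (w*x + b - y)**2 and applying the chain rule):
        #   d(loss)/dw = 2 * (w*x + b - y) * x = 2 * (y_pred - y) * x
        #   d(loss)/db = 2 * (w*x + b - y)     = 2 * (y_pred - y)
        # Stepping w and b a little against these gradients shrinks the loss.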
        # Make small improvements
        w -= lr * grad_w  # Update w
        b -= lr * grad_b  # Update b

    # Show progress every 100 tries
    if epoch % 100 == 0:
        print(f"Epoch {epoch}: Loss = {total_loss:.4f}, w = {w:.4f}, b = {b:.4f}")

# Show what we learned
print(f"Trained weight: {w:.4f}, bias: {b:.4f}")
# If it worked, w should be close to 2 and b close to 0
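
A quick way to sanity-check the result (a minimal sketch, not part of the original gist) is to run the learned formula on inputs the loop never trained on; the inputs 6 and 10 below are chosen just for illustration and should map to roughly 12 and 20:

# Sanity check (assumption: any unseen inputs work; 6 and 10 are arbitrary)
for x_new in [6, 10]:
    y_new = w * x_new + b
    print(f"Prediction for x = {x_new}: {y_new:.4f}")  # expect about 2 * x_new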