@why-not · Created August 3, 2025 23:48
Simple neural network: a tiny 1→3→1 feedforward net trained with backpropagation to classify a number as positive or negative.
import numpy as np

# 1. Activation function (sigmoid)
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# 2. Derivative of sigmoid (used during training)
def sigmoid_derivative(x):
    return x * (1 - x)
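
# Sanity check (added for clarity, not part of the original gist):
# sigmoid_derivative expects the already-activated value s = sigmoid(z),
# since d/dz sigmoid(z) = s * (1 - s). Verify against the analytic form:
_z = 0.5
_s = sigmoid(_z)
assert np.isclose(sigmoid_derivative(_s), np.exp(-_z) / (1 + np.exp(-_z)) ** 2)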
# 3. Convert label to array
def label_to_array(y):
    return np.array([[1]]) if y > 0 else np.array([[0]])

# 4. Prepare training data
# Each item is (input number, expected output)
training_data = [
    (np.array([[5]]), label_to_array(5)),
    (np.array([[-3]]), label_to_array(-3)),
    (np.array([[0.1]]), label_to_array(0.1)),
    (np.array([[-0.5]]), label_to_array(-0.5)),
    (np.array([[7]]), label_to_array(7)),
    (np.array([[-9]]), label_to_array(-9)),
]
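
# Optional (an illustrative sketch, not part of the original gist): the six
# hand-picked pairs above could be widened with randomly drawn values, e.g.:
#
#   extras = np.random.uniform(-10, 10, size=20)
#   training_data += [(np.array([[v]]), label_to_array(v)) for v in extras]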
# 5. Initialize weights and biases
np.random.seed(1) # Make results predictable
# Weights for input to hidden layer (1 input -> 3 hidden neurons)
weights_input_hidden = np.random.randn(1, 3)
bias_hidden = np.random.randn(1, 3)
# Weights for hidden to output layer (3 hidden -> 1 output)
weights_hidden_output = np.random.randn(3, 1)
bias_output = np.random.randn(1, 1)
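
# Shape walk-through (added for clarity, not part of the original gist):
#   x                   : (1, 1)  one sample, one feature
#   x @ W_ih + b_h      : (1, 1) @ (1, 3) + (1, 3) -> (1, 3)  hidden activations
#   hidden @ W_ho + b_o : (1, 3) @ (3, 1) + (1, 1) -> (1, 1)  output probability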
# 6. Training loop
learning_rate = 0.1
epochs = 10000

for epoch in range(epochs):
    for x, y in training_data:
        # ---- Forward Pass ----
        hidden_input = np.dot(x, weights_input_hidden) + bias_hidden
        hidden_output = sigmoid(hidden_input)
        final_input = np.dot(hidden_output, weights_hidden_output) + bias_output
        final_output = sigmoid(final_input)

        # ---- Backward Pass ----
        # Calculate output error
        output_error = y - final_output
        output_delta = output_error * sigmoid_derivative(final_output)
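        # (Note added for clarity, not part of the original gist: each "delta"
        # is the layer's error scaled by the activation's local slope, i.e.,
        # up to sign, the gradient of the squared error with respect to that
        # layer's pre-activation. The updates below are plain gradient descent.)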
        # Calculate hidden layer error
        hidden_error = output_delta.dot(weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_derivative(hidden_output)

        # ---- Update Weights ----
        weights_hidden_output += hidden_output.T.dot(output_delta) * learning_rate
        bias_output += output_delta * learning_rate
        weights_input_hidden += x.T.dot(hidden_delta) * learning_rate
        bias_hidden += hidden_delta * learning_rate
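
# Post-training check (illustrative, not part of the original gist): the mean
# squared error over the training set should be near zero if training converged.
_errors = []
for x, y in training_data:
    _h = sigmoid(np.dot(x, weights_input_hidden) + bias_hidden)
    _out = sigmoid(np.dot(_h, weights_hidden_output) + bias_output)
    _errors.append(((y - _out) ** 2).item())
print(f"Final training MSE: {np.mean(_errors):.4f}")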
# 7. Test the network
def predict(number):
    x = np.array([[number]])
    hidden = sigmoid(np.dot(x, weights_input_hidden) + bias_hidden)
    output = sigmoid(np.dot(hidden, weights_hidden_output) + bias_output)
    prediction = "Positive" if output[0][0] > 0.5 else "Negative"
    print(f"Number: {number} → {prediction} (confidence: {output[0][0]:.2f})")
# Try some examples
predict(10)
predict(-4)
predict(0.2)
predict(-0.1)
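
# Illustrative extra (not part of the original gist): scan values near zero to
# see roughly where the learned decision boundary falls.
for n in (-0.5, -0.05, 0.05, 0.5):
    predict(n)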