Sunday, 28 December 2025

Simple ANN to predict XOR Logic Output


We can see from the training results that the network predicts the XOR function correctly (outputs close to 0, 1, 1, 0).

When prompted to key in two inputs, we can also see that the output is predicted correctly.


Python Code from Grok

import numpy as np

# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# Derivative of sigmoid for backpropagation
# (expects the sigmoid output a = sigmoid(z), since sigmoid'(z) = a * (1 - a))
def sigmoid_derivative(x):
    return x * (1 - x)

# Input data (XOR truth table)
X = np.array([[0, 0],
              [0, 1],
              [1, 0],
              [1, 1]])

# Expected output
y = np.array([[0],
              [1],
              [1],
              [0]])

# Seed for reproducibility
np.random.seed(42)

# Network architecture
input_neurons = 2
hidden_neurons = 4
output_neurons = 1

# Randomly initialize weights
weights_input_hidden = np.random.uniform(size=(input_neurons, hidden_neurons))
weights_hidden_output = np.random.uniform(size=(hidden_neurons, output_neurons))
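# (No bias terms are used in this minimal example)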

# Learning rate
learning_rate = 1.0

# Training loop
print("Training the neural network on XOR...")
for epoch in range(10000):
    # Forward pass
    hidden_input = np.dot(X, weights_input_hidden)
    hidden_output = sigmoid(hidden_input)
    
    final_input = np.dot(hidden_output, weights_hidden_output)
    predicted_output = sigmoid(final_input)
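    # Shapes: X is (4, 2), hidden_output is (4, hidden_neurons),
    # and predicted_output is (4, 1), one prediction per training row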
    
    # Compute error
    error = y - predicted_output
    
    # Backpropagation
    d_output = error * sigmoid_derivative(predicted_output)
    
    error_hidden = d_output.dot(weights_hidden_output.T)
    d_hidden = error_hidden * sigmoid_derivative(hidden_output)
    
    # Update weights
    weights_hidden_output += hidden_output.T.dot(d_output) * learning_rate
    weights_input_hidden += X.T.dot(d_hidden) * learning_rate
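    # Note on the update direction: with squared error E = 0.5 * (y - y_hat)^2,
    # the gradient of E with respect to final_input is -(y - y_hat) * y_hat * (1 - y_hat),
    # which is exactly -d_output, so adding these d_output-based updates to the
    # weights is the same as taking a gradient-descent step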

print("Training complete!\n")

# Show results on training data
print("Results on training data (XOR):")
for i in range(len(X)):
    print(f"Input: {X[i]} → Predicted: {predicted_output[i][0]:.3f} (expected: {y[i][0]})")

print("\n" + "="*40)
print("Now you can test the network yourself!")
print("="*40)

# Interactive prediction loop
while True:
    try:
        print("\nEnter two binary inputs (0 or 1), separated by space (or type 'quit' to exit):")
        user_input = input("> ").strip()
        
        if user_input.lower() in ['quit', 'exit', 'q']:
            print("Goodbye!")
            break
        
        values = list(map(float, user_input.split()))
        
        if len(values) != 2:
            print("Please enter exactly two numbers.")
            continue
        
        if not all(v in [0, 1] for v in values):
            print("Please enter only 0 or 1 for each input.")
            continue
        
        # Convert to numpy array and predict
        input_data = np.array([values])
        
        hidden_layer = sigmoid(np.dot(input_data, weights_input_hidden))
        output = sigmoid(np.dot(hidden_layer, weights_hidden_output))
        
        prediction = output[0][0]
        rounded = 1 if prediction >= 0.5 else 0
        
        print(f"\nInput:  [{values[0]}, {values[1]}]")
        print(f"Network output: {prediction:.4f}")
        print(f"Predicted class: {rounded} → This is XOR: {int(values[0] != values[1])}")
        
    except ValueError:
        print("Invalid input. Please enter numbers separated by space.")
    except Exception as e:
        print(f"Error: {e}")
