6 changes: 6 additions & 0 deletions DIRECTORY.md
@@ -22,6 +22,7 @@
* [Rat In Maze](backtracking/rat_in_maze.py)
* [Sudoku](backtracking/sudoku.py)
* [Sum Of Subsets](backtracking/sum_of_subsets.py)
+* [Word Break](backtracking/word_break.py)
* [Word Ladder](backtracking/word_ladder.py)
* [Word Search](backtracking/word_search.py)

@@ -99,6 +100,7 @@
* [Elgamal Key Generator](ciphers/elgamal_key_generator.py)
* [Enigma Machine2](ciphers/enigma_machine2.py)
* [Fractionated Morse Cipher](ciphers/fractionated_morse_cipher.py)
+* [Gronsfeld Cipher](ciphers/gronsfeld_cipher.py)
* [Hill Cipher](ciphers/hill_cipher.py)
* [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py)
* [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py)
@@ -211,6 +213,7 @@
* [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py)
* [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py)
* [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py)
+* [Maximum Sum Bst](data_structures/binary_tree/maximum_sum_bst.py)
* [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py)
* [Mirror Binary Tree](data_structures/binary_tree/mirror_binary_tree.py)
* [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py)
@@ -284,6 +287,7 @@
* [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py)
* [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py)
* [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py)
+* [Lexicographical Numbers](data_structures/stacks/lexicographical_numbers.py)
* [Next Greater Element](data_structures/stacks/next_greater_element.py)
* [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py)
* [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py)
@@ -820,6 +824,7 @@
* [Softplus](neural_network/activation_functions/softplus.py)
* [Squareplus](neural_network/activation_functions/squareplus.py)
* [Swish](neural_network/activation_functions/swish.py)
+* [Artificial Neural Network](neural_network/artificial_neural_network.py)
* [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
* [Convolution Neural Network](neural_network/convolution_neural_network.py)
* [Input Data](neural_network/input_data.py)
@@ -1201,6 +1206,7 @@
* [Binary Tree Traversal](searches/binary_tree_traversal.py)
* [Double Linear Search](searches/double_linear_search.py)
* [Double Linear Search Recursion](searches/double_linear_search_recursion.py)
+* [Exponential Search](searches/exponential_search.py)
* [Fibonacci Search](searches/fibonacci_search.py)
* [Hill Climbing](searches/hill_climbing.py)
* [Interpolation Search](searches/interpolation_search.py)
84 changes: 84 additions & 0 deletions neural_network/artificial_neural_network.py
@@ -0,0 +1,84 @@
"""
Simple Artificial Neural Network (ANN)
- Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
- Uses Gradient Descent for backpropagation and Mean Squared Error (MSE) as the loss function.
- Example demonstrates solving the XOR problem.
"""

import numpy as np


class ANN:
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.1):
        # Initialize weights with small random values
        self.weights_input_hidden = np.random.randn(input_size, hidden_size)
        self.weights_hidden_output = np.random.randn(hidden_size, output_size)
        # Initialize biases to zero
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))

        # Learning rate
        self.learning_rate = learning_rate

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        # Expects `x` to already be a sigmoid *output*, since s'(z) = s(z) * (1 - s(z))
        return x * (1 - x)

    def feedforward(self, X):
        # Hidden layer: X @ W_ih + b_h, then sigmoid
        self.hidden_input = np.dot(X, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = self.sigmoid(self.hidden_input)

        # Output layer: hidden @ W_ho + b_o, then sigmoid
        self.final_input = (
            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        )
        self.final_output = self.sigmoid(self.final_input)

        return self.final_output

    def backpropagation(self, X, y, output):
        # Error term (y - output); the gradient of the MSE loss w.r.t. the
        # output is proportional to this difference
        error = y - output
        # Gradient at the output layer
        output_gradient = error * self.sigmoid_derivative(output)
        # Propagate the error back to the hidden layer
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
        # Update weights and biases (gradient descent step)
        self.weights_hidden_output += (
            self.hidden_output.T.dot(output_gradient) * self.learning_rate
        )
        self.bias_output += (
            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
        )
        self.weights_input_hidden += X.T.dot(hidden_gradient) * self.learning_rate
        self.bias_hidden += (
            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
        )
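
    # Derivation note (standard chain rule for a sigmoid/MSE network; the
    # constant factor from the MSE derivative is absorbed into the learning rate):
    #   dL/dW_ho = hidden_output^T @ ((output - y) * s'(final_input))
    #   dL/dW_ih = X^T @ ((((output - y) * s'(final_input)) @ W_ho^T) * s'(hidden_input))
    # Because the `+=` updates above use (y - output), they step along the
    # negative gradient, i.e. plain gradient descent.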

    def train(self, X, y, epochs=10000):
        for epoch in range(epochs):
            output = self.feedforward(X)
            self.backpropagation(X, y, output)
            if epoch % 1000 == 0:
                loss = np.mean(np.square(y - output))
                print(f"Epoch {epoch}, Loss: {loss}")

    def predict(self, X):
        return self.feedforward(X)


if __name__ == "__main__":
    # XOR truth table: inputs and expected outputs
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])
    # Initialize the neural network
    ann = ANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
    # Train the neural network; XOR typically needs on the order of
    # 10,000 epochs to converge with this configuration
    ann.train(X, y, epochs=10000)
    # Predict
    predictions = ann.predict(X)
    print("Predictions:")
    print(predictions)
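
For reference, a minimal smoke test of the new module, as a sketch (assuming the repository root is on the import path): sigmoid outputs lie in (0, 1), so thresholding the predictions at 0.5 recovers binary XOR labels. With only two hidden units, convergence depends on the random initialization, so the sketch fixes the seed and uses a slightly larger hidden layer to keep the check repeatable; these choices are assumptions, not part of the PR.

import numpy as np

from neural_network.artificial_neural_network import ANN  # module added by this PR

x_xor = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_xor = np.array([[0], [1], [1], [0]])

np.random.seed(0)  # fixed seed so the random weight initialization is repeatable
# hidden_size=3 converges more reliably on XOR than the minimal 2 (assumption)
ann = ANN(input_size=2, hidden_size=3, output_size=1, learning_rate=0.1)
ann.train(x_xor, y_xor, epochs=10000)

# Threshold the sigmoid outputs at 0.5 to recover binary labels
labels = (ann.predict(x_xor) > 0.5).astype(int)
print(labels.ravel())  # should print [0 1 1 0] once training has converged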