47 changes: 47 additions & 0 deletions machine_learning/mlp_activation_comparison.py
@@ -0,0 +1,47 @@
"""
Compare MLP activation functions (ReLU, Sigmoid, Tanh) using sklearn.

This script trains an MLPClassifier on the sklearn digits dataset
with different activation functions and prints their accuracy scores.

Example (exact accuracies may vary slightly across sklearn versions):
    $ python mlp_activation_comparison.py
    Activation: relu, Accuracy: 0.9722
    Activation: logistic, Accuracy: 0.9200
    Activation: tanh, Accuracy: 0.9444
"""

from __future__ import annotations

from sklearn.datasets import load_digits
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier


def compare_mlp_activations() -> None:
"""
Train and evaluate MLPClassifier with ReLU, Sigmoid, and Tanh activations.

Prints the accuracy for each activation function.
"""
digits = load_digits()
x_train, x_test, y_train, y_test = train_test_split(
digits.data, digits.target, test_size=0.2, random_state=42
)

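    # Train identical networks, varying only the hidden-layer activation.
    # Note: "logistic" is scikit-learn's name for the sigmoid activation.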
for activation in ["relu", "logistic", "tanh"]:
mlp = MLPClassifier(
hidden_layer_sizes=(50,),
activation=activation,
max_iter=500,
random_state=42,
)
mlp.fit(x_train, y_train)
y_pred = mlp.predict(x_test)
acc = accuracy_score(y_test, y_pred)
print(f"Activation: {activation}, Accuracy: {acc:.4f}")


if __name__ == "__main__":
compare_mlp_activations()
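Reviewer note (not part of the diff): the three activations being compared are simple element-wise functions. A minimal NumPy sketch of their standard definitions, for readers who want to see exactly what each hidden unit computes:

import numpy as np

def relu(x: np.ndarray) -> np.ndarray:
    # Rectified linear unit: max(0, x), applied element-wise.
    return np.maximum(0.0, x)

def sigmoid(x: np.ndarray) -> np.ndarray:
    # Logistic sigmoid: 1 / (1 + e^(-x)); this is sklearn's "logistic".
    return 1.0 / (1.0 + np.exp(-x))

def tanh(x: np.ndarray) -> np.ndarray:
    # Hyperbolic tangent, with outputs in (-1, 1).
    return np.tanh(x)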