86 changes: 86 additions & 0 deletions Algorithms.Tests/MachineLearning/KNearestNeighborsTests.cs
@@ -0,0 +1,86 @@
using NUnit.Framework;
using Algorithms.MachineLearning;
using System;

namespace Algorithms.Tests.MachineLearning;

[TestFixture]
public class KNearestNeighborsTests
{
[Test]
public void Constructor_InvalidK_ThrowsException()
{
Assert.Throws<ArgumentOutOfRangeException>(() => new KNearestNeighbors<string>(0));
}

[Test]
public void AddSample_NullFeatures_ThrowsException()
{
var knn = new KNearestNeighbors<string>(3);
double[]? features = null;
Assert.Throws<ArgumentNullException>(() => knn.AddSample(features!, "A"));
}

[Test]
public void Predict_NoTrainingData_ThrowsException()
{
var knn = new KNearestNeighbors<string>(1);
Assert.Throws<InvalidOperationException>(() => knn.Predict(new[] { 1.0 }));
}

[Test]
public void Predict_NullFeatures_ThrowsException()
{
var knn = new KNearestNeighbors<string>(1);
knn.AddSample(new[] { 1.0 }, "A");
double[]? features = null;
Assert.Throws<ArgumentNullException>(() => knn.Predict(features!));
}

[Test]
public void EuclideanDistance_DifferentLengths_ThrowsException()
{
Assert.Throws<ArgumentException>(() => KNearestNeighbors<string>.EuclideanDistance(new[] { 1.0 }, new[] { 1.0, 2.0 }));
}

[Test]
public void EuclideanDistance_CorrectResult()
{
double[] a = { 1.0, 2.0 };
double[] b = { 4.0, 6.0 };
double expected = 5.0;
double actual = KNearestNeighbors<string>.EuclideanDistance(a, b);
Assert.That(actual, Is.EqualTo(expected).Within(1e-9));
}

[Test]
public void Predict_SingleNeighbor_CorrectLabel()
{
var knn = new KNearestNeighbors<string>(1);
knn.AddSample(new[] { 1.0, 2.0 }, "A");
knn.AddSample(new[] { 3.0, 4.0 }, "B");
var label = knn.Predict(new[] { 1.1, 2.1 });
Assert.That(label, Is.EqualTo("A"));
}

[Test]
public void Predict_MajorityVote_CorrectLabel()
{
var knn = new KNearestNeighbors<string>(3);
knn.AddSample(new[] { 0.0, 0.0 }, "A");
knn.AddSample(new[] { 0.1, 0.1 }, "A");
knn.AddSample(new[] { 1.0, 1.0 }, "B");
var label = knn.Predict(new[] { 0.05, 0.05 });
Assert.That(label, Is.EqualTo("A"));
}

[Test]
public void Predict_TieBreaker_ReturnsConsistentLabel()
{
var knn = new KNearestNeighbors<string>(2);
knn.AddSample(new[] { 0.0, 0.0 }, "A");
knn.AddSample(new[] { 1.0, 1.0 }, "B");
var label = knn.Predict(new[] { 0.5, 0.5 });
Assert.That(label, Is.EqualTo("A"));
}
}
108 changes: 108 additions & 0 deletions Algorithms/MachineLearning/KNearestNeighbors.cs
@@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Linq;

namespace Algorithms.MachineLearning;

/// <summary>
/// K-Nearest Neighbors (KNN) classifier implementation.
/// This algorithm classifies data points based on the majority label of their k nearest neighbors.
/// </summary>
/// <typeparam name="TLabel">
/// The type of the label used for classification. This can be any type that represents the class or category of a sample.
/// </typeparam>
public class KNearestNeighbors<TLabel>
{
private readonly List<(double[] Features, TLabel Label)> trainingData = new();
private readonly int k;

/// <summary>
/// Initializes a new instance of the <see cref="KNearestNeighbors{TLabel}"/> classifier.
/// </summary>
/// <param name="k">Number of neighbors to consider for classification.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown if k is less than 1.</exception>
public KNearestNeighbors(int k)
{
if (k < 1)
{
throw new ArgumentOutOfRangeException(nameof(k), "k must be at least 1.");
}

this.k = k;
}

/// <summary>
/// Calculates the Euclidean distance between two feature vectors.
/// </summary>
/// <param name="a">First feature vector.</param>
/// <param name="b">Second feature vector.</param>
/// <returns>Euclidean distance.</returns>
/// <exception cref="ArgumentException">Thrown if vectors are of different lengths.</exception>
public static double EuclideanDistance(double[] a, double[] b)
{
if (a.Length != b.Length)
{
throw new ArgumentException("Feature vectors must be of the same length.");
}

double sum = 0;
for (int i = 0; i < a.Length; i++)
{
double diff = a[i] - b[i];
sum += diff * diff;
}

return Math.Sqrt(sum);
}

/// <summary>
/// Adds a training sample to the classifier.
/// </summary>
/// <param name="features">Feature vector of the sample.</param>
/// <param name="label">Label of the sample.</param>
public void AddSample(double[] features, TLabel label)
{
if (features == null)
{
throw new ArgumentNullException(nameof(features));
}

trainingData.Add((features, label));
}

/// <summary>
/// Predicts the label for a given feature vector using the KNN algorithm.
/// </summary>
/// <param name="features">Feature vector to classify.</param>
/// <returns>Predicted label.</returns>
/// <exception cref="InvalidOperationException">Thrown if there is no training data.</exception>
public TLabel Predict(double[] features)
{
if (features == null)
{
throw new ArgumentNullException(nameof(features));
}

if (trainingData.Count == 0)
{
throw new InvalidOperationException("No training data available.");
}

// Compute distances to all training samples and keep the k nearest
var distances = trainingData
.Select(td => (Label: td.Label, Distance: EuclideanDistance(features, td.Features)))
.OrderBy(x => x.Distance)
.Take(k)
.ToList();

// Majority vote; ties are broken by the smallest distance within each label group.
// (Tie-breaking by GetHashCode is avoided: string hash codes are randomized per
// process in .NET, so results would not be reproducible across runs.)
var labelCounts = distances
.GroupBy(x => x.Label)
.Select(g => new { Label = g.Key, Count = g.Count(), MinDistance = g.Min(x => x.Distance) })
.OrderByDescending(x => x.Count)
.ThenBy(x => x.MinDistance)
.ToList();

return labelCounts.First().Label;
}
}
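For reference, a minimal usage sketch of the new classifier (the 2-D feature values and "red"/"blue" labels are made up for illustration; only the KNearestNeighbors API added in this PR is assumed):

using System;
using Algorithms.MachineLearning;

// Train on a few 2-D points, then classify a query point with k = 3.
var knn = new KNearestNeighbors<string>(3);
knn.AddSample(new[] { 1.0, 1.0 }, "red");
knn.AddSample(new[] { 1.2, 0.8 }, "red");
knn.AddSample(new[] { 5.0, 5.0 }, "blue");
knn.AddSample(new[] { 5.1, 4.9 }, "blue");

string label = knn.Predict(new[] { 1.1, 0.9 });
Console.WriteLine(label); // "red" — two of the three nearest samples are labeled "red"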
1 change: 1 addition & 0 deletions README.md
@@ -108,6 +108,7 @@ find more than one implementation for the same objective but using different alg
* [CollaborativeFiltering](./Algorithms/RecommenderSystem/CollaborativeFiltering)
* [Machine Learning](./Algorithms/MachineLearning)
* [Linear Regression](./Algorithms/MachineLearning/LinearRegression.cs)
* [K-Nearest Neighbors](./Algorithms/MachineLearning/KNearestNeighbors.cs)
* [Logistic Regression](./Algorithms/MachineLearning/LogisticRegression.cs)
* [Searches](./Algorithms/Search)
* [A-Star](./Algorithms/Search/AStar/)