diff --git a/tests/AiDotNet.Tests/UnitTests/FitDetectors/AdaptiveFitDetectorTests.cs b/tests/AiDotNet.Tests/UnitTests/FitDetectors/AdaptiveFitDetectorTests.cs
new file mode 100644
index 000000000..89f25554a
--- /dev/null
+++ b/tests/AiDotNet.Tests/UnitTests/FitDetectors/AdaptiveFitDetectorTests.cs
@@ -0,0 +1,508 @@
+using AiDotNet.Enums;
+using AiDotNet.FitDetectors;
+using AiDotNet.LinearAlgebra;
+using AiDotNet.Models;
+using AiDotNet.Models.Options;
+using AiDotNet.Statistics;
+using Xunit;
+
+namespace AiDotNetTests.UnitTests.FitDetectors
+{
+    /// <summary>
+    /// Unit tests for the AdaptiveFitDetector class.
+    /// </summary>
+    public class AdaptiveFitDetectorTests
+    {
+        private static ModelEvaluationData<double, Matrix<double>, Vector<double>> CreateTestEvaluationData(
+            double trainingVariance, double validationVariance, double testVariance,
+            double trainingR2, double validationR2, double testR2)
+        {
+            var trainingBasicStats = BasicStats<double>.Empty();
+            var validationBasicStats = BasicStats<double>.Empty();
+            var testBasicStats = BasicStats<double>.Empty();
+
+            var trainingPredStats = PredictionStats<double>.Empty();
+            var validationPredStats = PredictionStats<double>.Empty();
+            var testPredStats = PredictionStats<double>.Empty();
+
+            // Use reflection to set variance values
+            var varianceProperty = typeof(BasicStats<double>).GetProperty("Variance");
+            varianceProperty?.SetValue(trainingBasicStats, trainingVariance);
+            varianceProperty?.SetValue(validationBasicStats, validationVariance);
+            varianceProperty?.SetValue(testBasicStats, testVariance);
+
+            // Use reflection to set R2 values
+            var r2Property = typeof(PredictionStats<double>).GetProperty("R2");
+            r2Property?.SetValue(trainingPredStats, trainingR2);
+            r2Property?.SetValue(validationPredStats, validationR2);
+            r2Property?.SetValue(testPredStats, testR2);
+
+            return new ModelEvaluationData<double, Matrix<double>, Vector<double>>
+            {
+                TrainingSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ActualBasicStats = trainingBasicStats,
+                    PredictionStats = trainingPredStats,
+                    ErrorStats = ErrorStats<double>.Empty()
+                },
+                ValidationSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ActualBasicStats = validationBasicStats,
+                    PredictionStats = validationPredStats,
+                    ErrorStats = ErrorStats<double>.Empty()
+                },
+                TestSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ActualBasicStats = testBasicStats,
+                    PredictionStats = testPredStats,
+                    ErrorStats = ErrorStats<double>.Empty()
+                }
+            };
+        }
+
+        [Fact]
+        public void Constructor_WithDefaultOptions_InitializesSuccessfully()
+        {
+            // Arrange & Act
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void Constructor_WithCustomOptions_InitializesSuccessfully()
+        {
+            // Arrange
+            var options = new AdaptiveFitDetectorOptions
+            {
+                ComplexityThreshold = 10.0,
+                PerformanceThreshold = 0.7
+            };
+
+            // Act
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>(options);
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void DetectFit_WithSimpleDataAndGoodPerformance_UsesResidualAnalyzer()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 1.0, // Low variance = simple data
+                validationVariance: 1.1,
+                testVariance: 1.05,
+                trainingR2: 0.9, // High R2 = good performance
+                validationR2: 0.88,
+                testR2: 0.89
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Residual Analysis Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_WithModerateComplexity_UsesLearningCurveDetector()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 8.0, // Moderate variance
+                validationVariance: 8.5,
+                testVariance: 8.2,
+                trainingR2: 0.7, // Moderate performance
+                validationR2: 0.68,
+                testR2: 0.69
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Learning Curve Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_WithComplexDataAndPoorPerformance_UsesHybridDetector()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 20.0, // High variance = complex data
+                validationVariance: 21.0,
+                testVariance: 20.5,
+                trainingR2: 0.4, // Poor performance
+                validationR2: 0.38,
+                testR2: 0.39
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Hybrid Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsValidFitType()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                5.0, 5.5, 5.2,
+                0.75, 0.73, 0.74
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.True(System.Enum.IsDefined(typeof(FitType), result.FitType));
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsConfidenceLevel()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                5.0, 5.5, 5.2,
+                0.75, 0.73, 0.74
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.True(result.ConfidenceLevel >= 0.0);
+            Assert.True(result.ConfidenceLevel <= 1.0);
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsNonEmptyRecommendations()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                5.0, 5.5, 5.2,
+                0.75, 0.73, 0.74
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.NotEmpty(result.Recommendations);
+        }
+
+        [Fact]
+        public void DetectFit_IncludesDataComplexityInRecommendations()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                5.0, 5.5, 5.2,
+                0.75, 0.73, 0.74
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r =>
+                r.Contains("data complexity") ||
+                r.Contains("Simple") ||
+                r.Contains("Moderate") ||
+                r.Contains("Complex"));
+        }
+
+        [Fact]
+        public void DetectFit_IncludesModelPerformanceInRecommendations()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                5.0, 5.5, 5.2,
+                0.75, 0.73, 0.74
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r =>
+                r.Contains("model performance") ||
+                r.Contains("Good") ||
+                r.Contains("Moderate") ||
+                r.Contains("Poor"));
+        }
+
+        [Fact]
+        public void DetectFit_WithCustomComplexityThreshold_UsesCustomThreshold()
+        {
+            // Arrange
+            var options = new AdaptiveFitDetectorOptions
+            {
+                ComplexityThreshold = 2.0 // Lower threshold
+            };
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>(options);
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 3.0, // Would be simple with default, moderate with custom
+                validationVariance: 3.2,
+                testVariance: 3.1,
+                trainingR2: 0.85,
+                validationR2: 0.83,
+                testR2: 0.84
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            // With lower complexity threshold, this should not use Residual Analyzer
+            Assert.NotNull(result.Recommendations);
+        }
+
+        [Fact]
+        public void DetectFit_WithCustomPerformanceThreshold_UsesCustomThreshold()
+        {
+            // Arrange
+            var options = new AdaptiveFitDetectorOptions
+            {
+                PerformanceThreshold = 0.95 // Very high threshold
+            };
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>(options);
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 1.0,
+                validationVariance: 1.1,
+                testVariance: 1.05,
+                trainingR2: 0.85, // Good but below custom threshold
+                validationR2: 0.83,
+                testR2: 0.84
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            // With higher performance threshold, even 0.85 R2 won't be "good"
+            Assert.NotNull(result.Recommendations);
+        }
+
+        [Fact]
+        public void DetectFit_WithVeryLowVariance_IdentifiesAsSimple()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 0.1, // Very low variance
+                validationVariance: 0.12,
+                testVariance: 0.11,
+                trainingR2: 0.9,
+                validationR2: 0.88,
+                testR2: 0.89
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Residual Analysis Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_WithVeryHighVariance_IdentifiesAsComplex()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 50.0, // Very high variance
+                validationVariance: 51.0,
+                testVariance: 50.5,
+                trainingR2: 0.5,
+                validationR2: 0.48,
+                testR2: 0.49
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Hybrid Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_WithHighR2_IdentifiesAsGoodPerformance()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 1.0,
+                validationVariance: 1.1,
+                testVariance: 1.05,
+                trainingR2: 0.95, // Very high R2
+                validationR2: 0.94,
+                testR2: 0.945
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Residual Analysis Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_WithLowR2_IdentifiesAsPoorPerformance()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 20.0,
+                validationVariance: 21.0,
+                testVariance: 20.5,
+                trainingR2: 0.2, // Very low R2
+                validationR2: 0.18,
+                testR2: 0.19
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("Hybrid Detector"));
+        }
+
+        [Fact]
+        public void DetectFit_ProvidesTailoredRecommendations()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                trainingVariance: 25.0, // Complex data
+                validationVariance: 26.0,
+                testVariance: 25.5,
+                trainingR2: 0.3, // Poor performance
+                validationR2: 0.28,
+                testR2: 0.29
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r =>
+                r.Contains("advanced modeling") ||
+                r.Contains("feature engineering") ||
+                r.Contains("complex data"));
+        }
+
+        [Fact]
+        public void DetectFit_WithFloatType_WorksCorrectly()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<float, Matrix<float>, Vector<float>>();
+
+            var trainingBasicStats = BasicStats<float>.Empty();
+            var validationBasicStats = BasicStats<float>.Empty();
+            var testBasicStats = BasicStats<float>.Empty();
+            var trainingPredStats = PredictionStats<float>.Empty();
+            var validationPredStats = PredictionStats<float>.Empty();
+            var testPredStats = PredictionStats<float>.Empty();
+
+            var varianceProperty = typeof(BasicStats<float>).GetProperty("Variance");
+            varianceProperty?.SetValue(trainingBasicStats, 5.0f);
+            varianceProperty?.SetValue(validationBasicStats, 5.5f);
+            varianceProperty?.SetValue(testBasicStats, 5.2f);
+
+            var r2Property = typeof(PredictionStats<float>).GetProperty("R2");
+            r2Property?.SetValue(trainingPredStats, 0.75f);
+            r2Property?.SetValue(validationPredStats, 0.73f);
+            r2Property?.SetValue(testPredStats, 0.74f);
+
+            var evaluationData = new ModelEvaluationData<float, Matrix<float>, Vector<float>>
+            {
+                TrainingSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ActualBasicStats = trainingBasicStats,
+                    PredictionStats = trainingPredStats,
+                    ErrorStats = ErrorStats<float>.Empty()
+                },
+                ValidationSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ActualBasicStats = validationBasicStats,
+                    PredictionStats = validationPredStats,
+                    ErrorStats = ErrorStats<float>.Empty()
+                },
+                TestSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ActualBasicStats = testBasicStats,
+                    PredictionStats = testPredStats,
+                    ErrorStats = ErrorStats<float>.Empty()
+                }
+            };
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.True(System.Enum.IsDefined(typeof(FitType), result.FitType));
+        }
+
+        [Fact]
+        public void DetectFit_AdaptsToDataCharacteristics()
+        {
+            // Arrange
+            var detector = new AdaptiveFitDetector<double, Matrix<double>, Vector<double>>();
+
+            // Test with three different scenarios
+            var simpleData = CreateTestEvaluationData(1.0, 1.1, 1.05, 0.9, 0.88, 0.89);
+            var moderateData = CreateTestEvaluationData(8.0, 8.5, 8.2, 0.7, 0.68, 0.69);
+            var complexData = CreateTestEvaluationData(20.0, 21.0, 20.5, 0.4, 0.38, 0.39);
+
+            // Act
+            var simpleResult = detector.DetectFit(simpleData);
+            var moderateResult = detector.DetectFit(moderateData);
+            var complexResult = detector.DetectFit(complexData);
+
+            // Assert - Each should use a different detector
+            Assert.NotNull(simpleResult);
+            Assert.NotNull(moderateResult);
+            Assert.NotNull(complexResult);
+
+            Assert.Contains(simpleResult.Recommendations, r => r.Contains("Residual Analysis"));
+            Assert.Contains(moderateResult.Recommendations, r => r.Contains("Learning Curve"));
+            Assert.Contains(complexResult.Recommendations, r => r.Contains("Hybrid"));
+        }
+    }
+}
diff --git a/tests/AiDotNet.Tests/UnitTests/FitDetectors/BayesianFitDetectorTests.cs b/tests/AiDotNet.Tests/UnitTests/FitDetectors/BayesianFitDetectorTests.cs
new file mode 100644
index 000000000..b31319047
--- /dev/null
+++ b/tests/AiDotNet.Tests/UnitTests/FitDetectors/BayesianFitDetectorTests.cs
@@ -0,0 +1,432 @@
+using AiDotNet.Enums;
+using AiDotNet.FitDetectors;
+using AiDotNet.LinearAlgebra;
+using AiDotNet.Models;
+using AiDotNet.Models.Options;
+using AiDotNet.Statistics;
+using Xunit;
+
+namespace AiDotNetTests.UnitTests.FitDetectors
+{
+    /// <summary>
+    /// Unit tests for the BayesianFitDetector class.
+    /// </summary>
+    public class BayesianFitDetectorTests
+    {
+        private static ModelEvaluationData<double, Matrix<double>, Vector<double>> CreateTestEvaluationData(
+            double dic, double waic, double loo, double posteriorCheck, double bayesFactor)
+        {
+            var modelStats = ModelStats<double, Matrix<double>, Vector<double>>.Empty();
+
+            // Use reflection to set the internal calculated values for testing
+            var dicField = typeof(ModelStats<double, Matrix<double>, Vector<double>>).GetField("_dic",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var waicField = typeof(ModelStats<double, Matrix<double>, Vector<double>>).GetField("_waic",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var looField = typeof(ModelStats<double, Matrix<double>, Vector<double>>).GetField("_loo",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var posteriorField = typeof(ModelStats<double, Matrix<double>, Vector<double>>).GetField("_posteriorPredictiveCheck",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var bayesFactorField = typeof(ModelStats<double, Matrix<double>, Vector<double>>).GetField("_bayesFactor",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+
+            if (dicField == null || waicField == null || looField == null || posteriorField == null || bayesFactorField == null)
+            {
+                throw new InvalidOperationException("One or more private fields not found on ModelStats. This test needs to be updated to match the current implementation.");
+            }
+
+            dicField.SetValue(modelStats, dic);
+            waicField.SetValue(modelStats, waic);
+            looField.SetValue(modelStats, loo);
+            posteriorField.SetValue(modelStats, posteriorCheck);
+            bayesFactorField.SetValue(modelStats, bayesFactor);
+
+            return new ModelEvaluationData<double, Matrix<double>, Vector<double>>
+            {
+                ModelStats = modelStats,
+                TrainingSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty()
+                },
+                ValidationSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty()
+                },
+                TestSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty()
+                }
+            };
+        }
+
+        [Fact]
+        public void Constructor_WithDefaultOptions_InitializesSuccessfully()
+        {
+            // Arrange & Act
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void Constructor_WithCustomOptions_InitializesSuccessfully()
+        {
+            // Arrange
+            var options = new BayesianFitDetectorOptions
+            {
+                GoodFitThreshold = 3.0,
+                OverfitThreshold = 8.0,
+                UnderfitThreshold = 1.5
+            };
+
+            // Act
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>(options);
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void DetectFit_WithGoodFitMetrics_ReturnsGoodFit()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 3.0, // < GoodFitThreshold (5.0)
+                waic: 4.0, // < GoodFitThreshold (5.0)
+                loo: 3.5, // < GoodFitThreshold (5.0)
+                posteriorCheck: 0.8,
+                bayesFactor: 0.9
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.GoodFit, result.FitType);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("good fit"));
+        }
+
+        [Fact]
+        public void DetectFit_WithOverfitMetrics_ReturnsOverfit()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 12.0, // > OverfitThreshold (10.0)
+                waic: 8.0,
+                loo: 7.0,
+                posteriorCheck: 0.5,
+                bayesFactor: 0.4
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.Overfit, result.FitType);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("overfitting"));
+        }
+
+        [Fact]
+        public void DetectFit_WithUnderfitMetrics_ReturnsUnderfit()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 1.0, // < UnderfitThreshold (2.0)
+                waic: 1.5, // < UnderfitThreshold (2.0)
+                loo: 1.2, // < UnderfitThreshold (2.0)
+                posteriorCheck: 0.3,
+                bayesFactor: 0.2
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.Underfit, result.FitType);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("underfitting"));
+        }
+
+        [Fact]
+        public void DetectFit_WithUnstableMetrics_ReturnsUnstable()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 6.0, // Between thresholds but inconsistent
+                waic: 3.0,
+                loo: 11.0, // Mixed signals
+                posteriorCheck: 0.5,
+                bayesFactor: 0.5
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.Unstable, result.FitType);
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("unstable"));
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsConfidenceLevel()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 3.0,
+                waic: 4.0,
+                loo: 3.5,
+                posteriorCheck: 0.8,
+                bayesFactor: 0.9
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.True(result.ConfidenceLevel >= 0.0);
+            Assert.True(result.ConfidenceLevel <= 1.0);
+        }
+
+        [Fact]
+        public void DetectFit_IncludesBayesianMetricsInRecommendations()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 3.0,
+                waic: 4.0,
+                loo: 3.5,
+                posteriorCheck: 0.8,
+                bayesFactor: 0.9
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.Contains(result.Recommendations, r => r.Contains("DIC:"));
+            Assert.Contains(result.Recommendations, r => r.Contains("WAIC:"));
+            Assert.Contains(result.Recommendations, r => r.Contains("LOO:"));
+            Assert.Contains(result.Recommendations, r => r.Contains("Posterior Predictive Check:"));
+            Assert.Contains(result.Recommendations, r => r.Contains("Bayes Factor:"));
+        }
+
+        [Fact]
+        public void DetectFit_WithCustomThresholds_UsesCustomThresholds()
+        {
+            // Arrange
+            var options = new BayesianFitDetectorOptions
+            {
+                GoodFitThreshold = 6.0, // Higher threshold
+                OverfitThreshold = 12.0,
+                UnderfitThreshold = 3.0
+            };
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>(options);
+            var evaluationData = CreateTestEvaluationData(
+                dic: 5.5, // Would be above default GoodFitThreshold (5.0) but below custom (6.0)
+                waic: 5.8,
+                loo: 5.2,
+                posteriorCheck: 0.7,
+                bayesFactor: 0.8
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.GoodFit, result.FitType);
+        }
+
+        [Fact]
+        public void DetectFit_OverfitRecommendations_ContainsPriorAdvice()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 15.0,
+                waic: 12.0,
+                loo: 11.0,
+                posteriorCheck: 0.3,
+                bayesFactor: 0.2
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.Equal(FitType.Overfit, result.FitType);
+            Assert.Contains(result.Recommendations, r => r.Contains("priors") || r.Contains("prior"));
+        }
+
+        [Fact]
+        public void DetectFit_UnderfitRecommendations_ContainsComplexityAdvice()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 0.5,
+                waic: 0.8,
+                loo: 0.6,
+                posteriorCheck: 0.2,
+                bayesFactor: 0.1
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.Equal(FitType.Underfit, result.FitType);
+            Assert.Contains(result.Recommendations, r => r.Contains("complexity") || r.Contains("features"));
+        }
+
+        [Fact]
+        public void DetectFit_UnstableRecommendations_ContainsMCMCAdvice()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 7.0,
+                waic: 3.0,
+                loo: 12.0,
+                posteriorCheck: 0.4,
+                bayesFactor: 0.3
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.Equal(FitType.Unstable, result.FitType);
+            Assert.Contains(result.Recommendations, r => r.Contains("MCMC") || r.Contains("convergence") || r.Contains("multimodality"));
+        }
+
+        [Fact]
+        public void DetectFit_WithHighConfidenceMetrics_ReturnsHighConfidence()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 3.0,
+                waic: 4.0,
+                loo: 3.5,
+                posteriorCheck: 0.95, // High
+                bayesFactor: 0.98 // High
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.True(result.ConfidenceLevel >= 0.8);
+        }
+
+        [Fact]
+        public void DetectFit_WithLowConfidenceMetrics_ReturnsLowConfidence()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(
+                dic: 3.0,
+                waic: 4.0,
+                loo: 3.5,
+                posteriorCheck: 0.1, // Low
+                bayesFactor: 0.05 // Low
+            );
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.True(result.ConfidenceLevel <= 0.2);
+        }
+
+        [Fact]
+        public void DetectFit_WithFloatType_WorksCorrectly()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<float, Matrix<float>, Vector<float>>();
+            var modelStats = ModelStats<float, Matrix<float>, Vector<float>>.Empty();
+
+            var dicField = typeof(ModelStats<float, Matrix<float>, Vector<float>>).GetField("_dic",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var waicField = typeof(ModelStats<float, Matrix<float>, Vector<float>>).GetField("_waic",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var looField = typeof(ModelStats<float, Matrix<float>, Vector<float>>).GetField("_loo",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var posteriorField = typeof(ModelStats<float, Matrix<float>, Vector<float>>).GetField("_posteriorPredictiveCheck",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+            var bayesFactorField = typeof(ModelStats<float, Matrix<float>, Vector<float>>).GetField("_bayesFactor",
+                System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
+
+            dicField?.SetValue(modelStats, 3.0f);
+            waicField?.SetValue(modelStats, 4.0f);
+            looField?.SetValue(modelStats, 3.5f);
+            posteriorField?.SetValue(modelStats, 0.8f);
+            bayesFactorField?.SetValue(modelStats, 0.9f);
+
+            var evaluationData = new ModelEvaluationData<float, Matrix<float>, Vector<float>>
+            {
+                ModelStats = modelStats,
+                TrainingSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ErrorStats = ErrorStats<float>.Empty(),
+                    PredictionStats = PredictionStats<float>.Empty()
+                },
+                ValidationSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ErrorStats = ErrorStats<float>.Empty(),
+                    PredictionStats = PredictionStats<float>.Empty()
+                },
+                TestSet = new DataSetStats<float, Matrix<float>, Vector<float>>
+                {
+                    ErrorStats = ErrorStats<float>.Empty(),
+                    PredictionStats = PredictionStats<float>.Empty()
+                }
+            };
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.Equal(FitType.GoodFit, result.FitType);
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsNonEmptyRecommendations()
+        {
+            // Arrange
+            var detector = new BayesianFitDetector<double, Matrix<double>, Vector<double>>();
+            var evaluationData = CreateTestEvaluationData(3.0, 4.0, 3.5, 0.7, 0.8);
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.NotEmpty(result.Recommendations);
+            Assert.True(result.Recommendations.Count > 5); // At least fit advice + 5 metrics
+        }
+    }
+}
diff --git a/tests/AiDotNet.Tests/UnitTests/FitDetectors/HybridFitDetectorTests.cs b/tests/AiDotNet.Tests/UnitTests/FitDetectors/HybridFitDetectorTests.cs
new file mode 100644
index 000000000..cf4be62a1
--- /dev/null
+++ b/tests/AiDotNet.Tests/UnitTests/FitDetectors/HybridFitDetectorTests.cs
@@ -0,0 +1,461 @@
+using AiDotNet.Enums;
+using AiDotNet.FitDetectors;
+using AiDotNet.LinearAlgebra;
+using AiDotNet.Models;
+using AiDotNet.Models.Options;
+using AiDotNet.Statistics;
+using Xunit;
+
+namespace AiDotNetTests.UnitTests.FitDetectors
+{
+    /// <summary>
+    /// Unit tests for the HybridFitDetector class.
+    /// </summary>
+ public class HybridFitDetectorTests
+ {
+        private static ModelEvaluationData<double, Matrix<double>, Vector<double>> CreateTestEvaluationData()
+        {
+            return new ModelEvaluationData<double, Matrix<double>, Vector<double>>
+            {
+                TrainingSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty(),
+                    ActualBasicStats = BasicStats<double>.Empty(),
+                    PredictedBasicStats = BasicStats<double>.Empty()
+                },
+                ValidationSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty(),
+                    ActualBasicStats = BasicStats<double>.Empty(),
+                    PredictedBasicStats = BasicStats<double>.Empty()
+                },
+                TestSet = new DataSetStats<double, Matrix<double>, Vector<double>>
+                {
+                    ErrorStats = ErrorStats<double>.Empty(),
+                    PredictionStats = PredictionStats<double>.Empty(),
+                    ActualBasicStats = BasicStats<double>.Empty(),
+                    PredictedBasicStats = BasicStats<double>.Empty()
+                }
+            };
+        }
+
+        [Fact]
+        public void Constructor_WithRequiredDetectors_InitializesSuccessfully()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+
+            // Act
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void Constructor_WithCustomOptions_InitializesSuccessfully()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var options = new HybridFitDetectorOptions();
+
+            // Act
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector,
+                options
+            );
+
+            // Assert
+            Assert.NotNull(detector);
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsValidResult()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+            Assert.True(System.Enum.IsDefined(typeof(FitType), result.FitType));
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsConfidenceLevel()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.True(result.ConfidenceLevel >= 0.0);
+            Assert.True(result.ConfidenceLevel <= 1.0);
+        }
+
+        [Fact]
+        public void DetectFit_CombinesRecommendationsFromBothDetectors()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.Recommendations);
+            Assert.NotEmpty(result.Recommendations);
+            // Should have recommendations from both detectors
+            Assert.True(result.Recommendations.Count >= 2);
+        }
+
+        [Fact]
+        public void DetectFit_ProducesCombinedFitType()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+            var residualResult = residualAnalyzer.DetectFit(evaluationData);
+            var learningResult = learningCurveDetector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            // The hybrid result should consider both inputs
+            // (The exact combination depends on the internal logic)
+            Assert.True(
+                result.FitType == residualResult.FitType ||
+                result.FitType == learningResult.FitType ||
+                // Or it could be a combined assessment
+                System.Enum.IsDefined(typeof(FitType), result.FitType)
+            );
+        }
+
+        [Fact]
+        public void DetectFit_WithSimilarConfidenceLevels_AveragesConfidence()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+            var residualResult = residualAnalyzer.DetectFit(evaluationData);
+            var learningResult = learningCurveDetector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result.ConfidenceLevel);
+            Assert.NotNull(residualResult.ConfidenceLevel);
+            Assert.NotNull(learningResult.ConfidenceLevel);
+
+            // The combined confidence should be related to the individual confidences
+            // (exact formula depends on implementation, but should be reasonable)
+            Assert.True(result.ConfidenceLevel >= 0.0);
+            Assert.True(result.ConfidenceLevel <= 1.0);
+        }
+
+        [Fact]
+        public void DetectFit_ReturnsNonNullRecommendations()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            Assert.NotNull(result);
+            Assert.NotNull(result.Recommendations);
+        }
+
+        [Fact]
+        public void DetectFit_HandlesConsistentDetectorResults()
+        {
+            // Arrange
+            var residualAnalyzer = new ResidualAnalysisFitDetector<double, Matrix<double>, Vector<double>>();
+            var learningCurveDetector = new LearningCurveFitDetector<double, Matrix<double>, Vector<double>>();
+            var detector = new HybridFitDetector<double, Matrix<double>, Vector<double>>(
+                residualAnalyzer,
+                learningCurveDetector
+            );
+            var evaluationData = CreateTestEvaluationData();
+
+            // Act
+            var result = detector.DetectFit(evaluationData);
+
+            // Assert
+            // When both detectors agree, hybrid should respect that
+            Assert.NotNull(result);
+            Assert.True(System.Enum.IsDefined(typeof(FitType), result.FitType));
+        }
+
+ [Fact]
+ public void DetectFit_WithFloatType_WorksCorrectly()
+ {
+ // Arrange
+ var residualAnalyzer = new ResidualAnalysisFitDetector, Vector>();
+ var learningCurveDetector = new LearningCurveFitDetector, Vector>();
+ var detector = new HybridFitDetector, Vector>(
+ residualAnalyzer,
+ learningCurveDetector
+ );
+ var evaluationData = new ModelEvaluationData, Vector>
+ {
+ TrainingSet = new DataSetStats, Vector>
+ {
+ ErrorStats = ErrorStats.Empty(),
+ PredictionStats = PredictionStats.Empty(),
+ ActualBasicStats = BasicStats.Empty(),
+ PredictedBasicStats = BasicStats.Empty()
+ },
+ ValidationSet = new DataSetStats