Skip to content

Commit 614d8ab

Browse files
committed
Fix divide-by-zero vulnerability in NestedLearner
Addresses critical divide-by-zero errors in the Train and AdaptToNewTask methods when processing empty datasets.

## Issue

Both methods called `_numOps.Divide(..., _numOps.FromDouble(dataList.Count))` without checking whether `dataList.Count == 0`, causing runtime divide-by-zero errors.

## Locations Fixed

1. Train method (line 164): computing the average loss over training data
2. AdaptToNewTask method (line 228): computing the average new-task loss

## Solution

Added empty-dataset guards immediately after building `dataList`:

**Train method** — returns a MetaTrainingResult with:
- FinalMetaLoss = Zero
- FinalTaskLoss = Zero
- FinalAccuracy = Zero
- TotalIterations = current _globalStep
- TotalTimeMs = elapsed time from the stopwatch
- Converged = false

**AdaptToNewTask method** — returns a MetaAdaptationResult with:
- NewTaskLoss = Zero
- ForgettingMetric = Zero
- AdaptationSteps = 0
- AdaptationTimeMs = elapsed time from the stopwatch

## Behavior

- Preserves stopwatch timing (starts, stops, records elapsed time)
- Returns sensible default values for empty datasets
- Executes no divide operations when the count is zero
- Maintains method contracts and return types
- Does not throw exceptions for empty input (graceful handling)

## Impact

- ✅ Prevents runtime divide-by-zero errors
- ✅ Gracefully handles the edge case of empty datasets
- ✅ Maintains timing accuracy
- ✅ Returns semantically correct results (zero loss for no data)
1 parent a34e212 commit 614d8ab

File tree

1 file changed

+29
-0
lines changed

1 file changed

+29
-0
lines changed

src/NestedLearning/NestedLearner.cs

Lines changed: 29 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -145,6 +145,21 @@ public MetaTrainingResult<T> Train(
145145
var stopwatch = Stopwatch.StartNew();
146146
var dataList = trainingData.ToList();
147147

148+
// Guard against empty dataset
149+
if (dataList.Count == 0)
150+
{
151+
stopwatch.Stop();
152+
return new MetaTrainingResult<T>
153+
{
154+
FinalMetaLoss = _numOps.Zero,
155+
FinalTaskLoss = _numOps.Zero,
156+
FinalAccuracy = _numOps.Zero,
157+
TotalIterations = _globalStep,
158+
TotalTimeMs = stopwatch.Elapsed.TotalMilliseconds,
159+
Converged = false
160+
};
161+
}
162+
148163
T previousLoss = _numOps.FromDouble(double.MaxValue);
149164
int iterationsWithoutImprovement = 0;
150165
const int patience = 50;
@@ -205,6 +220,20 @@ public MetaAdaptationResult<T> AdaptToNewTask(
205220
_previousTaskParameters = _model.GetParameters().Clone();
206221

207222
var dataList = newTaskData.ToList();
223+
224+
// Guard against empty dataset
225+
if (dataList.Count == 0)
226+
{
227+
startTime.Stop();
228+
return new MetaAdaptationResult<T>
229+
{
230+
NewTaskLoss = _numOps.Zero,
231+
ForgettingMetric = _numOps.Zero,
232+
AdaptationSteps = 0,
233+
AdaptationTimeMs = startTime.Elapsed.TotalMilliseconds
234+
};
235+
}
236+
208237
T newTaskLoss = _numOps.Zero;
209238
int adaptationSteps = 0;
210239

0 commit comments

Comments (0)