Skip to content

Commit 17d07d0

Browse files
ooples authored and claude committed
fix: systematic meta-learning compilation error fixes
Removed ITask interface and fixed fundamental issues:
- Deleted ITask.cs completely (renamed to IMetaLearningTask)
- Fixed Convert.ToDouble patterns throughout codebase
- Added missing properties to IMetaLearningTask: QuerySetX, QuerySetY, SupportSetX, SupportSetY
- Replaced all ITask references with IMetaLearningTask
- Removed ALL generic constraints per user requirements

Remaining Issues (396 errors):
- ILossFunction interface mismatch: CalculateLoss expects Vector&lt;T&gt; but algorithms pass TOutput
- Missing Tensor.Flatten() method
- Missing ModelMetadata.AdditionalMetadata property

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 5218dd9 commit 17d07d0

14 files changed

+63
-114
lines changed

src/Interfaces/IMetaLearningTask.cs

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -100,4 +100,36 @@ public interface IMetaLearningTask<T, TInput, TOutput>
100100
/// - Other relevant attributes
101101
/// </remarks>
102102
Dictionary<string, object>? Metadata { get; }
103+
104+
/// <summary>
105+
/// Gets the query set inputs (alias for QueryInput).
106+
/// </summary>
107+
/// <value>
108+
/// Input data for evaluating adaptation performance.
109+
/// </value>
110+
TInput QuerySetX => QueryInput;
111+
112+
/// <summary>
113+
/// Gets the query set outputs (alias for QueryOutput).
114+
/// </summary>
115+
/// <value>
116+
/// True labels for evaluating query set performance.
117+
/// </value>
118+
TOutput QuerySetY => QueryOutput;
119+
120+
/// <summary>
121+
/// Gets the support set inputs (alias for SupportInput).
122+
/// </summary>
123+
/// <value>
124+
/// Input data containing examples for task adaptation.
125+
/// </value>
126+
TInput SupportSetX => SupportInput;
127+
128+
/// <summary>
129+
/// Gets the support set outputs (alias for SupportOutput).
130+
/// </summary>
131+
/// <value>
132+
/// Output data containing labels corresponding to SupportInput.
133+
/// </value>
134+
TOutput SupportSetY => SupportOutput;
103135
}

src/MetaLearning/Algorithms/MAMLAlgorithm.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
143143
/// <param name="model">The model to adapt.</param>
144144
/// <param name="task">The task to adapt to.</param>
145145
/// <returns>The adapted parameters.</returns>
146-
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, ITask<T, TInput, TOutput> task)
146+
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, IMetaLearningTask<T, TInput, TOutput> task)
147147
{
148148
var parameters = model.GetParameters();
149149

@@ -167,7 +167,7 @@ private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, ITas
167167
/// <param name="initialParams">The initial parameters before adaptation.</param>
168168
/// <param name="task">The task to compute meta-gradients for.</param>
169169
/// <returns>The meta-gradient vector.</returns>
170-
private Vector<T> ComputeMetaGradients(Vector<T> initialParams, ITask<T, TInput, TOutput> task)
170+
private Vector<T> ComputeMetaGradients(Vector<T> initialParams, IMetaLearningTask<T, TInput, TOutput> task)
171171
{
172172
// Clone meta model
173173
var model = CloneModel();

src/MetaLearning/Algorithms/MANNAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -218,7 +218,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
218218
/// <summary>
219219
/// Trains the controller and memory mechanisms on a single episode.
220220
/// </summary>
221-
private T TrainEpisode(ITask<T, TInput, TOutput> task)
221+
private T TrainEpisode(IMetaLearningTask<T, TInput, TOutput> task)
222222
{
223223
T episodeLoss = NumOps.Zero;
224224

src/MetaLearning/Algorithms/MatchingNetworksAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
186186
/// </summary>
187187
/// <param name="task">The meta-learning task containing support and query sets.</param>
188188
/// <returns>The episode loss.</returns>
189-
private T TrainEpisode(ITask<T, TInput, TOutput> task)
189+
private T TrainEpisode(IMetaLearningTask<T, TInput, TOutput> task)
190190
{
191191
// Step 1: Combine support and query examples for full context encoding
192192
var allInputs = CombineInputs(task.SupportInput, task.QueryInput);

src/MetaLearning/Algorithms/MetaSGDAlgorithm.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
198198
/// <summary>
199199
/// Trains the model and per-parameter optimizer on a single episode.
200200
/// </summary>
201-
private T TrainEpisode(ITask<T, TInput, TOutput> task)
201+
private T TrainEpisode(IMetaLearningTask<T, TInput, TOutput> task)
202202
{
203203
// Get initial parameters
204204
var initialParams = _model.GetParameters();
@@ -243,7 +243,7 @@ private T TrainEpisode(ITask<T, TInput, TOutput> task)
243243
/// Performs adaptation using the learned per-parameter optimizer.
244244
/// </summary>
245245
private void AdaptWithLearnedOptimizer(
246-
ITask<T, TInput, TOutput> task,
246+
IMetaLearningTask<T, TInput, TOutput> task,
247247
MetaSGDModel<T, TInput, TOutput> adaptedModel)
248248
{
249249
var currentParams = adaptedModel.GetParameters();

src/MetaLearning/Algorithms/ProtoNetsAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
202202
/// 5. Compute cross-entropy loss
203203
/// 6. Backpropagate and update encoder
204204
/// </remarks>
205-
private T TrainEpisode(ITask<T, TInput, TOutput> task)
205+
private T TrainEpisode(IMetaLearningTask<T, TInput, TOutput> task)
206206
{
207207
// Step 1: Encode support set examples to feature space
208208
var supportFeatures = EncodeExamples(task.SupportInput);

src/MetaLearning/Algorithms/RelationNetworkAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
192192
/// </summary>
193193
/// <param name="task">The meta-learning task containing support and query sets.</param>
194194
/// <returns>The episode loss.</returns>
195-
private T TrainEpisode(ITask<T, TInput, TOutput> task)
195+
private T TrainEpisode(IMetaLearningTask<T, TInput, TOutput> task)
196196
{
197197
// Step 1: Encode support and query examples
198198
var supportFeatures = EncodeExamples(task.SupportInput);

src/MetaLearning/Algorithms/ReptileAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
164164
/// <param name="model">The model to adapt.</param>
165165
/// <param name="task">The task to adapt to.</param>
166166
/// <returns>The adapted parameters.</returns>
167-
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, ITask<T, TInput, TOutput> task)
167+
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, IMetaLearningTask<T, TInput, TOutput> task)
168168
{
169169
var parameters = model.GetParameters();
170170

src/MetaLearning/Algorithms/SEALAlgorithm.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -259,7 +259,7 @@ private Vector<T> ComputeAdaptiveLearningRates(Vector<T> gradients)
259259
/// </summary>
260260
/// <param name="task">The task to compute meta-gradients for.</param>
261261
/// <returns>The meta-gradient vector.</returns>
262-
private Vector<T> ComputeMetaGradients(ITask<T, TInput, TOutput> task)
262+
private Vector<T> ComputeMetaGradients(IMetaLearningTask<T, TInput, TOutput> task)
263263
{
264264
// Clone meta model for gradient computation
265265
var model = CloneModel();

src/MetaLearning/Algorithms/iMAMLAlgorithm.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -150,7 +150,7 @@ public override IModel<TInput, TOutput, ModelMetadata<T>> Adapt(IMetaLearningTas
150150
/// <param name="model">The model to adapt.</param>
151151
/// <param name="task">The task to adapt to.</param>
152152
/// <returns>The adapted parameters.</returns>
153-
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, ITask<T, TInput, TOutput> task)
153+
private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, IMetaLearningTask<T, TInput, TOutput> task)
154154
{
155155
var parameters = model.GetParameters();
156156

@@ -178,7 +178,7 @@ private Vector<T> InnerLoopAdaptation(IFullModel<T, TInput, TOutput> model, ITas
178178
private Vector<T> ComputeImplicitMetaGradients(
179179
Vector<T> initialParams,
180180
Vector<T> adaptedParams,
181-
ITask<T, TInput, TOutput> task)
181+
IMetaLearningTask<T, TInput, TOutput> task)
182182
{
183183
// Step 1: Compute gradient of query loss with respect to adapted parameters
184184
var model = CloneModel();

0 commit comments

Comments (0)