-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathErrorLayer.py
More file actions
70 lines (58 loc) · 1.87 KB
/
ErrorLayer.py
File metadata and controls
70 lines (58 loc) · 1.87 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
from abc import ABC, abstractmethod
import numpy as np
class LossFunction(ABC):
    """
    Common interface that every loss function must implement.

    A concrete loss provides a `forward` pass reducing targets and
    predictions to a single scalar, and a `backward` pass producing the
    gradient of that scalar with respect to the predictions.

    Methods
    -------
    forward(y_true, y_pred)
        Compute the scalar loss value.
    backward(y_true, y_pred)
        Compute the gradient of the loss with respect to the predictions.
    """

    @abstractmethod
    def forward(self, y_true: np.ndarray, y_pred: np.ndarray) -> float:
        """
        Compute the loss value.

        Parameters
        ----------
        y_true : np.ndarray
            Ground truth target values.
        y_pred : np.ndarray
            Predicted values from the model.

        Returns
        -------
        float
            Scalar loss value.
        """

    @abstractmethod
    def backward(self, y_true: np.ndarray, y_pred: np.ndarray) -> np.ndarray:
        """
        Compute the gradient of the loss with respect to predictions.

        Parameters
        ----------
        y_true : np.ndarray
            Ground truth target values.
        y_pred : np.ndarray
            Predicted values from the model.

        Returns
        -------
        np.ndarray
            Gradient of the loss with respect to `y_pred`.
        """

    def __repr__(self) -> str:
        """
        Return a debug-friendly representation listing known hyperparameters.

        Only the optional `epsilon` and `delta` attributes are reported,
        in that fixed order, when a subclass defines them.
        """
        shown = [
            f"{attr}={getattr(self, attr)}"
            for attr in ("epsilon", "delta")
            if hasattr(self, attr)
        ]
        params = ", ".join(shown) if shown else "no parameters"
        return f"{self.__class__.__name__}({params})"