@@ -406,15 +406,23 @@ TNNetReLU = class(TNNetReLUBase)
406406 procedure Compute (); override;
407407 end ;
408408
409- // / This is an experimental layer - do not use it.
409+ // / This is a leaky ReLU with minimum and maximum values. You can
 410+ // scale leakiness via the Leakiness parameter.
410411 TNNetReLUL = class (TNNetReLUBase)
411412 private
412413 FScale, FLowLimit, FHighLimit: TNeuralFloat;
413414 public
414- constructor Create(LowLimit, HighLimit: integer); overload;
415+ constructor Create(LowLimit, HighLimit, Leakiness : integer); overload;
415416 procedure Compute (); override;
416417 end ;
417418
 419+ // / This is a ReLU with low limit = 0 and high limit = 6. You
420+ // can optionally make this activation function leaky.
421+ TNNetReLU6 = class (TNNetReLUL)
422+ public
423+ constructor Create(Leakiness: integer = 0 ); overload;
424+ end ;
425+
418426 // / Scaled Exponential Linear Unit
419427 // https://arxiv.org/pdf/1706.02515.pdf
420428 // You might need to lower your learning rate with SELU.
@@ -1799,6 +1807,12 @@ procedure RebuildNeuronListOnPreviousPatterns
17991807 end ;
18001808end ;
18011809
1810+ { TNNetReLU6 }
1811+ constructor TNNetReLU6.Create(Leakiness: integer);
1812+ begin
1813+ inherited Create(0 , 6 , Leakiness);
1814+ end ;
1815+
18021816{ TNNetSwish }
18031817
18041818procedure TNNetSwish.Compute ();
@@ -2934,14 +2948,15 @@ procedure TNNetMaxPoolPortable.Compute();
29342948
29352949{ TNNetReLUL }
29362950
2937- constructor TNNetReLUL.Create(LowLimit, HighLimit: integer);
2951+ constructor TNNetReLUL.Create(LowLimit, HighLimit, Leakiness : integer);
29382952begin
29392953 inherited Create();
2940- FScale := 0.001 ;
2954+ FScale := 0.001 *Leakiness ;
29412955 FHighLimit := HighLimit;
29422956 FLowLimit := LowLimit;
29432957 FStruct[0 ] := LowLimit;
29442958 FStruct[1 ] := HighLimit;
2959+ FStruct[2 ] := Leakiness;
29452960end ;
29462961
29472962procedure TNNetReLUL.Compute ();
@@ -9656,7 +9671,8 @@ function TNNet.CreateLayer(strData: string): TNNetLayer;
96569671 ' TNNetReLU' : Result := TNNetReLU.Create();
96579672 ' TNNetSwish' : Result := TNNetSwish.Create();
96589673 ' TNNetReLUSqrt' : Result := TNNetReLUSqrt.Create();
9659- ' TNNetReLUL' : Result := TNNetReLUL.Create(St[0 ], St[1 ]);
9674+ ' TNNetReLUL' : Result := TNNetReLUL.Create(St[0 ], St[1 ], St[2 ]);
9675+ ' TNNetReLU6' : Result := TNNetReLU6.Create(St[2 ]);
96609676 ' TNNetPower' : Result := TNNetPower.Create(St[0 ]);
96619677 ' TNNetSELU' : Result := TNNetSELU.Create();
96629678 ' TNNetLeakyReLU' : Result := TNNetLeakyReLU.Create();
@@ -9741,7 +9757,8 @@ function TNNet.CreateLayer(strData: string): TNNetLayer;
97419757 if S[0 ] = ' TNNetReLU' then Result := TNNetReLU.Create() else
97429758 if S[0 ] = ' TNNetSwish' then Result := TNNetSwish.Create() else
97439759 if S[0 ] = ' TNNetReLUSqrt' then Result := TNNetReLUSqrt.Create() else
9744- if S[0 ] = ' TNNetReLUL' then Result := TNNetReLUL.Create(St[0 ], St[1 ]) else
9760+ if S[0 ] = ' TNNetReLUL' then Result := TNNetReLUL.Create(St[0 ], St[1 ], St[2 ]) else
9761+ if S[0 ] = ' TNNetReLU6' then Result := TNNetReLU6.Create(St[2 ]) else
97459762 if S[0 ] = ' TNNetPower' then Result := TNNetPower.Create(St[0 ]) else
97469763 if S[0 ] = ' TNNetSELU' then Result := TNNetSELU.Create() else
97479764 if S[0 ] = ' TNNetLeakyReLU' then Result := TNNetLeakyReLU.Create() else
0 commit comments