@@ -16,6 +16,15 @@ public protocol Training {
 
 extension Training {
 
+
+    /**
+     The train method trains your Neural Network object. WARNING: Use this method only for the Perceptron and Adaline architectures;
+     the Backpropagation class has its own train method.
+
+     - parameter network: The NeuralNet object to train.
+
+     - returns: The trained NeuralNet object.
+     */
     public mutating func train(network: NeuralNet) -> NeuralNet {
 
         var weightsComingIn: ValueArray<Float>! = ValueArray<Float>()
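As a quick orientation, here is a minimal usage sketch of `train(network:)`. The `Perceptron` type and the bare `NeuralNet()` initializer are assumptions suggested by the warning in the doc comment above; only the `train(network:)` signature itself appears in this diff.

```swift
// Minimal sketch, not taken from this diff: Perceptron and NeuralNet()
// are assumed names/initializers; only train(network:) is shown above.
var net = NeuralNet()                 // assumed default initializer
var perceptron = Perceptron()         // assumed Training-conforming architecture
net = perceptron.train(network: net)  // train is mutating and returns the trained NeuralNet
```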
@@ -115,7 +125,14 @@ extension Training {
 
 
 
+    /**
+     The activationFunc method returns the appropriate output based on the function that is specified.
 
+     - parameter fncType: ActivationFunctionType enum case
+     - parameter value: A Float
+
+     - returns: A Float
+     */
     public func activationFunc(fncType: ActivationFunctionType, value: Float) throws -> Float {
 
         switch fncType {
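For context, `activationFunc(fncType:value:)` maps an input `Float` through the selected activation. Below is a standalone logistic sigmoid with the same `Float`-in/`Float`-out shape, as a sketch only; the concrete `ActivationFunctionType` cases handled by the `switch` are not visible in this diff.

```swift
import Foundation

// Sketch only: the logistic sigmoid, one common activation, written with the
// same Float-in/Float-out shape as activationFunc. The actual enum cases the
// switch handles are not shown in this diff.
func sigmoid(_ value: Float) -> Float {
    return Float(1.0 / (1.0 + exp(Double(-value))))
}
```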
@@ -132,6 +149,14 @@ extension Training {
         }
     }
 
+    /**
+     The derivativeFunc method returns the appropriate output based on the derivative of the specified function.
+
+     - parameter fncType: ActivationFunctionType enum case
+     - parameter value: A Float
+
+     - returns: A Float
+     */
     public func derivativeFunc(fncType: ActivationFunctionType, value: Float) throws -> Float {
 
         switch fncType {
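The companion sketch below shows the matching derivative of the sigmoid above, which is the kind of value `derivativeFunc(fncType:value:)` would return for a sigmoid-style case; again, the real enum cases are not shown here.

```swift
import Foundation

// Sketch only: derivative of the logistic sigmoid, expressed through the
// sigmoid's own output as s * (1 - s).
func sigmoidDerivative(_ value: Float) -> Float {
    let s = Float(1.0 / (1.0 + exp(Double(-value))))
    return s * (1.0 - s)
}
```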
@@ -257,6 +282,15 @@ extension Training {
 
     }
 
+
+
+    /**
+     The printTrainedNetwork method prints the results of a trained Neural Network object.
+
+     - parameter trainedNetwork: A trained Neural Network object.
+     - parameter singleLayer: Boolean indicating whether the Neural Network has a single layer (rather than multiple layers).
+
+     */
     public func printTrainedNetwork(trainedNetwork: NeuralNet, singleLayer: Bool) {
 
         if singleLayer {
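A hypothetical call site for `printTrainedNetwork(trainedNetwork:singleLayer:)`, reusing the names from the training sketch above; `singleLayer` would be `true` for Perceptron and Adaline networks.

```swift
// Hypothetical call site; `perceptron` and `net` come from the earlier sketch.
perceptron.printTrainedNetwork(trainedNetwork: net, singleLayer: true)
```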