@@ -128,19 +128,20 @@ def activation(self, x, f="sigmoid", der=False):
128128 """
129129 This function process values of layer outputs with activation function.
130130
131- Args:
131+ **Args:**
132132
133133 * `x` : array to process (1-dimensional array)
134134
135- Kwargs:
135+ **Kwargs:**
136136
137137 * `f` : activation function
138138
139139 * `der` : normal output, or its derivation (bool)
140140
141- Returns:
141+ **Returns:**
142142
143143 * values processed with activation function (1-dimensional array)
144+
144145 """
145146 if f == "sigmoid" :
146147 if der :
@@ -155,14 +156,15 @@ def predict(self, x):
155156 """
156157 This function make forward pass through this layer (no update).
157158
158- Args:
159+ **Args:**
159160
160161 * `x` : input vector (1-dimensional array)
161162
162- Returns:
163+ **Returns:**
163164
164165 * `y` : output of MLP (float or 1-diemnsional array).
165166 Size depends on number of nodes in this layer.
167+
166168 """
167169 self .x [1 :] = x
168170 self .y = self .activation (np .sum (self .w * self .x , axis = 1 ), f = self .f )
@@ -173,12 +175,12 @@ def update(self, w, e):
173175 This function make update according provided target
174176 and the last used input vector.
175177
176- Args:
178+ **Args:**
177179
178180 * `d` : target (float or 1-dimensional array).
179181 Size depends on number of MLP outputs.
180182
181- Returns:
183+ **Returns:**
182184
183185 * `w` : weights of the layers (2-dimensional layer).
184186 Every row represents one node.
@@ -201,7 +203,7 @@ class NetworkMLP():
201203 """
202204 This class represents a Multi-layer Perceptron neural network.
203205
204- Args:
206+ **Args:**
205207
206208 * `layers` : array describing hidden layers of network
207209 (1-dimensional array of integers). Every number in array represents
@@ -212,7 +214,7 @@ class NetworkMLP():
212214
213215 * `n_input` : number of network inputs (int).
214216
215- Kwargs:
217+ **Kwargs:**
216218
217219 * `outputs` : number of network outputs (int). Default is 1.
218220
@@ -267,7 +269,7 @@ def train(self, x, d, epochs=10, shuffle=False):
267269 """
268270 Function for batch training of MLP.
269271
270- Args:
272+ **Args:**
271273
272274 * `x` : input array (2-dimensional array).
273275 Every row represents one input vector (features).
@@ -276,15 +278,15 @@ def train(self, x, d, epochs=10, shuffle=False):
276278 Every row represents target for one input vector.
277279 Target can be one or more values (in case of multiple outputs).
278280
279- Kwargs:
281+ **Kwargs:**
280282
281283 * `epochs` : amount of epochs (int). That means how many times
282284 the MLP will iterate over the passed set of data (`x`, `d`).
283285
284286 * `shuffle` : if true, the order of inputs and outpust are shuffled (bool).
285287 That means the pairs input-output are in different order in every epoch.
286288
287- Returns:
289+ **Returns:**
288290
289291 * `e`: output vector (m-dimensional array). Every row represents
290292 error (or errors) for an input and output in given epoch.
@@ -293,6 +295,7 @@ def train(self, x, d, epochs=10, shuffle=False):
293295
294296 * `MSE` : mean squared error (1-dimensional array). Every value
295297 stands for MSE of one epoch.
298+
296299 """
297300 # measure the data and check if the dimmension agree
298301 N = len (x )
@@ -335,15 +338,16 @@ def run(self, x):
335338 """
336339 Function for batch usage of already trained and tested MLP.
337340
338- Args:
341+ **Args:**
339342
340343 * `x` : input array (2-dimensional array).
341344 Every row represents one input vector (features).
342345
343- Returns:
346+ **Returns:**
344347
345348 * `y`: output vector (n-dimensional array). Every row represents
346349 output (outputs) for an input vector.
350+
347351 """
348352 # measure the data and check if the dimmension agree
349353 try :
@@ -365,7 +369,7 @@ def test(self, x, d):
365369 """
366370 Function for batch test of already trained MLP.
367371
368- Args:
372+ **Args:**
369373
370374 * `x` : input array (2-dimensional array).
371375 Every row represents one input vector (features).
@@ -374,10 +378,11 @@ def test(self, x, d):
374378 Every row represents target for one input vector.
375379 Target can be one or more values (in case of multiple outputs).
376380
377- Returns:
381+ **Returns:**
378382
379383 * `e`: output vector (n-dimensional array). Every row represents
380384 error (or errors) for an input and output.
385+
381386 """
382387 # measure the data and check if the dimmension agree
383388 N = len (x )
@@ -410,14 +415,15 @@ def predict(self, x):
410415 """
411416 This function make forward pass through MLP (no update).
412417
413- Args:
418+ **Args:**
414419
415420 * `x` : input vector (1-dimensional array)
416421
417- Returns:
422+ **Returns:**
418423
419424 * `y` : output of MLP (float or 1-diemnsional array).
420425 Size depends on number of MLP outputs.
426+
421427 """
422428 # forward pass to hidden layers
423429 for l in self .layers :
@@ -435,15 +441,16 @@ def update(self, d):
435441 This function make update according provided target
436442 and the last used input vector.
437443
438- Args:
444+ **Args:**
439445
440446 * `d` : target (float or 1-dimensional array).
441447 Size depends on number of MLP outputs.
442448
443- Returns:
449+ **Returns:**
444450
445451 * `e` : error used for update (float or 1-diemnsional array).
446452 Size correspond to size of input `d`.
453+
447454 """
448455 # update output layer
449456 e = d - self .y
0 commit comments