@@ -1,3 +1,5 @@
+"""Neural Network Module"""
+
 import numpy as np
 from ..utils.features import prepare_for_training
 from ..utils.hypothesis import sigmoid, sigmoid_gradient
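For context, the `sigmoid` and `sigmoid_gradient` helpers imported above are the standard logistic function and its derivative; a minimal sketch of what the `..utils.hypothesis` module presumably provides:

```python
import numpy as np

def sigmoid(matrix):
    """Logistic activation: maps any real input into (0, 1)."""
    return 1 / (1 + np.exp(-matrix))

def sigmoid_gradient(matrix):
    """Derivative of the sigmoid, s * (1 - s); used during backpropagation."""
    s = sigmoid(matrix)
    return s * (1 - s)
```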
@@ -29,6 +31,8 @@ def __init__(self, data, labels, layers, epsilon, normalize_data=False):
         self.thetas = MultilayerPerceptron.thetas_init(layers, epsilon)

     def train(self, regularization_param=0, max_iterations=1000, alpha=1):
+        """Train the model"""
+
         # Flatten model thetas for gradient descent.
         unrolled_thetas = MultilayerPerceptron.thetas_unroll(self.thetas)

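The "flatten" step that `train` performs before optimizing likely concatenates every layer's theta matrix into a single 1-D vector, so gradient descent can update all parameters as one array. A hedged sketch of `thetas_unroll`, assuming `thetas` maps layer indices to per-layer weight matrices (the exact container is not shown in this diff):

```python
import numpy as np

def thetas_unroll(thetas):
    # Flatten each per-layer weight matrix and concatenate them end to end.
    # Assumes thetas is a dict of layer_index -> weight matrix.
    return np.hstack([theta.flatten() for theta in thetas.values()])
```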
@@ -65,7 +69,7 @@ def predict(self, data):

     @staticmethod
     def gradient_descent(
-        data, labels, unrolled_theta, layers, regularization_param, max_iteration, alpha
+            data, labels, unrolled_theta, layers, regularization_param, max_iteration, alpha
     ):
         """Gradient descent function.

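From the signature above, `gradient_descent` presumably loops up to `max_iteration` times, moving the unrolled parameters against the cost gradient scaled by the learning rate `alpha`. A self-contained sketch of that update rule, with the cost/gradient computation abstracted into a caller-supplied callable rather than the class's backpropagation helpers:

```python
import numpy as np

def gradient_descent(cost_and_gradient, unrolled_theta, max_iteration, alpha):
    """Generic batch gradient descent over a flattened parameter vector."""
    theta = unrolled_theta.copy()
    cost_history = []
    for _ in range(max_iteration):
        cost, gradient = cost_and_gradient(theta)
        cost_history.append(cost)
        theta -= alpha * gradient  # step opposite the gradient
    return theta, cost_history

# Toy usage: minimize f(theta) = theta . theta, whose gradient is 2 * theta.
theta0 = np.array([3.0, -2.0])
theta, history = gradient_descent(lambda t: (t @ t, 2 * t), theta0, 50, 0.1)
```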
@@ -187,6 +191,8 @@ def cost_function(data, labels, thetas, layers, regularization_param):

     @staticmethod
     def feedforward_propagation(data, thetas, layers):
+        """Feedforward propagation function"""
+
         # Calculate the total number of layers.
         num_layers = len(layers)

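A hedged sketch of what `feedforward_propagation` typically computes in a network like this: starting from the input features, each layer transition prepends a bias unit, multiplies by that layer's theta, and applies the sigmoid, leaving the output-layer activations. Shapes assume one `(layers[l+1], layers[l] + 1)` matrix per transition; the real method's conventions may differ:

```python
import numpy as np

def sigmoid(matrix):
    return 1 / (1 + np.exp(-matrix))

def feedforward_propagation(data, thetas, layers):
    num_examples = data.shape[0]
    activations = data  # input layer: raw features, no bias column yet
    for layer_index in range(len(layers) - 1):
        theta = thetas[layer_index]
        # Prepend the bias unit, then project into the next layer and squash.
        with_bias = np.hstack((np.ones((num_examples, 1)), activations))
        activations = sigmoid(with_bias @ theta.T)
    return activations  # one row of output-layer activations per example
```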