vanilla_mlp

This commit is contained in:
Ritchie
2017-07-13 11:28:12 +02:00
parent 36ca828e6b
commit 7dfe24aa76
2 changed files with 268 additions and 30 deletions

View File

@@ -177,11 +177,11 @@ class Network:
self.b[i + 1] = np.zeros(dimensions[i + 1])
self.activations[i + 2] = activations[i]
def feed_forward(self, x):
def _feed_forward(self, x):
"""
Execute a forward feed through the network.
:param x: (array) Batch of input data vectors.
:return: Node outputs and activations per layer. The numbering of the output is equivalent to the layer numbers.
:return: (tpl) Node outputs and activations per layer. The numbering of the output is equivalent to the layer numbers.
"""
# w(x) + b
@@ -198,7 +198,7 @@ class Network:
return z, a
def back_prop(self, z, a, y_true):
def _back_prop(self, z, a, y_true):
"""
The input dicts keys represent the layers of the net.
@@ -231,9 +231,9 @@ class Network:
update_params[i - 1] = (dw, delta)
for k, v in update_params.items():
self.update_w_b(k, v[0], v[1])
self._update_w_b(k, v[0], v[1])
def update_w_b(self, index, dw, delta):
def _update_w_b(self, index, dw, delta):
"""
Update weights and biases.
@@ -245,9 +245,14 @@ class Network:
self.w[index] -= self.learning_rate * dw
self.b[index] -= self.learning_rate * np.mean(delta, 0)
def fit(self, x, y_true, loss, epochs, batch_size, learning_rate=2e-2):
def fit(self, x, y_true, loss, epochs, batch_size, learning_rate=1e-3):
"""
:param x: (array) Containing parameters
:param y_true: (array) Containing one hot encoded labels.
:param loss: Loss class (MSE, CrossEntropy etc.)
:param epochs: (int) Number of epochs.
:param batch_size: (int)
:param learning_rate: (flt)
"""
if not x.shape[0] == y_true.shape[0]:
raise ValueError("Length of x and y arrays don't match")
@@ -265,51 +270,37 @@ class Network:
for j in range(x.shape[0] // batch_size):
k = j * batch_size
l = (j + 1) * batch_size
z, a = self.feed_forward(x_[k:l])
self.back_prop(z, a, y_[k:l])
z, a = self._feed_forward(x_[k:l])
self._back_prop(z, a, y_[k:l])
if (i + 1) % 10 == 0:
_, a = self.feed_forward(x)
_, a = self._feed_forward(x)
print("Loss:", self.loss.loss(y_true, a[self.n_layers]))
# NOTE(review): this span is a rendered commit-diff, not plain source — it
# interleaves the removed line (old public `feed_forward` call) with the
# added lines (new docstring plus the renamed `_feed_forward` call) of the
# same method; only the added lines exist in the post-commit file.
def predict(self, x):
# removed by this commit (part of the rename to the private `_feed_forward`):
_, a = self.feed_forward(x)
"""
:param x: (array) Containing parameters
:return: (array) A 2D array of shape (n_cases, n_classes).
"""
# added by this commit: run the forward pass, then return the activations of
# the final layer (indexed by self.n_layers) as the per-class predictions.
_, a = self._feed_forward(x)
return a[self.n_layers]
if __name__ == "__main__":
from sklearn import datasets
import sklearn.metrics
np.random.seed(1)
# # Load data
# data = datasets.load_iris()
# x = data["data"]
# x = (x - x.mean()) / x.std()
# y = data["target"]
# #y = np.expand_dims(data["target"], 1)
#
# # one hot encoding
# y = np.eye(3)[y]
#
# nn = Network((4, 8, 3), (Relu, Sigmoid))
#
# #nn.fit(x[:2], y[:2], MSE, 1, batch_size=2)
# nn.fit(x, y, MSE, 1000, 16)
data = datasets.load_digits()
x = data["data"]
y = data["target"]
y = np.eye(10)[y]
nn = Network((64, 32, 10), (Relu, Sigmoid))
nn.fit(x, y, MSE, 100, 2)
nn = Network((64, 10, 10), (Relu, Sigmoid))
nn.fit(x, y, MSE, 100, 15, learning_rate=1e-3)
y_ = nn.predict(x)
a = np.argmax(y_, 1)
for i in range(a.size):
print(a[i], y[i], "\t", np.round(y_[i], 3))
y_true = []
y_pred = []
for i in range(len(y)):