numpy_neural_network.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A small fully-connected neural network written in plain NumPy, trained
with backpropagation to learn a two-input logic gate (OR by default)."""
from __future__ import print_function

__author__ = 'maxim'

import sys
from itertools import product

import numpy as np
import matplotlib.pyplot as plt


class NeuralNetwork:
    class __Layer:
        def __init__(self, args):
            # Learning rate. Raised from the original 5e-5 so the corrected
            # gradients below actually converge within the epoch budget.
            self.__epsilon = 5e-2
            # Small random weights break symmetry; one bias per output unit
            # (the original allocated one bias per sample, which is wrong).
            self.__weights = np.random.randn(args["previousLayerHeight"], args["height"]) * 0.01
            self.__biases = np.zeros((1, args["height"]))
            # Caches written by forward() and consumed by backward().
            self.__input = None
            self.__tanhGrad = None

        def __str__(self):
            return str(self.__weights)

        def forward(self, X):
            # Affine transform followed by tanh; cache the input and the
            # activation derivative for the backward pass. (The original
            # pre-multiplied X.T into a "localGrad" here, which cannot yield
            # the true gradient because the upstream delta is not known yet.)
            z = np.dot(X, self.__weights) + self.__biases
            self.__input = X
            self.__tanhGrad = self.__tanhPrime(z)
            return self.__tanh(z)

        def backward(self, delta):
            # delta is dLoss/dOutput for this layer. Push it through the
            # tanh, take a gradient step on the parameters, and return
            # dLoss/dInput for the layer below.
            dZ = delta * self.__tanhGrad
            dWeights = np.dot(self.__input.T, dZ)
            dBiases = np.sum(dZ, axis=0, keepdims=True)
            dInput = np.dot(dZ, self.__weights.T)
            self.__weights -= self.__epsilon * dWeights
            self.__biases -= self.__epsilon * dBiases
            return dInput

        def __tanh(self, z):
            return np.tanh(z)

        def __tanhPrime(self, z):
            return 1 - self.__tanh(z) ** 2

    def __init__(self, args):
        self.__inputDimensions = args["inputDimensions"]
        self.__outputDimensions = args["outputDimensions"]
        self.__hiddenDimensions = args["hiddenDimensions"]
        self.__layers = []
        self.__constructLayers()

    def __constructLayers(self):
        # First layer: input width onto the first hidden height (or straight
        # onto the output height if there are no hidden layers). Bias shape
        # follows "height", so no separate "biasHeight" argument is needed.
        self.__layers.append(
            self.__Layer(
                {
                    "previousLayerHeight": self.__inputDimensions[1],
                    "height": self.__hiddenDimensions[0][0] if len(self.__hiddenDimensions) > 0 else self.__outputDimensions[0]
                }
            )
        )
        # Remaining layers: each hidden height onto the next, ending at the
        # output height.
        for i in range(len(self.__hiddenDimensions)):
            self.__layers.append(
                self.__Layer(
                    {
                        "previousLayerHeight": self.__hiddenDimensions[i][0],
                        "height": self.__hiddenDimensions[i + 1][0] if i + 1 < len(self.__hiddenDimensions) else self.__outputDimensions[0]
                    }
                )
            )
    def forward(self, X):
        out = X
        for layer in self.__layers:
            out = layer.forward(out)
        return out

    def train(self, X, Y, loss, epoch=100000):
        for _ in range(epoch):
            YHat = self.forward(X)
            # Log the mean squared error once per epoch (the original logged
            # the raw residual sum, which is not a loss and can cancel out).
            loss.append(np.mean((Y - YHat) ** 2))
            # Gradient of the MSE loss with respect to the network output.
            delta = (YHat - Y) * (2.0 / Y.shape[0])
            # Backpropagate: each layer consumes the delta from the layer
            # above, updates its own parameters, and returns the delta for
            # the layer below.
            for layer in reversed(self.__layers):
                delta = layer.backward(delta)

    def printLayers(self):
        print("Layers:\n")
        for l in self.__layers:
            print(l)
        print("\n")

def main(args):
    # All four 2-bit inputs, paired with the OR-gate truth table.
    X = np.array([[x, y] for x, y in product([0, 1], repeat=2)])
    Y = np.array([[0], [1], [1], [1]])
    nn = NeuralNetwork(
        {
            # (height, width)
            "inputDimensions": (4, 2),
            "outputDimensions": (1, 1),
            "hiddenDimensions": [
                (6, 1)
            ]
        }
    )
    print("input:\n\n", X, "\n")
    print("expected output:\n\n", Y, "\n")
    nn.printLayers()
    print("prior to training:\n\n", nn.forward(X), "\n")
    loss = []
    nn.train(X, Y, loss)
    print("post training:\n\n", nn.forward(X), "\n")
    nn.printLayers()
    # One loss value per epoch; size the x axis from the log itself instead
    # of hard-coding the epoch count.
    fig, ax = plt.subplots()
    ax.plot(np.arange(len(loss)), np.array(loss))
    ax.set(xlabel="epoch", ylabel="loss", title="logic gate training")
    plt.show()


if __name__ == "__main__":
    main(sys.argv[1:])
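# Sketch (an assumption, not in the original file): the same network learns a
# different gate by swapping the truth table, everything else unchanged, e.g.
# AND:
#
#   Y = np.array([[0], [0], [0], [1]])
#   nn.train(X, Y, loss)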