iris_demo.py
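"""Train a small multi-layer network on the Iris dataset and report the
train/validation loss, validation accuracy, and a confusion matrix."""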
import numpy as np
from sklearn.metrics import confusion_matrix

from analysis.confustion_matrix import plot_confusion_matrix
from analysis.one_hot_encoder import indices_to_one_hot
from network.network import MultiLayerNetwork
from network.preprocessor import Preprocessor
from network.trainer import Trainer


def main():
    class_labels = ['Virginica', 'Versicolor', 'Setosa']

    dat = np.loadtxt(
        "dataset/iris/iris.data",
        delimiter=',',
    )
    np.random.shuffle(dat)

    # Take the first 4 columns as the input X
    x = dat[:, :4]

    # Convert the integer label [0, 1, 2] representation to one-hot encoding
    y_labels = dat[:, 4:].astype(int)
    y = indices_to_one_hot(y_labels, len(class_labels))
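    # indices_to_one_hot is assumed to map each integer label to a length-3
    # indicator row, e.g. label 2 -> [0, 0, 1], giving y a shape of (N, 3).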
    split_idx = int(0.5 * len(x))
    x_train = x[:split_idx]
    y_train = y[:split_idx]
    x_val = x[split_idx:]
    y_val = y[split_idx:]

    prep_input = Preprocessor(x_train)
    x_train_pre = prep_input.apply(x_train)
    x_val_pre = prep_input.apply(x_val)
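    # The Preprocessor is fitted on the training split only and then applied
    # to both splits, so no statistics leak from the validation data; it is
    # assumed to perform feature scaling (e.g. min-max normalisation).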

    input_dim = 4
    neurons = [16, 3]
    activations = ["relu", "identity"]
    net = MultiLayerNetwork(input_dim, neurons, activations)
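    # Architecture: 4 input features -> 16 ReLU hidden units -> 3 output
    # units with identity activation; the softmax is presumably applied
    # inside the cross-entropy loss rather than in the network itself.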

    trainer = Trainer(
        network=net,
        batch_size=8,
        nb_epoch=1000,
        learning_rate=0.007,
        loss_fun="cross_entropy",
        shuffle_flag=True,
    )
    trainer.train(x_train_pre, y_train)

    print("Train loss = ", trainer.eval_loss(x_train_pre, y_train))
    print("Validation loss = ", trainer.eval_loss(x_val_pre, y_val))

    preds = net(x_val_pre).argmax(axis=1).squeeze()
    targets = y_val.argmax(axis=1).squeeze()
    accuracy = (preds == targets).mean()
    print("Validation accuracy: {}".format(accuracy))
    # Confusion matrix
    cm = confusion_matrix(targets, preds)
    plot_confusion_matrix(cm, class_labels)


if __name__ == "__main__":
    main()