#=
This script shows how a simple MLP net may be used for regression: how data
in memory may be used for training and evaluation, and how to obtain
predictions from the trained net.
=#
using MXNet
using Distributions
using PyPlot
# data generating process
generate_inputs(mu, sigma, n) = rand(MvNormal(mu, sigma), n)
output(data) = sin.(data[1, :]) .* sin.(data[2, :]) ./ (data[1, :] .* data[2, :])
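# quick sanity check (illustrative, not part of the original example): the
# target is a product of sinc-like ratios, so it approaches 1 near the origin
let probe = [0.001 2.0; 0.001 2.0]
    @assert isapprox(output(probe)[1], 1.0; atol=1e-3)  # sin(t)/t -> 1 as t -> 0
end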
# create training and evaluation data sets
mu = [0.0; 0.0]            # renamed from `mean` to avoid shadowing Base.mean
sigma = [1.0 0.0; 0.0 1.0] # renamed from `var` to avoid shadowing Base.var
samplesize = 5000
TrainInput = generate_inputs(mu, sigma, samplesize)
TrainOutput = output(TrainInput)
ValidationInput = generate_inputs(mu, sigma, samplesize)
ValidationOutput = output(ValidationInput)
# how to set up data providers using data in memory
batchsize = 100 # the providers capture this value; rebuild them to change it later
trainprovider = mx.ArrayDataProvider(:data => TrainInput, :label => TrainOutput;
                                     batch_size=batchsize, shuffle=true)
evalprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput;
                                    batch_size=batchsize, shuffle=true)
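# sanity-check the wiring: the data-provider interface reports the names and
# shapes each batch will carry (exact printed form may vary by MXNet.jl version)
println(mx.provide_data(trainprovider))   # shape of :data batches, e.g. (2, batchsize)
println(mx.provide_label(trainprovider))  # shape of :label batches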
# create an MLP with two hidden layers: try varying num_hidden, changing tanh
# to relu, or adding/removing a layer
data = mx.Variable(:data)
label = mx.Variable(:label)
net = @mx.chain mx.FullyConnected(data=data, num_hidden=10) =>
      mx.Activation(act_type=:tanh) =>
      mx.FullyConnected(num_hidden=3) =>
      mx.Activation(act_type=:tanh) =>
      mx.FullyConnected(num_hidden=1)
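# the composed symbol can be inspected: list_arguments names the learnable
# weights and biases along with the :data input (illustrative check)
println(mx.list_arguments(net))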
# squared error loss is appropriate for regression, don't change
cost = mx.LinearRegressionOutput(data=net, label=label)
# final model definition; don't change, except to select a GPU context
model = mx.FeedForward(cost, context=mx.cpu())
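# with a CUDA-enabled libmxnet the same model can live on a GPU instead; a
# sketch, assuming device 0 is available:
# model = mx.FeedForward(cost, context=mx.gpu(0))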
# set up the optimizer: select one and explore its parameters, if desired
#optimizer = mx.SGD(lr=0.01, momentum=0.9, weight_decay=0.00001)
optimizer = mx.ADAM()
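# ADAM's defaults usually suffice; its step size can also be set explicitly,
# e.g. mx.ADAM(lr=0.001) (keyword name as in older MXNet.jl releases; check
# ?mx.ADAM if your version differs)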
# train, reporting loss for the training and evaluation sets
# initial training with a small batch size, to get to a good neighborhood
mx.fit(model, optimizer, trainprovider, initializer=mx.NormalInitializer(0.0, 0.1),
       eval_metric=mx.MSE(), eval_data=evalprovider, n_epoch=500)
# more training with the full sample: the providers captured batch_size at
# construction, so rebuild them for the new batch size to take effect
batchsize = samplesize
trainprovider = mx.ArrayDataProvider(:data => TrainInput, :label => TrainOutput;
                                     batch_size=batchsize, shuffle=true)
evalprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput;
                                    batch_size=batchsize, shuffle=true)
mx.fit(model, optimizer, trainprovider, eval_metric=mx.MSE(), eval_data=evalprovider, n_epoch=500)
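# fit also accepts callbacks for progress reporting and checkpointing; a
# hedged sketch, assuming MXNet.jl's callback API:
# mx.fit(model, optimizer, trainprovider, eval_data=evalprovider,
#        eval_metric=mx.MSE(), n_epoch=500, callbacks=[mx.speedometer()])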
# obtain predictions
plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput)
predicted = mx.predict(model, plotprovider)  # renamed from `fit` to avoid confusion with mx.fit
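# a plain-Julia check of the final fit: mean squared error on the validation set
println("validation MSE: ", sum(abs2, vec(predicted) .- vec(ValidationOutput)) / samplesize)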
plot(ValidationOutput, predicted', ".")
xlabel("true")
ylabel("predicted")
title("outputs: true versus predicted. 45° line is what we hope for")
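# PyPlot can persist the figure to disk (filename is illustrative)
savefig("regression-true-vs-predicted.png")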