# testANNsin.jl
# test simple ANN to predict sin function
# DL rev 29-Dec-2018
using Knet           # Adam optimizer, update!
using PyPlot         # plotting
using AutoGrad       # grad()
using Statistics     # mean()
## using mymod
# Linear per-point model: one weight per sample, prediction is w[i]*x[i].
predict(w,x) = w.*x
# Mean squared error between targets and predictions.
loss(w,x,y) = mean(abs2, y - predict(w,x))
# Gradient of the loss with respect to the weights (first argument).
lossgradient = grad(loss)
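## Illustrative sanity check (assumed example, not part of the original script):
## with one weight per point the prediction is elementwise w[i]*x[i] and the
## loss is the mean squared residual, e.g.
##   predict([1.0, 2.0], [3.0, 4.0])            # -> [3.0, 8.0]
##   loss([1.0, 2.0], [3.0, 4.0], [3.0, 9.0])   # -> mean([0.0, 1.0]) == 0.5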
# Adam hyperparameters
const lr_    = 0.1;
const beta1_ = 0.9;
const beta2_ = 0.95;
const eps_   = 1e-6;
# Full-batch gradient descent on the weights P, reusing the Adam state in prms.
function train(P, prms, x, y, maxiters = 200)
    for iter = 1:maxiters
        g = lossgradient(P, x, y)   # gradient of the mean loss over all points
        update!(P, g, prms)         # Adam update, in place
    end
    return P
end
# Average per-point squared error over the given time points.
function test(P, timepoints, y)
    sumloss = numloss = 0.0
    for i in 1:length(timepoints)
        sumloss += loss(P[i], timepoints[i], y[i])
        numloss += 1
    end
    return sumloss/numloss
end
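## Note (assumed, not in the original script): for this per-point model the
## averaged per-point error equals the full-batch loss, i.e. test(w, x, y) ≈ loss(w, x, y).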
function mainTestANNsin()
    nPoints = 1000;
    y = zeros(Float64, nPoints);
    w = zeros(Float64, nPoints);            # one weight per sample point
    x = rand(nPoints)*pi*2.0;               # sample points in [0, 2π)
    for i = 1:nPoints
        y[i] = sin(x[i]) + rand()*0.05;     # noisy sine targets
    end
    # init the Adam optimizer; its state is shared across the train calls below
    prms = Adam(lr=lr_, beta1=beta1_, beta2=beta2_, eps=eps_)
    _maxIters_ = 50;
    @time for iters = 1:_maxIters_
        train(w, prms, x, y, 100)
        currentloss = test(w, x, y)
        if mod(iters, 10) == 0              # report every 10 outer iterations
            println("iteration: ", iters, "  loss: ", currentloss)
        end
    end
    figure(1)
    clf();
    plot(x, y, "ob", markersize=6, label="sin")
    plot(x, w.*x, "or", markersize=2, label="approximation")
    xlabel("x")
    ylabel("y")
    legend();
end
mainTestANNsin();
##foo()
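## Optional usage sketch (assumed extension, not part of the original script):
## train a fresh weight vector directly and report its final loss.
# let
#     x_demo = rand(100) .* 2pi
#     y_demo = sin.(x_demo) .+ rand(100) .* 0.05
#     w_demo = zeros(100)
#     opt_demo = Adam(lr=lr_, beta1=beta1_, beta2=beta2_, eps=eps_)
#     train(w_demo, opt_demo, x_demo, y_demo, 500)
#     println("demo loss: ", test(w_demo, x_demo, y_demo))
# end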