-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.m
134 lines (115 loc) · 4.01 KB
/
main.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
%% Shuffled Frog Leaping Algorithm (SFLA) NN Learner
% The Shuffled Frog Leaping Algorithm (SFLA) is used to refine the weights
% and biases of a trained neural network. You can use your own data or
% change the NN structure. Tune the parameters to suit your data and system.
%% Clearing and Loading Data
% Loads train/test data, rescales it to [0,1], builds a single-hidden-layer
% feedforward net, trains it with gradient descent, and exposes an error
% handle `h` for the SFLA optimizer to minimize.
clear;
data=JustLoad();        % project-local loader; assumed to return fields
                        % TrainInputs/TestInputs/TrainTargets/TestTargets — TODO confirm
warning('off');
% Inputs (Train and Test) — rescaled to [0,1] and transposed to
% (features x samples), the orientation the NN toolbox expects.
inputs=rescale(data.TrainInputs)';
TstInputs=rescale(data.TestInputs)';
% Targets (Train and Test)
targets=rescale(data.TrainTargets)';
TstTargets=rescale(data.TestTargets)';
sizenn=size(inputs);sizenn=sizenn(1,1);   % number of input features
% Number of neurons----------------------
n = 12;
%----------------------------------------
% 'trainlm' Levenberg-Marquardt
% 'trainbr' Bayesian Regularization (good)
% 'trainbfg' BFGS Quasi-Newton
% 'trainrp' Resilient Backpropagation
% 'trainscg' Scaled Conjugate Gradient
% 'traincgb' Conjugate Gradient with Powell/Beale Restarts
% 'traincgf' Fletcher-Powell Conjugate Gradient
% 'traincgp' Polak-Ribiére Conjugate Gradient
% 'trainoss' One Step Secant (good)
% 'traingdx' Variable Learning Rate Gradient Descent
% 'traingdm' Gradient Descent with Momentum
% 'traingd' Gradient Descent
% Creating the NN ----------------------------
net = feedforwardnet(n,'traingdx');
%---------------------------------------------
% Train the neural network on the training set
[net, tr] = train(net,inputs, targets);
% BUG FIX: perform(net, targets, outputs) expects targets then network
% outputs; the original passed (inputs, targets), which compared the raw
% inputs against the targets instead of measuring the network's error.
perf = perform(net, targets, net(inputs));   % mse of the trained net
% Create Handle for Error — x is a candidate weight/bias vector; NMSE is a
% project-local objective (normalized MSE) minimized by SFLA below.
h = @(x) NMSE(x, net, inputs, targets);
%% SFLA NN
% Optimize the trained net's weight/bias vector with SFLA, then report
% train/test error statistics.
% Dimension = (#inputs * n) input weights + n hidden biases
%           + n output-layer weights + 1 output bias (single output assumed).
[x, err_ga] = sfla(h, sizenn*n+n+n+1);
%-----------------------------------------
net = setwb(net, x');           % install the optimized weights and biases
% Optimized NN Weights and Bias
getwb(net);
% Error for Optimized NN (normalized MSE on the training set)
Error = targets - net(inputs);
calc = mean(Error.^2)/mean(var(targets',1));
%-----------------------------------------
Outputs=net(inputs);
TestOutputs=net(TstInputs);
% Train
TrMSE=mse(targets,Outputs);
TrRMSE=sqrt(TrMSE);
TrMAE=mae(targets,Outputs);
TrCC= corrcoef(targets,Outputs);        % 2x2 correlation matrix [1 r; r 1]
% BUG FIX: the original computed TrCC*TrCC (a matrix product) and printed
% its (1,2) element, which equals 2r — not r^2. R-squared is the squared
% off-diagonal correlation coefficient.
TrR_Squared = TrCC(1,2)^2;
%Test
TsMSE=mse(TstTargets,TestOutputs);
TsRMSE=sqrt(TsMSE);
TsMAE=mae(TstTargets,TestOutputs);
TsCC = corrcoef(TstTargets,TestOutputs);
TsR_Squared = TsCC(1,2)^2;              % same fix as the training-set R^2
% Statistics
% Train
fprintf('Training "MSE" Is = %0.4f.\n',TrMSE)
fprintf('Training "RMSE" Is = %0.4f.\n',TrRMSE)
fprintf('Training "MAE" Is = %0.4f.\n',TrMAE)
fprintf('Training "Correlation Coefficient" Is = %0.4f.\n',TrCC(1,2))
fprintf('Training "R_Squared" Is = %0.4f.\n',TrR_Squared)
% Test
fprintf('Testing "MSE" Is = %0.4f.\n',TsMSE)
fprintf('Testing "RMSE" Is = %0.4f.\n',TsRMSE)
fprintf('Testing "MAE" Is = %0.4f.\n',TsMAE)
fprintf('Testing "Correlation Coefficient" Is = %0.4f.\n',TsCC(1,2))
fprintf('Testing "R_Squared" Is = %0.4f.\n',TsR_Squared)
view(net);
%% Plots
% 3x2 figure: target-vs-output traces (train/test), residuals (train/test),
% and target-vs-output scatter plots with a cubic trend fit.
trer=targets-Outputs;           % training residuals
tser=TstTargets-TestOutputs;    % test residuals
figure;
subplot(3,2,1)
plot(targets,'linewidth',2); title('Train');
hold on;
plot(Outputs,'linewidth',2);legend('Target','Output');
subplot(3,2,2)
plot(TstTargets,'linewidth',2); title('Test');
hold on;
plot(TestOutputs,'linewidth',2);legend('Target','Output');
subplot(3,2,3)
plot(trer,'linewidth',2); title('Train Error');
subplot(3,2,4)
plot(tser,'linewidth',2); title('Test Error');
subplot(3,2,5)
[population2,gof] = fit(targets',Outputs','poly3');
plot(targets',Outputs','o',...
'LineWidth',1,...
'MarkerSize',6,...
'MarkerEdgeColor','k',...
'MarkerFaceColor',[0.9,0.9,0.1]);
% BUG FIX: the original labeled 1-gof.rmse as "R"; 1 minus RMSE is not a
% correlation coefficient. Use the fit's coefficient of determination.
title(['Train R^2 = ' num2str(gof.rsquare)]);xlabel('Target');ylabel('Output');
hold on
plot(population2,'k-','predobs');xlabel('Target');ylabel('Output');
hold off
subplot(3,2,6)
[population2,gof] = fit(TstTargets',TestOutputs','poly3');
plot(TstTargets',TestOutputs','o',...
'LineWidth',1,...
'MarkerSize',6,...
'MarkerEdgeColor','r',...
'MarkerFaceColor',[0.9,0.9,0.1]);
% Same fix as the training scatter: report R^2 from the fit's gof struct.
title(['Test R^2 = ' num2str(gof.rsquare)]);xlabel('Target');ylabel('Output');
hold on
plot(population2,'r-','predobs');xlabel('Target');ylabel('Output');
hold off