-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathCompileCrossValidtionLoop.m
68 lines (51 loc) · 2.27 KB
/
CompileCrossValidtionLoop.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
%% Partition data into training, validation, and testing sets
% Loads the dataset and splits the 1330 runs (502 samples each) into
% training (tr), validation (val), and testing (ts) index sets over the
% flattened sample index space 1:1330*502.
load('CS229A_Dataset.mat');
total = 1:1330*502; % Set of all of the indices (1330 runs x 502 samples/run)
ValidationPercentage = 0.2;  % fraction of the train+val runs held out for validation
TestingPercentage = 90;      % percent (0-100) of all runs reserved for testing
PercentTrainVal = (100-TestingPercentage)/100;
PercentTest = TestingPercentage/100;
NumberTrainValidationRuns = round(PercentTrainVal*1330); %Number of Training Runs
NumberTestRuns = 1330-NumberTrainValidationRuns; %Number of runs that will be tested
% Latin Hypercube Sampling of which runs go to training+validation.
% NOTE(review): Partition and ICVrecord come from project code / the loaded
% .mat file -- not defined in this script.
startTrVal = Partition(NumberTrainValidationRuns, ICVrecord); %Latin Hypercube Sampling
startVal = randsample(startTrVal, round(ValidationPercentage*length(startTrVal)));
startTr = setdiff(startTrVal,startVal);
% Expand each selected run index r into its 502 sample indices,
% (502*r-501):(502*r). Preallocate instead of growing the vectors inside
% the loop (avoids O(n^2) reallocation).
tr = zeros(1, 502*length(startTr));
for j=1:length(startTr)
    tr((502*j-501):(502*j)) = (502*startTr(j)-501):(502*startTr(j)); %WHOLE run in X
end
val = zeros(1, 502*length(startVal));
for j=1:length(startVal)
    val((502*j-501):(502*j)) = (502*startVal(j)-501):(502*startVal(j)); %WHOLE run in X
end
ts = setdiff(total,[tr val]); %Testing indices = all indices minus train/val indices
%% Train ANN (Current setup trains 10-neuron layers starting with 1 to 4)
% Note that each architecture is trained several times to account for
% problems associated with optimization under different random weight
% initializations, as some initializations have a better chance of
% finding superior weight values than others.
Layers = 1:4;   % hidden-layer counts to sweep
repeat = 4;     % retrainings per architecture (different weight inits)
mse_train_matrix = zeros(length(Layers), repeat);
mse_val_matrix = zeros(length(Layers), repeat);
mse_test_matrix = zeros(length(Layers), repeat);
% NOTE(review): R2_matrix is allocated here but never assigned anywhere in
% this script, so any summary computed from it is all zeros -- confirm intent.
R2_matrix = zeros(length(Layers), repeat);
% Preallocate the sliced cell outputs before the parfor loop.
m_matrix = cell(length(Layers), repeat);   % trained networks
tr_matrix = cell(length(Layers), repeat);  % training records
for i = Layers
    fprintf('Training architecture %d of %d\n', i, length(Layers)); % progress report
    parfor j = 1:repeat
        % Rows = features, Cols = data points; targets are columns 1 and 2 of y.
        [netOut, trOut] = ANNCrossValidationLoop(X', y(:, [1,2])', tr, val, ts, i);
        m_matrix{i,j} = netOut;
        tr_matrix{i,j} = trOut;
        mse_train_matrix(i,j) = trOut.best_perf;  % training MSE at best epoch
        mse_val_matrix(i,j) = trOut.best_vperf;   % validation MSE at best epoch
        mse_test_matrix(i,j) = trOut.best_tperf;  % testing MSE at best epoch
    end
end
%% Summarize the best result per architecture across the repeats.
% Each matrix is (architectures x repeats); reduce over dim 2 (the repeats)
% to get one row vector per metric. min(A, [], 2)' is used instead of
% min(A') because the latter collapses to a single scalar when repeat == 1
% (min of a row vector), whereas the explicit-dimension form is always
% correct.
mse_train = min(mse_train_matrix, [], 2)'; % best training MSE per layer count
mse_test = min(mse_test_matrix, [], 2)';   % best testing MSE per layer count
mse_val = min(mse_val_matrix, [], 2)';     % best validation MSE per layer count
% NOTE(review): R2_matrix is never populated in this script, so R2 is all
% zeros; also, for R^2 the "best" value would be the maximum, not the
% minimum -- confirm before relying on this.
R2 = min(R2_matrix, [], 2)';