%% Initialization
%clear ; close all; clc
%% Setup the parameters you will use for this exercise
input_layer_size  = 400; % 400 input features (20x20 pixel images)
hidden_layer_size = 25;  % 25 hidden units
num_labels        = 10;  % 10 labels, digits 1 to 10 ("0" mapped to label 10)
lambda = 1; % regularization parameter (overridden to 0 below during dropout training)
%% Load Data
% ex4data1.mat provides the matrix X (one training example per row)
% and the label vector y.
load('ex4data1.mat');
m = size(X, 1); % number of training examples
%% ================ Initializing Parameters ================
fprintf('\nInitializing Neural Network Parameters ...\n')
initial_Theta1 = randInitializeWeights(input_layer_size, hidden_layer_size);
initial_Theta2 = randInitializeWeights(hidden_layer_size, num_labels);
% Unroll parameters
initial_nn_params = [initial_Theta1(:) ; initial_Theta2(:)];
% Optional sanity check of the cost at the initial parameters:
%J = new1(initial_nn_params, input_layer_size, hidden_layer_size, ...
%         num_labels, X, y, lambda);
%% ==================== Dropout ====================
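% Dropout scheme implemented below: for every mini-batch, each hidden
% unit is kept with probability 0.5 and a sub-network containing only
% the retained units is trained with fmincg; the updated weights are
% then written back into the full weight matrices. At prediction time
% the outgoing weights of the hidden units are halved to approximate
% averaging over the exponentially many trained sub-networks.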
for iteration = 1:1 % single pass over the training set
  increment = 8; % mini-batch size
  increment      % echo the batch size (debug output)
  for xyz = 1:increment:m % iterate over the training set in mini-batches
    fprintf('\n');
    xyz          % echo the current batch start index (debug output)
    % Select the current mini-batch (the last one may be smaller).
    if xyz+increment-1 > m
      X_ran = X(xyz:m, :);
      y_ran = y(xyz:m);
    else
      X_ran = X(xyz:xyz+increment-1, :);
      y_ran = y(xyz:xyz+increment-1);
    end
    %% Randomly omitting hidden units with probability 0.5:
    % a unit is retained when its random draw is >= percent;
    % c counts the retained units.
    c = 0;
    percent = 0.5;
    ran_mat = rand(hidden_layer_size, 1);
    for i = 1:hidden_layer_size
      if ran_mat(i) >= percent
        c = c + 1;
      end
    end
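    % Vectorized equivalent of the count above (a sketch; "keep" is a
    % name introduced here for illustration only):
    %   keep = (ran_mat >= percent);  c = sum(keep);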
    % Build the reduced (sub-network) weight matrices containing only
    % the retained hidden units.
    ini_theta1_ran = zeros(c, input_layer_size+1);
    ini_theta2_ran = zeros(num_labels, c+1);
    % Copy the rows of Theta1 belonging to retained hidden units.
    k = 0;
    for i = 1:hidden_layer_size
      if ran_mat(i) >= percent
        k = k + 1;
        for j = 1:input_layer_size+1
          ini_theta1_ran(k, j) = initial_Theta1(i, j);
        end
      end
    end
    % The bias column of Theta2 is always kept.
    for i = 1:num_labels
      ini_theta2_ran(i, 1) = initial_Theta2(i, 1);
    end
    % Copy the columns of Theta2 belonging to retained hidden units.
    k = 1;
    for i = 1:hidden_layer_size
      if ran_mat(i) >= percent
        k = k + 1;
        for j = 1:num_labels
          ini_theta2_ran(j, k) = initial_Theta2(j, (i+1));
        end
      end
    end
    % Unroll the sub-network parameters into a single vector.
    initial_nn_params_ran = [ini_theta1_ran(:) ; ini_theta2_ran(:)];
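    % Vectorized equivalent of the extraction above (a sketch, using the
    % illustrative "keep" as an index vector):
    %   keep = find(ran_mat >= percent);
    %   ini_theta1_ran = initial_Theta1(keep, :);
    %   ini_theta2_ran = initial_Theta2(:, [1; keep+1]);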
    c % echo the number of retained hidden units (debug output)
    %% =================== Training NN ===================
    %fprintf('\nTraining Neural Network... \n')
    options = optimset('MaxIter', 60); % set MaxIter to the desired value
    lambda = 0; % regularization disabled while training the sub-network
    % c (the number of retained units) is the sub-network's hidden layer size.
    costFunction = @(p) nnCostFunction(p, ...
                                       input_layer_size, ...
                                       c, ...
                                       num_labels, X_ran, y_ran, lambda);
    [nn_params_ran, cost] = fmincg(costFunction, initial_nn_params_ran, options);
    % Recover the sub-network's weight matrices from the trained vector.
    ini_theta1_ran = reshape(nn_params_ran(1:c * (input_layer_size + 1)), ...
                             c, (input_layer_size + 1));
    ini_theta2_ran = reshape(nn_params_ran((1 + (c * (input_layer_size + 1))):end), ...
                             num_labels, (c + 1));
    %fprintf('Program paused. Press enter to continue.\n');
    %pause;
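    % fmincg optimizes a single unrolled parameter vector: the first
    % c*(input_layer_size+1) entries hold the sub-network's Theta1 and
    % the remaining num_labels*(c+1) entries hold its Theta2, which is
    % why the reshape calls above split the vector at that boundary.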
    % Write the trained sub-network weights back into the corresponding
    % rows and columns of the full weight matrices.
    k = 0;
    for i = 1:hidden_layer_size
      if ran_mat(i) >= percent
        k = k + 1;
        for j = 1:input_layer_size+1
          initial_Theta1(i, j) = ini_theta1_ran(k, j);
        end
      end
    end
    k = 1;
    for i = 1:num_labels
      initial_Theta2(i, 1) = ini_theta2_ran(i, 1);
    end
    for i = 1:hidden_layer_size
      if ran_mat(i) >= percent
        k = k + 1;
        for j = 1:num_labels
          initial_Theta2(j, (i+1)) = ini_theta2_ran(j, k);
        end
      end
    end
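    % Vectorized equivalent of the write-back above (a sketch, again
    % using the illustrative "keep" index vector):
    %   initial_Theta1(keep, :) = ini_theta1_ran;
    %   initial_Theta2(:, [1; keep+1]) = ini_theta2_ran;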
  end % end of mini-batch (xyz) loop
end % end of iterations loop
Theta1 = initial_Theta1;
Theta2 = initial_Theta2;
%% ================= Implement Predict =================
% Halve the weights outgoing from the hidden units, which were each kept
% with probability 0.5 during training. The bias column (j = 1) was
% never dropped, so it is left unscaled.
for i = 1:size(Theta2, 1)
  for j = 2:size(Theta2, 2)
    Theta2(i, j) = Theta2(i, j)/2; % outgoing weights halved
  end
end
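% Rationale: each hidden unit was active in roughly half of the trained
% sub-networks, so halving its outgoing weights makes the expected input
% to the output layer at test time match the training-time average
% (the standard dropout weight-scaling rule). Vectorized equivalent of
% the halving loop above (a sketch):
%   Theta2(:, 2:end) = Theta2(:, 2:end) / 2;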
pred = predict1(Theta1, Theta2, X);
fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);