Specification:
Number of layers: 2 (hidden + output)
Number of input units: 13 (+ 1 for bias)
Number of hidden units: 8 (+1 for bias)
Number of output units: 3
Activation functions: sigmoid for hidden units; softmax for output units
Initial weights: uniform random numbers between 0 and 1
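For a quick sanity check of the shapes this specification implies, here is a minimal standalone sketch of the forward pass (not part of the script below; x stands for one normalized instance):
% Forward-pass shape check for the architecture above
x = rand(13,1);            % one (normalized) input instance
V = rand(8,14);            % hidden weights: 8 units x (13 inputs + bias)
W = rand(3,9);             % output weights: 3 units x (8 hidden + bias)
a = V*[1; x];              % 8x1 hidden pre-activations
z = 1./(1 + exp(-a));      % 8x1 sigmoid activations
b = W*[1; z];              % 3x1 output pre-activations
y = exp(b)/sum(exp(b));    % 3x1 softmax probabilities, sum(y) == 1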
Code:
%% Laxmi Kadariya
% adjust `iterations` and `learning_rate` below as needed
%%% read the input file and normalize the features
clc
clearvars
WineData = importdata("wine_data.txt");
class = WineData(1:end,1);                % first column: class label (1..3)
features = WineData(1:end,2:end);         % remaining 13 columns: features
max_feature = max(features);
features_norm = features./max_feature;    % scale each feature by its column maximum
%%% forward pass structure
V = rand(8,14);              % hidden weights: 8 units x (13 inputs + bias)
W = rand(3,9);               % output weights: 3 units x (8 hidden + bias)
N = size(features,1);        % number of instances (avoids shadowing built-in length)
learning_rate = 0.5;
iterations = 5000;
fprintf("The following steps are followed\n");
fprintf("1: Forward pass to compute an, zn, bn and yn\n");
fprintf("2: Compute the error signal.\n");
fprintf("3: Pass the error backward to compute the error for the hidden layer.\n");
fprintf("4: Compute the gradients\n");
fprintf("5: Update the weights\n");
fprintf("*********************************************\n");
count_original = 0;
fprintf("Total number of instances = %d \n",N);
fprintf("Learning rate = %f \n",learning_rate);
fprintf("Total number of iterations = %d \n",iterations);
tic
for iter = 1:iterations
    Vchange = zeros(8,14);   % accumulated gradient for V
    Wchange = zeros(3,9);    % accumulated gradient for W
    cost = 0;
    for m = 1:N
        % 1: forward pass to compute an, zn, bn and yn
        X = [1, features_norm(m,1:end)].';   % prepend the bias term
        A = V*X;                             % an: hidden pre-activations
        z_nobias = sigmoidFunction(A);       % hidden activations (no bias yet)
        zn = [1, z_nobias.'].';              % zn: hidden activations plus bias
        bn = W*zn;                           % bn: output pre-activations
        ynh = softmaxFunc(bn);               % yn-hat: softmax class probabilities
        [maximum, Index] = max(ynh);         % predicted class = argmax
        if iter == 1
            if Index == class(m,1)
                count_original = count_original + 1;
            end
        end
        output = class(m,1);
        if output == 1
            actual_yn = [1; 0; 0];
        elseif output == 2
            actual_yn = [0; 1; 0];
        else
            actual_yn = [0; 0; 1];
        end
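        % (equivalently: I3 = eye(3); actual_yn = I3(:,output);)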
        % 2: compute the error signal (softmax + cross-entropy output delta)
        error = ynh - actual_yn;
        % accumulate the binary cross-entropy over the three outputs (monitoring only)
        cost = cost + sum(actual_yn.*log(ynh) + (1-actual_yn).*log(1-ynh));
        % 3: pass the error backward to compute the hidden-layer error
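        % hidden delta: backpropagate through W, skipping the bias column
        % (hence W(:,i+1)), and multiply by the sigmoid derivative z*(1-z)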
        del = zeros(1,8);                    % preallocate the hidden deltas
        for i = 1:8
            del(1,i) = sum(error.*W(:,i+1))*z_nobias(i)*(1-z_nobias(i));
        end
        % 4: compute the gradients; the outer products use implicit
        % expansion (MATLAB R2016b+), equivalent to error*zn.' and del.'*X.'
        Wchange = Wchange + error.*zn';      % 3x1 .* 1x9 -> 3x9
        Vchange = Vchange + del'.*X';        % 8x1 .* 1x14 -> 8x14
    end
    % 5: update the weights with the gradients averaged over all N instances
    W = W - (learning_rate/N).*Wchange;
    V = V - (learning_rate/N).*Vchange;
    cost = -cost/N;                          % average (and sign-flip) the cost
    if iter == 1
        fprintf("Cost before training = %f\n",cost);
        fprintf("Total number of correctly classified instances before training = %d \n",count_original);
        fprintf("Start Training\n");
    end
end
%%% testing (note: on the same data the network was trained on)
count = 0;
classify_output = zeros(N,1);                % preallocate predictions
for k = 1:N
    X = [1, features_norm(k,1:end)].';       % prepend the bias term
    A = V*X;
    z_nobias = sigmoidFunction(A);
    zn = [1, z_nobias.'].';
    bn = W*zn;
    yn = softmaxFunc(bn);                    % softmax class probabilities
    [maximum, Index] = max(yn);
    classify_output(k,1) = Index;            % predicted class
    if Index == class(k,1)
        count = count + 1;
    end
end
time = toc;
fprintf("Total number of correctly classified instances after training = %d \n",count);
fprintf("Cost after Training = %f \n",cost);
fprintf("Total time = %f sec\n",time);
fprintf("*********************************************\n");
function s = softmaxFunc(z)
% Compute the softmax function
s = exp(z)/sum(exp(z));
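% note: a common safeguard (not applied here) is to use exp(z - max(z)) in
% both numerator and denominator, which avoids overflow for large z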
end
function g = sigmoidFunction(z)
% Compute the elementwise sigmoid (logistic) function
g = 1.0 ./ (1.0 + exp(-z));   % exp works in both MATLAB and Octave
end
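A caveat on the numbers below: the testing loop runs over the same 178 instances the network was trained on, so the "correctly classified instances after training" figure is training accuracy, not generalization performance. (Also, because the script defines local functions, it requires MATLAB R2016b or newer.) A minimal sketch of a held-out split, reusing the variables above; the 80/20 ratio is an arbitrary choice for illustration:
% Sketch: hold out a test set instead of testing on the training data
rng(1);                                  % reproducible shuffle
idx      = randperm(size(features_norm,1));
n_train  = round(0.8*numel(idx));        % 80/20 split (arbitrary)
train_id = idx(1:n_train);               % instances to train on
test_id  = idx(n_train+1:end);           % instances to evaluate on
% run the training loop over m = train_id and the testing loop over
% k = test_id, comparing Index against class(k,1)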
Output:
The following steps are followed
1: Forward pass to compute an, zn, bn and yn
2: Compute the error signal.
3: Pass the error backward to compute the error for the hidden layer.
4: Compute the gradients
5: Update the weights
*********************************************
Total number of instances = 178
Learning rate = 0.500000
Total number of iterations = 5000
Cost before training = 1.971846
Total number of correctly classified instances before training = 71
Start Training
Total number of correctly classified instances after training = 178
Cost after Training = 0.020612
Total time = 22.546547 sec
*********************************************