The following code is written in MATLAB and implements a multilayer perceptron trained with the backpropagation algorithm, which propagates the output error back through the hidden layers. It uses batch update mode and can work with any number of hidden layers, each with as many nodes as required.
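Batch update means the weight changes are summed over the whole training set and applied once per epoch, with a momentum term added. In the listing below this corresponds to the following step, shown here in isolation as a sketch (same variable names as the code, not a separate implementation):

% for each layer i:
%   sumDeltaW{i}      = learningRate * (delta' * activation{i});                               % gradient summed over all samples
%   previousDeltaW{i} = sumDeltaW{i} ./ trainingDataCount + momentumWeight * previousDeltaW{i}; % average plus momentum
%   weights{i}        = weights{i} + previousDeltaW{i};                                         % apply the update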
% Reference: Patterson, D. [Online] Available from: http://www.csee.umbc.edu/~dpatte3/nn/backprop.html [accessed 24 February 2009]
function MLPUsingBackpropagation()
[x1,x2,x3,x4,x5,x6,t] = textread('ex1-data.txt','%f %f %f %f %f %f %f');
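% The loop below assumes ex1-data.txt contains (at least) 265 rows of seven whitespace-separated values: six inputs x1..x6 followed by one target t.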
trainingIndex=0;
testingIndex=0;
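% Assign each sample at random to the training set (~80%) or the test set (~20%).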
for i=1:265
if (rand > 0.2)
trainingIndex = trainingIndex + 1;
trainingInputs(trainingIndex,1) = x1(i);
trainingInputs(trainingIndex,2) = x2(i);
trainingInputs(trainingIndex,3) = x3(i);
trainingInputs(trainingIndex,4) = x4(i);
trainingInputs(trainingIndex,5) = x5(i);
trainingInputs(trainingIndex,6) = x6(i);
trainingOutputs(trainingIndex,1) = t(i);
else
testingIndex = testingIndex + 1;
testingInputs(testingIndex,1) = x1(i);
testingInputs(testingIndex,2) = x2(i);
testingInputs(testingIndex,3) = x3(i);
testingInputs(testingIndex,4) = x4(i);
testingInputs(testingIndex,5) = x5(i);
testingInputs(testingIndex,6) = x6(i);
testingOutput(testingIndex,1) = t(i);
end
end
%***** Memory Clean up **********
clear x1 x2 x3 x4 x5 x6 t
%********************************
layers = [6 23 23 1]; % 6 input nodes, two hidden layers of 23 nodes each, 1 output node
for i=1:50 % repeat training from 50 different random weight initialisations
Output = BackpropagationTraining(trainingInputs,trainingOutputs,0.1,0.05,0.01,layers);
OutputTesting = BackpropagationTesting(testingInputs, testingOutput, layers, Output.weights); % test with the final weights
OutputTesting = BackpropagationTesting(testingInputs, testingOutput, layers, Output.bestweights); % test with the weights that gave the lowest training SSE
end
%***** Memory Clean up **********
clear trainingInputs testingInputs layers
%********************************
end
function Results = BackpropagationTraining(inputs,targets,learningRate,momentumWeight,minimumMSE,layers)
[trainingDataCount,inputNodesCount] = size(inputs);
[targetDataCount,targetNodesCount] = size(targets);
if trainingDataCount ~= targetDataCount
error('BackpropagationTraining:TrainingAndTargetDataLengthMismatch', 'The number of input vectors and desired output vectors do not match');
end
if length(layers) < 3
error('BackpropagationTraining:InvalidNetworkStructure','The network must have at least 3 layers');
end
if inputNodesCount ~= layers(1)
error('BackpropagationTraining:InvalidInputLayerSize', 'The number of input columns (%d) does not match the size of the input layer (%d).', inputNodesCount, layers(1));
end
if targetNodesCount ~= layers(end)
error('BackpropagationTraining:InvalidOutputLayerSize', 'The number of target columns (%d) does not match the size of the output layer (%d).', targetNodesCount, layers(end));
end
leayersLength = length(layers);
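% Initialise every weight matrix uniformly at random in [-15, 15).
% Hidden-layer matrices get an extra row of zeros; this gives net{i} a dummy bias column
% that is discarded before the sigmoid is applied, keeping the matrix dimensions consistent.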
weights = cell(leayersLength-1,1);
for i=1:leayersLength-2
weights{i} = [-15 + 30 .* rand(layers(i+1),layers(i)+1); zeros(1,layers(i)+1)];
end
weights{end} = -15 + 30 .* rand(layers(end),layers(end-1)+1);
MSE = Inf;
epochs = 0;
activation = cell(leayersLength,1);
activation{1} = [inputs ones(trainingDataCount,1)]; % activation{1} is the input plus a column of ones for the bias node
% activation{1} remains the same throughout the computation
for i=2:leayersLength-1
activation{i} = ones(trainingDataCount,layers(i)+1); % inner layers include a bias node (trainingDataCount-by-Nodes+1)
end
activation{end} = ones(trainingDataCount,layers(end)); % no bias node at output layer
net = cell(leayersLength-1,1); % one net matrix for each layer exclusive input
for i=1:leayersLength-2
net{i} = ones(trainingDataCount,layers(i+1)+1); % affix bias node
end
net{end} = ones(trainingDataCount,layers(end));
previousDeltaW = cell(leayersLength-1,1);
sumDeltaW = cell(leayersLength-1,1);
for i=1:leayersLength-1
previousDeltaW{i} = zeros(size(weights{i})); % previousDeltaW starts at 0
sumDeltaW{i} = zeros(size(weights{i}));
end
lowestsse = Inf; % lowest training SSE seen so far
bestweights = weights;
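% Batch training loop: stop when the MSE falls below minimumMSE, or after at most 3000 epochs.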
while MSE > minimumMSE && epochs < 3000
for i=1:leayersLength-1
net{i} = activation{i} * weights{i}'; % compute inputs to current layer
if i < leayersLength-1 % inner layers
%activation{i+1} = [2./(1+exp(-net{i}(:,1:end-1)))-1 ones(trainingDataCount,1)]; % for bipolar sigmoid
activation{i+1} = [1./(1+exp(-net{i}(:,1:end-1))) ones(trainingDataCount,1)]; % for sigmoid
%activation{i+1} = [net{i}(:,1:end-1) ones(trainingDataCount,1)]; % without sigmoid, i.e. for a linear activation function
else % output layers
%activation{end} = 2 ./ (1 + exp(-net{i})) - 1; % bipolar sigmoid output
%activation{end} = 1 ./ (1 + exp(-net{i})); % sigmoid output
%activation{end} = net{i}; % linear output
activation{end} = net{i} > 0; % hard-threshold (step) output
end
end
% calculate sum squared error of all samples
err = (targets-activation{end}); % save this for later
sse = sum(sum(err.^2)); % sum of the error for all samples, and all nodes
if(lowestsse>sse)
lowestsse = sse;
bestweights = weights;
end
% The output-layer delta depends on the output activation function:
%delta = err .* (1 + activation{end}) .* (1 - activation{end}); % bipolar sigmoid output
%delta = err .* activation{end} .* (1 - activation{end}); % sigmoid output
delta = err; % hard-threshold/linear output: use the raw error as the delta
for i=leayersLength-1:-1:1
sumDeltaW{i} = learningRate * delta' * activation{i}; % weight change summed over all samples (batch mode)
if i > 1
%delta = (1+activation{i}) .* (1-activation{i}) .* (delta*weights{i}); % for bipolar sigmoid hidden units
delta = activation{i} .* (1-activation{i}) .* (delta*weights{i}); % for sigmoid hidden units
%delta = (delta*weights{i}); % for linear hidden units (no activation function)
end
end
% update the prev_w, weight matrices, epoch count and MSE
for i=1:leayersLength-1
previousDeltaW{i} = (sumDeltaW{i} ./ trainingDataCount) + (momentumWeight * previousDeltaW{i});
weights{i} = weights{i} + previousDeltaW{i};
end
epochs = epochs + 1;
MSE = sse/(trainingDataCount*targetNodesCount); % MSE = 1/trainingDataCount * 1/targetNodesCount * summed squared error
%MSEPerEpoch(epochs) = MSE;
end
%printing error
% hold on
% axis ( [0 3000 0 1]);
% x = 1:1:3000;
% plot(x,MSEPerEpoch);
% hold off
% s=input('Press enter to continue, enter 0 to stop \n');
% return the trained network
Results.structure = layers;
Results.weights = weights;
Results.bestweights = bestweights;
Results.epochs = epochs;
Results.MSE = MSE;
sse % display the final sum of squared errors
lowestsse % display the lowest sum of squared errors reached during training
end
function Results = BackpropagationTesting(inputs,targets,layers, weights)
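% Runs a single forward pass of the network over the test data, using the supplied
% weights, and reports the mean squared error of the thresholded outputs.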
[trainingDataCount,inputNodesCount] = size(inputs);
[targetDataCount,targetNodesCount] = size(targets);
if trainingDataCount ~= targetDataCount
error('BackpropagationTesting:InputAndTargetDataLengthMismatch', 'The number of input vectors and desired output vectors do not match');
end
leayersLength = length(layers);
activation = cell(leayersLength,1);
activation{1} = [inputs ones(trainingDataCount,1)]; % activation{1} is the input plus a column of ones for the bias node
% activation{1} remains the same throughout the computation
for i=2:leayersLength-1
activation{i} = ones(trainingDataCount,layers(i)+1); % inner layers include a bias node (trainingDataCount-by-Nodes+1)
end
activation{end} = ones(trainingDataCount,layers(end)); % no bias node at output layer
net = cell(leayersLength-1,1); % one net matrix for each layer exclusive input
for i=1:leayersLength-2
net{i} = ones(trainingDataCount,layers(i+1)+1); % affix bias node
end
net{end} = ones(trainingDataCount,layers(end));
for i=1:leayersLength-1
net{i} = activation{i} * weights{i}'; % compute inputs to current layer
if i < leayersLength-1 % inner layers
%activation{i+1} = [2./(1+exp(-net{i}(:,1:end-1)))-1 ones(trainingDataCount,1)]; % for bipolar sigmoid
activation{i+1} = [1./(1+exp(-net{i}(:,1:end-1))) ones(trainingDataCount,1)]; % for sigmoid
%activation{i+1} = [net{i}(:,1:end-1) ones(trainingDataCount,1)]; % for a linear activation function
else % output layers
%activation{end} = 2 ./ (1 + exp(-net{i})) - 1; % bipolar sigmoid output
%activation{end} = 1 ./ (1 + exp(-net{i})); % sigmoid output
activation{end} = net{i} > 0; % hard-threshold (step) output
%activation{end} = net{i}; % linear output
end
end
%activation{end} = activation{end} > 0.5;
% calculate sum squared error of all samples
err = (targets-activation{end}); % save this for later
sse = sum(sum(err.^2)); % sum of the error for all samples, and all nodes
MSE = sse/(trainingDataCount*targetNodesCount); % MSE = 1/trainingDataCount * 1/targetNodesCount * summed squared error
% return the test results
Results.MSE = MSE;
Results.sse = (sse/trainingDataCount)*100; % error as a percentage of test samples (meaningful when the targets are 0/1)
100-((sse/trainingDataCount)*100) % display the percentage of test samples classified correctly
end