Friday, 13 February 2009

Single Layer Perceptron

Below is the code written in C++ to train the single layer perceptron (single layer network) for 2-class classification.

// SingleLayerPerceptron.cpp : Defines the entry point for the console application.
// Single Layer Perceptron training example code
// Author: Rudra P.K. Poudel

#include "stdafx.h"
#include<stdio.h>
#include<stdlib.h>
#include<math.h>
#include<float.h>
#include<conio.h>

#define NN 3 // Number of neurons (2 Inputs + 1 bias )
#define LEARNING_RATE 0.1 // Learning rate
#define INPUTS_COUNT 4 // Number of input patterns i.e. training sets
#define MAX_TRAIN_ITERATION 10000 //Maximum nos of iteration to train the networks

#define PRINT_OPTION 1
//Enum flag controlling how much of the training process is printed/logged
enum PrintOptions
{
poNone = 0, //Don't print any output
poTraining = 1 // Log the whole training process to a file
};

//Function declarations
int PerceptronOutput(const double weights[NN], const int inputs[NN]);
double NodeOutput(const double weights[NN], const int inputs[NN]);

//Main entry point of the program
//Trains a single layer perceptron for a 2-class problem over NN inputs (including the bias input)
//This program trains the perceptron on the target pattern {1,1,1,-1}, i.e. the NAND function
//Main entry point of the program.
//Trains a single layer perceptron with the perceptron learning rule on the
//2-class target pattern {1,1,1,-1} (the NAND function, classes +1/-1).
//When PRINT_OPTION==poTraining, every training step is logged to
//Training_Function.dat. Returns 0 on success, 1 if the log file cannot be opened.
int _tmain(int argc, _TCHAR* argv[])
{
	int i, j, iterations;
	double totalError, singleError, output;
	FILE *pFile = NULL; //trace file; opened only when PRINT_OPTION==poTraining

	//Input patterns; the last component is always 1 (the bias input)
	int inputs[][NN] = {
		{0,0,1},
		{0,1,1},
		{1,0,1},
		{1,1,1}
	};

	//Target class (+1/-1) for each input pattern; {1,1,1,-1} is NAND
	int targets[INPUTS_COUNT] = {1,1,1,-1};

	if(PRINT_OPTION==poTraining){
		pFile = fopen("Training_Function.dat","w");
		if(pFile==NULL){
			//Abort rather than write through a NULL stream (undefined behavior)
			printf("Error: could not open Training_Function.dat for writing.\n");
			return 1;
		}
	}

	double weights[NN] = {0,0,0};
	iterations = 0;
	//Train until every pattern is classified correctly (totalError==0)
	//or the iteration cap is reached
	do{
		totalError = 0;
		for(i=0;i<INPUTS_COUNT;i++){
			output = PerceptronOutput(weights,inputs[i]);
			singleError = targets[i] - output;

			if(PRINT_OPTION==poTraining){
				//Log: inputs, current weights, target, raw node output,
				//thresholded output, error, and the weight adjustment step
				for(j=0;j<NN;j++){
					fprintf(pFile,"%d\t",inputs[i][j]);
				}
				for(j=0;j<NN;j++){
					fprintf(pFile,"%f\t",weights[j]);
				}
				fprintf(pFile,"%d\t",targets[i]);
				fprintf(pFile,"%f\t",NodeOutput(weights,inputs[i]));
				fprintf(pFile,"%f\t",output);
				fprintf(pFile,"%f\t",singleError);
				fprintf(pFile,"%f\t", LEARNING_RATE * singleError);
			}
			if(singleError!=0){
				//Perceptron learning rule: w_j += eta * x_j * error
				for(j=0;j<NN;j++){
					weights[j] += LEARNING_RATE * inputs[i][j] * singleError;
				}
			}
			if(PRINT_OPTION==poTraining){
				//Log the weights after this update
				for(j=0;j<NN;j++){
					fprintf(pFile,"%f\t",weights[j]);
				}
				fprintf(pFile,"\n");
			}
			totalError += fabs(singleError);
		}
		iterations++;
		printf("Iterations: %d\tTotal Error= %f\n",iterations,totalError);
	}while(totalError!=0 && iterations<MAX_TRAIN_ITERATION);
	//END- training loop

	if(PRINT_OPTION==poTraining)
		fclose(pFile);

	printf("\n\nLearned weights:\n");
	for(j=0;j<NN;j++){
		printf("%f\t",weights[j]);
	}
	printf("\n\nPress any key to close the program...");
	getche();
	return 0;
}

//Function Name: PerceptronOutput
//Input: connection weights and the node's input vector (length NN)
//Output: +1 when the weighted sum (NodeOutput) is non-negative, otherwise -1
int PerceptronOutput(const double weights[NN], const int inputs[NN]){
	return (NodeOutput(weights, inputs) >= 0) ? 1 : -1;
}

//Function Name: NodeOutput
//Input: connection weights and the node's input vector (length NN)
//Output: the weighted sum (dot product) of the weights and inputs
double NodeOutput(const double weights[NN], const int inputs[NN]){
	int k;
	double sum = 0.0;

	for(k = 0; k < NN; k++){
		sum += weights[k] * inputs[k];
	}

	return sum;
}

No comments:

Post a Comment