5. MLP Wine — multilayer perceptron classifier for the UCI Wine data set (C source).
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

#define ALPHA 0.2 /* momentum coefficient for weight updates */


#define N 178 //Total no. of samples
#define N_SAMPLE 50 // No of training sample
#define MAX_ITERATION 50000 /* training epochs (loop runs MAX_ITERATION+1 times) */
#define RATE 0.1 // Rate of learning
#define NI 13 // No of Feature
#define NH 29// No. of hidden neurons = 2*(NI+1) + 1
#define NO 3 // No. of classes
#define BIAS 1 // Bias value
#define LB -0.05 // LB of initial weights
#define UB 0.05 // UB of initial weights
#define TOLERANCE 0.24 /* NOTE(review): unused in this file — confirm before removing */
#define SHARPNESS 1 /* sigmoid steepness factor */
#define SCALE_FACTOR 1000 /* raw feature values are divided by this on load */

/* One sample: feature vector, true class label, and 1-of-NO target vector. */
typedef struct
{
float x[NI]; /* feature values (divided by SCALE_FACTOR in getPatterns) */
int class_no; /* true class label, 1-based */
int n_in,n_out; /* number of features / classes actually in use */
float t[NO]; /* membership value for different class */
}PATTERN;

/****************** PROTOTYPE DECLERATION ************************/


void linkfloat(void);
void getUnknownPattern(PATTERN *X);
void getPatterns(PATTERN *P,PATTERN *Q);
void initialisation(void);
void back_propagation(PATTERN *p);
void shuffel(PATTERN *P);
double avgError(PATTERN *p);
void computeOutputs(PATTERN X);
int computeClass(PATTERN X);
void report(PATTERN *P);
/*********************** END OF DECLERATION ************************/

/*
 * Forces floating-point library support to be linked in so that
 * scanf("%f", ...) works.  NOTE(review): this is a historical Turbo C
 * workaround; it is a harmless no-op on modern compilers — confirm it
 * is still needed for the target toolchain.
 */
void linkfloat()
{
float F,*fp;
fp=&F;
F=*fp;
}

/*
 * Interactively reads one unknown pattern from stdin: the feature count,
 * the class count, and the feature values into X->x[0..n_in-1].
 *
 * FIXES: fflush(stdin) is undefined behavior per the C standard and has
 * been removed (scanf's %d/%f conversions already skip leading
 * whitespace); scanf results are now checked; n_in/n_class are clamped
 * to the array capacities NI/NO to prevent buffer overflow.
 */
void getUnknownPattern(PATTERN *X)
{
	int j, n_in, n_class;

	printf("\n Enter the no. of Input Parameters :");
	if (scanf("%d", &n_in) != 1 || n_in < 0 || n_in > NI)
		n_in = NI;          /* clamp to x[] capacity */
	X->n_in = n_in;

	printf("\n Enter the no. of class :");
	if (scanf("%d", &n_class) != 1 || n_class < 0 || n_class > NO)
		n_class = NO;       /* clamp to t[] capacity */
	X->n_out = n_class;

	printf("\n Enter the input parameters:");
	for (j = 0; j < n_in; j++)
		if (scanf("%f", &X->x[j]) != 1)
			X->x[j] = 0.0f; /* default on malformed input */
}

/*
 * Loads all N patterns from "data_wine.doc" into P, scaling each feature
 * by 1/SCALE_FACTOR and building a 1-of-NO target vector from the class
 * label, then copies N_SAMPLE randomly-chosen patterns (with possible
 * repeats) into Q as the training set.
 *
 * BUG FIXES: the fopen() result was never checked (NULL dereference on a
 * missing file); fflush(stdin) is undefined behavior and was removed;
 * scanf/fscanf results are now checked and n_in/n_class clamped to the
 * array capacities.
 */
void getPatterns(PATTERN *P, PATTERN *Q)
{
	FILE *f;
	int i, j, n_in, n_class, class_no;
	time_t t;
	int I[N_SAMPLE];

	srand((unsigned) time(&t));

	printf("\n Enter the no. of Input Parameters :");
	if (scanf("%d", &n_in) != 1 || n_in < 1 || n_in > NI)
		n_in = NI;
	printf("\n Enter the no. of class :");
	if (scanf("%d", &n_class) != 1 || n_class < 1 || n_class > NO)
		n_class = NO;

	f = fopen("data_wine.doc", "r");
	if (f == NULL) {
		fprintf(stderr, "\n Cannot open data_wine.doc\n");
		exit(EXIT_FAILURE);
	}

	/* selection of the random training indices */
	for (i = 0; i < N_SAMPLE; i++)
		I[i] = rand() % N;

	for (i = 0; i < N; i++) {
		P[i].n_in = n_in;
		P[i].n_out = n_class;

		for (j = 0; j < n_in; j++) {
			if (fscanf(f, "%f", &P[i].x[j]) != 1)
				P[i].x[j] = 0.0f;   /* short/corrupt file */
			P[i].x[j] = P[i].x[j] / (float) SCALE_FACTOR;
		}
		if (fscanf(f, "%d", &class_no) != 1)
			class_no = 0;               /* no class matches -> all-zero target */
		P[i].class_no = class_no;

		/* 1-of-NO target encoding */
		for (j = 0; j < n_class; j++)
			P[i].t[j] = (j == class_no - 1) ? 1.0f : 0.0f;
	}
	fclose(f);

	for (i = 0; i < N_SAMPLE; i++)
		Q[i] = P[I[i]];
}
/* Network state shared by all routines below. */
double w1[NH][NI+1],w2[NO][NH+1]; /* input->hidden / hidden->output weights; index NI / NH is the bias weight */
//double dw1[NH][NI+1],dw2[NO][NH+1];
double pdw1[NH][NI+1],pdw2[NO][NH+1]; /* previous weight deltas (momentum terms) */
double delta_h[NH],delta_o[NO]; /* back-propagated error terms per layer */
double out_h[NH+1],out_o[NO]; /* activations; out_h[NH] is the bias unit */

/*
 * Seeds the PRNG, draws every weight in both layers uniformly from
 * [LB, UB], and zeroes the momentum (previous-delta) arrays.
 */
void initialisation()
{
	int h, i;
	time_t t;

	srand((unsigned int) time(&t));

	/* input -> hidden weights (plus bias weight at index NI) */
	for (h = 0; h < NH; h++)
		for (i = 0; i <= NI; i++) {
			pdw1[h][i] = 0.0;
			w1[h][i] = (double) LB + ((double) rand() / RAND_MAX) * (UB - LB);
		}

	/* hidden -> output weights (plus bias weight at index NH) */
	for (h = 0; h < NO; h++)
		for (i = 0; i <= NH; i++) {
			pdw2[h][i] = 0.0;
			w2[h][i] = (double) LB + ((double) rand() / RAND_MAX) * (UB - LB);
		}
}

/*
 * Trains the network on P[0..N_SAMPLE-1] with on-line back-propagation
 * plus momentum (ALPHA).  Runs MAX_ITERATION+1 epochs, reshuffling the
 * training set after every epoch and printing the running average
 * absolute error and the per-epoch correct/incorrect counts.
 *
 * BUG FIXES: in the original, several "//" comments had been split
 * across source lines, leaving bare words ("layer", "hidden nodes")
 * outside any comment, and the progress printf's format string was
 * broken across two lines (unterminated string literal) — both made the
 * file uncompilable.  c_classNo is also initialised.  The training
 * logic itself is unchanged.
 */
void back_propagation(PATTERN *P)
{
	unsigned long int iteration = 0;
	int i, j, p, n_correct, n_incorrect, c_classNo = 0;
	double Error, AVGError, Sum, max;

	initialisation();

	do {
		Error = 0.0;
		n_correct = n_incorrect = 0;
		for (p = 0; p < N_SAMPLE; p++) {
			/* forward pass: hidden-layer activations */
			for (j = 0; j < NH; j++) {
				Sum = 0.0;
				for (i = 0; i <= NI; i++)
					if (i < NI)
						Sum += P[p].x[i] * w1[j][i];
					else
						Sum += w1[j][i] * BIAS;   /* bias weight */
				out_h[j] = 1.0 / (1.0 + exp(-Sum * SHARPNESS));
			}
			out_h[NH] = BIAS;   /* bias unit feeding the output layer */

			/* forward pass: output-layer activations */
			max = -9999;
			for (j = 0; j < NO; j++) {
				Sum = 0.0;
				for (i = 0; i <= NH; i++)
					Sum += out_h[i] * w2[j][i];
				out_o[j] = 1.0 / (1.0 + exp(-Sum * SHARPNESS));
				Error += fabs(P[p].t[j] - out_o[j]);

				if (out_o[j] > max) {
					max = out_o[j];
					c_classNo = j + 1;   /* winner-takes-all class */
				}

				/* delta for output nodes (sigmoid derivative) */
				delta_o[j] = SHARPNESS * (P[p].t[j] - out_o[j])
					* out_o[j] * (1.0 - out_o[j]);
			}

			if (P[p].class_no != c_classNo)
				n_incorrect++;
			else
				n_correct++;

			/* back-propagate deltas to the hidden nodes */
			for (j = 0; j < NH; j++) {
				Sum = 0.0;
				for (i = 0; i < NO; i++)
					Sum += w2[i][j] * delta_o[i];
				delta_h[j] = Sum * out_h[j] * (1.0 - out_h[j]) * SHARPNESS;
			}

			/* weight update with momentum */
			for (j = 0; j < NH; j++)
				for (i = 0; i <= NI; i++) {
					w1[j][i] += RATE * P[p].x[i] * delta_h[j]
						+ ALPHA * pdw1[j][i];
					pdw1[j][i] = RATE * P[p].x[i] * delta_h[j];
				}
			for (j = 0; j < NO; j++)
				for (i = 0; i <= NH; i++) {
					w2[j][i] += RATE * out_h[i] * delta_o[j]
						+ ALPHA * pdw2[j][i];
					pdw2[j][i] = RATE * out_h[i] * delta_o[j];
				}
		}
		iteration++;
		AVGError = Error / (N_SAMPLE * NO);
		printf("\n Average Error : %e Correct %d Incorrect %d",
		       AVGError, n_correct, n_incorrect);
		shuffel(P);
	} while (iteration <= MAX_ITERATION);
}

/*
 * Randomly permutes the N_SAMPLE training patterns in place by
 * performing N_SAMPLE/2 random swaps.
 *
 * BUG FIX: the original called srand((unsigned)&time), seeding the PRNG
 * with the ADDRESS of a local time_t — not the current time.  The
 * reseed is removed entirely: rand() is already seeded in getPatterns()
 * and initialisation(), and reseeding once per epoch with one-second
 * resolution would make consecutive shuffles identical anyway.
 */
void shuffel(PATTERN *P)
{
	int i, k, l;
	PATTERN T;

	for (i = 0; i < N_SAMPLE / 2; i++) {
		k = rand() % N_SAMPLE;
		l = rand() % N_SAMPLE;
		T = P[k];
		P[k] = P[l];
		P[l] = T;
	}
}
/*
 * Mean absolute difference between the current output activations
 * out_o[] and every training target, averaged over N_SAMPLE patterns
 * and NO outputs.  Note: out_o[] holds whatever the most recent forward
 * pass produced; it is compared unchanged against every pattern's
 * targets.
 */
double avgError(PATTERN *p)
{
	double total = 0.00;
	int sample, k;

	for (sample = 0; sample < N_SAMPLE; sample++)
		for (k = 0; k < NO; k++)
			total += fabs(out_o[k] - p[sample].t[k]);

	return total / (N_SAMPLE * NO);
}

/*
 * Forward pass for a single (unknown) pattern X: prints X's features,
 * the NO output activations, and the winning 1-based class number.
 *
 * Notes: X is passed BY VALUE, so the X.t[] assignments are local only —
 * callers never see them.  Features are divided by SCALE_FACTOR here,
 * so X is expected to carry raw (unscaled) values.
 *
 * FIX: the sigmoid now includes the SHARPNESS factor, consistent with
 * back_propagation() and computeClass(); numerically identical while
 * SHARPNESS == 1.  class_no is initialised for safety.
 */
void computeOutputs(PATTERN X)
{
	int i, j, class_no = 0;
	double t, max = -9999.99;

	/* hidden-layer activations (index NI is the bias weight) */
	for (j = 0; j < NH; j++) {
		t = 0.00;
		for (i = 0; i < NI + 1; i++)
			if (i < NI)
				t += w1[j][i] * (X.x[i] / (float) SCALE_FACTOR);
			else
				t += w1[j][i] * BIAS;
		out_h[j] = 1.00 / (1.00 + exp(-t * SHARPNESS));
	}

	/* output-layer activations (index NH is the bias weight) */
	for (j = 0; j < NO; j++) {
		t = 0.00;
		for (i = 0; i < NH + 1; i++)
			if (i < NH)
				t += w2[j][i] * out_h[i];
			else
				t += w2[j][i] * BIAS;
		out_o[j] = 1.00 / (1.00 + exp(-t * SHARPNESS));
	}

	for (j = 0; j < NI; j++)
		printf("\t %.2f", X.x[j]);
	for (i = 0; i < NO; i++) {
		X.t[i] = (float) out_o[i];
		if (max < out_o[i]) {
			max = out_o[i];
			class_no = i + 1;   /* winner-takes-all */
		}
		printf("\n %f", X.t[i]);
	}
	printf("\n Computed Class No.:%d ", class_no);
}

/*
 * Forward pass for pattern X; prints the NO output activations and
 * returns the 1-based index of the strongest output.
 *
 * NOTE(review): X.x[i] is divided by SCALE_FACTOR here, but report()
 * feeds this function patterns that getPatterns() has ALREADY scaled by
 * SCALE_FACTOR — those inputs end up scaled twice.  report1() avoids
 * this by using the stored values directly.  Confirm which scaling is
 * intended before trusting report()'s accuracy figure.
 */
int computeClass(PATTERN X)
{
int i,j,class_no;
double t,max=-9999.99;
/* hidden-layer activations; i == NI contributes the bias weight */
for(j=0;j<NH;j++)
{
t=0.00;
for(i=0;i<=NI;i++)
if(i<NI)
t+=w1[j][i]*(X.x[i]/(float)SCALE_FACTOR);
else
t+=w1[j][i]*BIAS;
out_h[j]=1.00/(1.00+exp(-t*SHARPNESS));
}
out_h[NH]=BIAS; /* bias unit for the output layer */
for(j=0;j<NO;j++)
{
t=0.00;
for(i=0;i<=NH;i++)
t+=w2[j][i]*out_h[i];
out_o[j]=1.00/(1.00+exp(-t*SHARPNESS));
}

/* pick the strongest output (winner-takes-all) */
for(i=0;i<NO;i++)
{
X.t[i]=(float)out_o[i]; /* local only: X is passed by value */
if(max<out_o[i])
{
max=out_o[i];
class_no=i+1;

}
printf("\n %f",X.t[i]);
}
printf("\n Computed Class No.:%d ",class_no);
return class_no;
}

/*
 * Classifies all N patterns with computeClass() and prints the
 * correct/incorrect counts plus the percentage accuracy.
 *
 * BUG FIX: the printf format string was split across two source lines
 * (unterminated string literal) and ended with a lone '%' — undefined
 * behavior; it is now a single literal ending in "%%".
 */
void report(PATTERN *P)
{
	int n_correct = 0, n_incorrect = 0, i;

	for (i = 0; i < N; i++)
		if (P[i].class_no == computeClass(P[i]))
			n_correct++;
		else
			n_incorrect++;

	printf("\n Correct %d Incorrect %d Accuracy %.2f%%",
	       n_correct, n_incorrect, (float) (n_correct * 100) / N);
}

/*
 * Evaluates all N patterns with an inline forward pass (no extra input
 * scaling — the patterns are used exactly as stored) and prints the
 * correct/incorrect counts and percentage accuracy.
 *
 * BUG FIXES: bare comment continuations ("layer", "backward to
 * determine...") and a printf format string split across two lines made
 * the original uncompilable; the `Error` accumulator was read before
 * initialisation (UB) and never used, so it is removed; c_classNo is
 * initialised.
 */
void report1(PATTERN *P)
{
	int i, j, p, n_correct, n_incorrect, c_classNo = 0;
	double Sum, max, p_accuracy;

	n_correct = n_incorrect = 0;
	for (p = 0; p < N; p++) {
		/* forward pass: hidden-layer activations */
		for (j = 0; j < NH; j++) {
			Sum = 0.0;
			for (i = 0; i <= NI; i++)
				if (i < NI)
					Sum += P[p].x[i] * w1[j][i];
				else
					Sum += w1[j][i] * BIAS;   /* bias weight */
			out_h[j] = 1.0 / (1.0 + exp(-Sum * SHARPNESS));
		}
		out_h[NH] = BIAS;   /* bias unit for the output layer */

		/* forward pass: output-layer activations */
		max = -9999;
		for (j = 0; j < NO; j++) {
			Sum = 0.0;
			for (i = 0; i <= NH; i++)
				Sum += out_h[i] * w2[j][i];
			out_o[j] = 1.0 / (1.0 + exp(-Sum * SHARPNESS));

			if (out_o[j] > max) {
				max = out_o[j];
				c_classNo = j + 1;   /* winner-takes-all class */
			}
		}

		if (P[p].class_no != c_classNo)
			n_incorrect++;
		else
			n_correct++;
	}

	p_accuracy = (float) n_correct * 100 / N;
	printf("\n Correct %d Incorrect %d,Accuracy %.2f",
	       n_correct, n_incorrect, p_accuracy);
}

/*
 * Entry point: loads the wine data set, echoes the selected training
 * sample, trains the network on it, then reports accuracy over all N
 * patterns via report1().
 *
 * FIX: "void main()" is not a standard hosted signature — changed to
 * int main(void) with an explicit return 0.
 */
int main(void)
{
	PATTERN P[N], Q[N_SAMPLE], X;
	int i, j;

	linkfloat();
	/* clrscr(); */
	getPatterns(P, Q);

	/* echo the randomly selected training sample */
	for (i = 0; i < N_SAMPLE; i++) {
		printf("\n");
		for (j = 0; j < Q[i].n_in; j++)
			printf("%.3f ", Q[i].x[j]);
		for (j = 0; j < Q[i].n_out; j++)
			printf("%.1f ", Q[i].t[j]);
	}

	/* getUnknownPattern(&X); */
	/* computeOutputs(X); */
	(void) X;   /* kept for the commented-out interactive path */
	back_propagation(Q);
	report1(P);
	return 0;
}
