home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
Simtel MSDOS 1992 September
/
Simtel20_Sept92.cdr
/
msdos
/
ddjmag
/
ddj8901.arc
/
NNFIL.ASC
< prev
next >
Wrap
Text File
|
1989-01-02
|
10KB
|
287 lines
_NEURAL NETS AND NOISE FILTERING_
by Casey Klimasauskas
[LISTING ONE]
/* (backprop.c) Back-propagation XOR example */
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
/************************************************************************
* Back-propagation Exclusive OR Program *
************************************************************************
Written by Casimir C. Klimasauskas. No copyright or other
proprietary rights reserved.
This program was compiled and tested using DataLight "C" version 3.14
as follows: DLC -ms backprop.c
NOTE: the structure "_conn" uses an index for the PE source. This was
done (rather than a pointer to the processing element itself) to get
around certain problems with handling circular references in structure
definitions.
*/
#define MAXCONN 5 /* maximum number of conn */
typedef struct _conn { /* connection to a PE */
int PESource; /* index of PE source */
float ConnWt; /* connection Wt */
float LastDelta; /* last weight change */
} CONN;
typedef struct _pe { /* processing element */
float Output; /* PE output */
float Error; /* Accumulated error */
CONN Conns[MAXCONN+1]; /* connections */
} PE;
/************************************************************************
* Network Topology *
************************************************************************
The following diagram shows how the processing elements have been
connected to solve the exclusive "OR" problem. This is taken from
Volume 1 of "Parallel Distributed Processing" by Rumelhart and
McClelland.
+--------------------- PE5
| / | \
| / | \
+-----------------/----PE4 \
| | / \ |
| | / \ |
| |/ \|
PE1 - bias PE2 PE3
*/
/* The PE initializers rely on brace elision: after Output and Error,
 * each group of three values fills one CONN {PESource, ConnWt, LastDelta}.
 * Unlisted trailing Conns entries are zero, which terminates the list.
 */
static PE pe1 = { 1.0, 0 }; /* bias: Output fixed at 1.0, no connections */
static PE pe2 = { 0 }; /* inputs */
static PE pe3 = { 0 };
static PE pe4 = { 0, 0, 1,0,0, 2,0,0, 3,0,0 }; /* hidden: fed by PE1,PE2,PE3 */
static PE pe5 = { 0, 0, 1,0,0, 2,0,0, 3,0,0, 4,0,0 }; /* output: fed by PE1..PE4 */
/* --- Processing Elements (for reference by number; slot 0 unused) ---- */
static PE *PEList[] = { (PE *)0, &pe1, &pe2, &pe3, &pe4, &pe5 };
/* --- Layer definitions (each list is NULL-terminated) --- */
static PE *LayIn[] = { &pe2, &pe3, (PE *)0 }; /* input layer list */
static PE *LayMid[] = { &pe4, (PE *)0 }; /* hidden layer list */
static PE *LayOut[] = { &pe5, (PE *)0 }; /* output layer list */
/* --- Network List (NULL-terminated, input layer first) --- */
static PE **LayList[] = { &LayIn[0], &LayMid[0], &LayOut[0], (PE **)0 };
/************************************************************************
* Sigmoid() - Compute the sigmoid of a value *
************************************************************************
*/
double sigmoid( x )
double x;
{
double r; /* result */
extern double exp();
/* check special limiting cases to prevent overflow */
if ( x < -10. ) r = 0.0;
else if ( x > 10. ) r = 1.0;
else r = 1.0 / (1.0 + exp( -x ));
return( r );
}
/************************************************************************
* RRand() - Compute a random number in a range *
************************************************************************
*/
/* RRand() - return a uniform random number in [low, high].
 * low, high - inclusive limits of the desired range
 * Uses RAND_MAX from <stdlib.h> rather than the original hard-coded
 * 32767: on platforms where RAND_MAX is larger (e.g. 2147483647),
 * dividing by 32767 produced values far outside [low, high].
 */
double RRand( low, high )
double low, high; /* low / high limits */
{
    return( ((double)rand() / (double)RAND_MAX) * (high - low) + low );
}
/************************************************************************
* RandWts() - randomize all of the weights in a network *
************************************************************************
*/
/* RandWts() - set every connection weight in the network to a uniform
 * random value in [low, high] and clear each momentum term.
 * low, high - limits for the random weights
 * LLp       - NULL-terminated layer list (as built in LayList)
 */
void RandWts( low, high, LLp )
double low, high; /* low / high limits for random */
PE ***LLp; /* layer list pointer */
{
    PE **pepp;          /* walks the PEs of one layer */
    PE *pep;            /* current processing element */
    CONN *cp;           /* walks the PE's connection table */

    while ( (pepp = *LLp) != (PE **)0 )         /* each layer */
    {
        while ( (pep = *pepp) != (PE *)0 )      /* each PE in the layer */
        {
            /* connection lists end at the PESource == 0 sentinel */
            for ( cp = pep->Conns; cp->PESource != 0; cp++ )
            {
                cp->ConnWt = RRand( low, high );
                cp->LastDelta = 0.0;
            }
            pepp++;
        }
        LLp++;
    }
}
/************************************************************************
* Recall() - Recall information from the network *
************************************************************************
*/
/* Recall() - forward pass: propagate an input vector through the net.
 * ov  - receives the output layer's Outputs (written only when rcf != 0)
 * iv  - input vector, one value per input-layer PE
 * LLp - NULL-terminated layer list, input layer first
 * rcf - recall flag: 0 = learning pass (each PE's Error is zeroed,
 *       ov is left untouched); non-zero = pure recall (results -> ov)
 */
void Recall( ov, iv, LLp, rcf )
double *ov; /* output vector */
double *iv; /* input vector */
PE ***LLp; /* layer list pointer */
int rcf; /* "recall" mode flag (0=learn) */
{
PE **PePP; /* PE Pointer */
PE **LastPP; /* last non-zero PE list pointer */
PE *PeP; /* PE itself */
CONN *WtP; /* connection Pointer */
double sum; /* weighted sum */
/* copy the input vector to the inputs of the network */
for( PePP = *LLp++; (PeP = *PePP) != (PE *)0; PePP++ )
PeP->Output = *iv++;
/* compute the weighted sum and transform it */
for( ; (PePP = *LLp) != (PE **)0; LLp++ ) /* layer loop */
{
LastPP = PePP; /* after the loop this holds the last (output) layer */
for( ; (PeP = *PePP) != (PE *)0; PePP++ ) /* PE's in a layer */
{
/* weighted sum of the inputs (PESource == 0 ends the list) */
sum = 0;
for( WtP = &PeP->Conns[0]; WtP->PESource != 0; WtP++ )
sum += WtP->ConnWt * PEList[ WtP->PESource ]->Output;
/* transform it using a sigmoidal transfer function */
PeP->Output = sigmoid( sum );
/* if "learn" mode, set the error to zero */
if ( rcf == 0 ) PeP->Error = 0.0; /* (for learning) */
}
}
/* copy the results to the output array */
/* NOTE(review): LastPP is set only inside the layer loop above; a
 * network with just an input layer would read it uninitialized here.
 * Fine for this program, where LayList always has three layers. */
if ( rcf != 0 ) /* only if not learning */
{
for( ; (PeP = *LastPP) != (PE *)0; LastPP++ )
*ov++ = PeP->Output;
}
return;
}
/************************************************************************
* Learn() - "learn" an association *
************************************************************************
*/
/* Learn() - present one (input, target) pair and adjust all weights
 * using back-propagation with momentum.
 * ov    - desired (target) output vector
 * iv    - input vector
 * LLp   - NULL-terminated layer list, input layer first
 * alpha - learning rate (scales the gradient term)
 * eta   - momentum (scales the previous weight change)
 * Returns the maximum absolute output error of this presentation.
 * (The unused local LastPP in the original listing was removed.)
 */
double Learn( ov, iv, LLp, alpha, eta )
double *ov; /* desired output vector */
double *iv; /* input vector */
PE ***LLp; /* layer list pointer */
double alpha; /* learning rate */
double eta; /* momentum */
{
    double MAErr;           /* maximum absolute output error */
    double rv;              /* work value */
    double SigErr;          /* error scaled by the sigmoid derivative */
    PE ***ALp;              /* alternate layer pointer */
    PE **PePP;              /* PE list pointer */
    PE *PeP;                /* PE itself */
    CONN *WtP;              /* connection pointer */
    extern double fabs();   /* absolute value */

    /* forward pass; rcf == 0 also zeroes every PE's Error field,
       and Recall() does not write through ov in this mode */
    Recall( ov, iv, LLp, 0 );
    /* find the output layer (last entry before the NULL terminator) */
    for( ALp = LLp; ALp[1] != (PE **)0; ALp++ )
        ;
    /* output error = target - actual; track the largest magnitude */
    for( MAErr = 0.0, PePP = *ALp; (PeP = *PePP) != (PE *)0; PePP++ )
    {
        rv = *ov++ - PeP->Output;
        PeP->Error = rv;
        if ( fabs(rv) > MAErr ) MAErr = fabs(rv);
    }
    /* back-propagate the error & update the weights, walking the
       layers from the output back to (but not including) the input */
    for( ; ALp > LLp; ALp-- )
    {
        PePP = *ALp;
        for( ; (PeP = *PePP) != (PE *)0; PePP++ )
        {
            /* error before the sigmoid: out * (1 - out) * err */
            SigErr = PeP->Output * (1.0 - PeP->Output) * PeP->Error;
            for( WtP = &PeP->Conns[0]; WtP->PESource != 0; WtP++ )
            {
                /* pass the error back to the source PE through the weight */
                PEList[ WtP->PESource ]->Error +=
                    WtP->ConnWt * SigErr;
                /* delta rule with momentum: alpha*gradient + eta*last change */
                rv = alpha * PEList[ WtP->PESource ]->Output * SigErr +
                    eta * WtP->LastDelta;
                WtP->ConnWt += rv;
                WtP->LastDelta = rv;
            }
        }
    }
    return( MAErr );
}
/************************************************************************
* Main() - main driver routine to train the network *
************************************************************************
*/
/* XOR truth table: four (input, target) training pairs */
static double iv1[] = { 0.0, 0.0 }; static double ov1[] = { 0.0 };
static double iv2[] = { 1.0, 0.0 }; static double ov2[] = { 1.0 };
static double iv3[] = { 0.0, 1.0 }; static double ov3[] = { 1.0 };
static double iv4[] = { 1.0, 1.0 }; static double ov4[] = { 0.0 };
/* parallel arrays of pointers to the sample input / target vectors */
static double *ivp[] = { &iv1[0], &iv2[0], &iv3[0], &iv4[0] };
static double *ovp[] = { &ov1[0], &ov2[0], &ov3[0], &ov4[0] };
/* main() - train the three-layer network on the XOR truth table until
 * every pattern in an epoch is within 0.1 of its target, printing
 * progress every 100 presentations, then recall and print each pattern.
 * Returns 0 on normal completion.
 * Fixes over the original listing: main() now has an explicit int
 * return type and returns a value (the original used implicit int and
 * a bare `return;`, a constraint violation in C99 and later), and the
 * signed/unsigned comparisons against sizeof are made explicit.
 */
int main()
{
    int wx;         /* presentation counter */
    int x;          /* index into the training samples */
    double r;       /* error of the last presentation */
    double MAErr;   /* maximum absolute error over one epoch */
    double wo[sizeof(ivp)/sizeof(*ivp)];    /* recall output buffer */

    /* start from small random weights */
    RandWts( -1.0, 1.0, &LayList[0] );
    MAErr = 0.0;
    for( wx = 0; ; wx++ )
    {
        x = wx % (int)(sizeof(ivp)/sizeof(*ivp));
        if ( x == 0 && wx != 0 )    /* an epoch just completed */
        {
            if ( (wx % 100) == 0 )
                printf( "Presentation %4d, Maximum Absolute Error = %.5f\n",
                    wx, MAErr );
            /* stop once the whole epoch stayed within tolerance */
            if ( MAErr < .1 ) break;
            MAErr = 0.0;
        }
        r = Learn( ovp[x], ivp[x], &LayList[0], 0.9, 0.5 );
        if ( r > MAErr ) MAErr = r;
    }
    /* test the network */
    for( wx = 0; wx < (int)(sizeof(ivp)/sizeof(*ivp)); wx++ )
    {
        Recall( wo, ivp[wx], &LayList[0], 1 ); /* perform a recall */
        printf( "Input: %.2f %.2f -> %.2f\n", ivp[wx][0], ivp[wx][1], wo[0] );
    }
    return( 0 );
}