/*--------------------------------------------------------------------------*
 * Gregory Stevens                                                   7/1/93 *
 *                                 NNSIM2.C                                 *
 *  (Generalized Simulator: Unsupervised, Feed-Forward, Hebbian Learning)   *
 *                                                                          *
 *  This is a generalized simulator for an unsupervised feed-forward       *
 *  neural network with Hebbian learning.  It uses the nn*.c series.        *
 *  For this file, the following parameters in the following files should   *
 *  be set to comply with the input file and desired net configuration:     *
 *      NNPARAMS.C : INPUT_LAYER_SIZE                                       *
 *                   OUTPUT_LAYER_SIZE                                      *
 *                   NUM_HIDDEN_LAYERS   0                                  *
 *                   HL_SIZE_#           0                                  *
 *                                                                          *
 *      NNINPUTS.C : NUM_PATTERNS                                           *
 *                                                                          *
 *      NNSTRUCT.C : InitNet()   (output nodes should be linear)            *
 *                                                                          *
 *      NNHEBBLN.C : EPSILON   0.05   (recommended...this is what I used)   *
 *                                                                          *
 *  There should be 2 data files: nninputs.dat and nnintest.dat.            *
 *  The latter is only necessary if IN_TEST is set to 1, and is used to     *
 *  test the net on novel inputs.                                           *
 *                                                                          *
 *--------------------------------------------------------------------------*/
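/* Illustrative example only (hypothetical values, not from the original  */
/* package): for, say, 16-element input patterns mapped onto 4 competing  */
/* output nodes, the settings described above might read:                 */
/*                                                                         */
/*     NNPARAMS.C : #define INPUT_LAYER_SIZE   16                          */
/*                  #define OUTPUT_LAYER_SIZE   4                          */
/*                  #define NUM_HIDDEN_LAYERS   0                          */
/*     NNINPUTS.C : #define NUM_PATTERNS        8                          */
/*     NNHEBBLN.C : #define EPSILON             0.05                       */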
#include "nncompet.c" /* to chain it to the nn*.c utilities */
#define NUM_ITS 100 /* iterations before it stops training */
#define IN_TEST 1
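/*
 * NNHEBBLN.C is not reproduced here.  The function below is a minimal
 * sketch, under the ASSUMPTION that UpDateWeightandThresh() applies a
 * plain Hebbian update of the form
 *     dWeight[pre][post] = EPSILON * PreAct[pre] * PostAct[post]
 * with EPSILON = 0.05 as recommended in the header above.  It is
 * illustrative only and is never called by the simulator; the name
 * SketchHebbUpdate is hypothetical.
 */
static void SketchHebbUpdate( double Weights[], double PreAct[],
                              double PostAct[], int NumPre, int NumPost )
{
  int Pre, Post;

  for (Post = 0; Post < NumPost; ++Post)      /* each receiving node */
    for (Pre = 0; Pre < NumPre; ++Pre)        /* each sending node   */
      Weights[(Post * NumPre) + Pre] += 0.05 * PreAct[Pre] * PostAct[Post];
}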
int main( void )
{
  int         Pattern;               /* for looping through patterns   */
  int         Layer;                 /* for looping through layers     */
  int         LCV;                   /* for looping training sets      */
  NNETtype    Net;                   /* for the network itself         */
  PATTERNtype InPatterns;            /* for the training patterns      */

  Net        = InitNet( NUMNODES );  /* initializes the network        */
  InPatterns = InitInPatterns(0);    /* loads input patterns from file */

  printf( "\n\n\n\n\n" );            /* gets screen ready for output   */
  printf( "BEGINNING TRAINING:\n\n" );
  for (LCV = 0; (LCV < NUM_ITS); ++LCV)     /* loop through a training set */
  {
    for (Pattern = 0; (Pattern < NUM_PATTERNS); ++Pattern)  /* each pattern */
    {
      /* FORWARD PROPAGATION */
      Net = UpDateInputAct( InPatterns, Pattern, Net );

      for (Layer = 1; (Layer < NUMLAYERS); ++Layer)
      {
        Net = UpDateLayerAct( Net, Layer );

        /* for Hebbian, non-competitive, comment out the next line: */
        Net = AllOrNoneLayerActs( Net, Layer );
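        /* AllOrNoneLayerActs() comes from nncompet.c (not shown here). */
        /* Presumably it performs the competitive, winner-take-all      */
        /* step: the most active node in the layer is set to 1 and all  */
        /* of the others to 0.                                          */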
      }

      /* OUTPUT PRINTS */
      /* NOTE: The last number in DisplayLayer() will need to be */
      /*       adjusted to format different size input layers.   */
      DisplayLayer( Net, 0, 4 );               /* display input layer  */
      printf( " " );
      DisplayLayer( Net, (NUMLAYERS-1), 4 );   /* display output layer */
      printf( "\n" );                          /* new line             */

      /* LEARNING RULE */
      Net = UpDateWeightandThresh( Net );
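      /* Note that the weights are updated after every single pattern */
      /* (on-line learning), not accumulated over the whole set.      */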
    }
    printf( "\n\n" );                          /* prepare for next set */

    if ( LCV > (NUM_ITS-10) )
    {
      getc(stdin);                   /* pause between training epochs */
    }
  }

  if (IN_TEST == 1)
  {
    InPatterns = InitInPatterns(1);  /* loads test input patterns */

    printf( "BEGINNING PATTERN TESTING:\n\n" );
    for (Pattern = 0; (Pattern < NUM_PATTERNS); ++Pattern)
    {
      Net = UpDateInputAct( InPatterns, Pattern, Net );

      for (Layer = 1; (Layer < NUMLAYERS); ++Layer)
      {
        Net = UpDateLayerAct( Net, Layer );

        /* for Hebbian, non-competitive, comment out the next line: */
        Net = AllOrNoneLayerActs( Net, Layer );
      }

      /* OUTPUT PRINTS */
      /* NOTE: The last number in DisplayLayer() will need to be */
      /*       adjusted to format different size input layers.   */
      DisplayLayer( Net, 0, 4 );               /* display input layer  */
      printf( " " );
      DisplayLayer( Net, (NUMLAYERS-1), 5 );   /* display output layer */
      printf( "\n" );                          /* new line             */
    }
    getc(stdin);
  }

  return 0;
}