
/*--------------------------------------------------------------------------*
 * Gregory Stevens                                                   7/1/93 *
 *                                NNSIM2.C                                  *
 *    (Generalized Simulator: Unsupervised, Feed-Forward, Hebbian Learning) *
 *                                                                          *
 *   This is a generalized simulator for an unsupervised, feed-forward      *
 * neural net with Hebbian learning.  It uses the nn*.c series.             *
 * For this file, the following parameters in the following files should be *
 * set to comply with the input file and desired net configuration:         *
 *      NNPARAMS.C : INPUT_LAYER_SIZE                                       *
 *                   OUTPUT_LAYER_SIZE                                      *
 *                   NUM_HIDDEN_LAYERS 0                                    *
 *                   HL_SIZE_#         0                                    *
 *                                                                          *
 *      NNINPUTS.C : NUM_PATTERNS                                           *
 *                                                                          *
 *      NNSTRUCT.C : InitNet()   (output nodes should be linear)            *
 *                                                                          *
 *      NNHEBBLN.C : EPSILON 0.05  (recommended...this is what I used)      *
 *                                                                          *
 *   (An illustrative sketch of these settings follows this comment.)       *
 *                                                                          *
 *   There should be 2 data files: nninputs.dat, nnintest.dat               *
 * The latter is only necessary if IN_TEST is set to 1, and is to test the  *
 * net on novel inputs.                                                     *
 *                                                                          *
 *--------------------------------------------------------------------------*/
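/* Illustrative sketch only (not part of the original nn*.c sources): the
 * block below shows what the settings named in the header above might look
 * like for a hypothetical 4-input / 4-output net with no hidden layers.
 * The concrete values (4 nodes, 8 patterns) and the name HL_SIZE_1 are
 * assumptions chosen for illustration; EPSILON 0.05 is the value the header
 * recommends.  The block is wrapped in #if 0 so it cannot clash with the
 * real definitions chained in through nncompet.c.
 */
#if 0
/* NNPARAMS.C */
#define INPUT_LAYER_SIZE   4         /* nodes in the input layer            */
#define OUTPUT_LAYER_SIZE  4         /* nodes in the output layer           */
#define NUM_HIDDEN_LAYERS  0         /* this simulator expects none         */
#define HL_SIZE_1          0         /* hidden-layer sizes all zero         */

/* NNINPUTS.C */
#define NUM_PATTERNS       8         /* patterns in nninputs.dat            */

/* NNHEBBLN.C */
#define EPSILON            0.05      /* learning rate recommended above     */
#endif
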
#include <stdio.h>                   /* for printf() and getc()             */
#include "nncompet.c"                /* to chain it to the nn*.c utilities  */

#define NUM_ITS 100                  /* iterations before it stops training */
#define IN_TEST 1
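                                     /* when 1, also run the trained net on */
                                     /* the novel patterns in nnintest.dat  */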

int main(void)
{
  int Pattern;                          /* for looping through patterns   */
  int Layer;                            /* for looping through layers     */
  int LCV;                              /* for looping training sets      */
  NNETtype Net;                         /* for the network itself         */
  PATTERNtype InPatterns;               /* for the training patterns      */

  Net = InitNet( NUMNODES );            /* initializes the network        */
  InPatterns = InitInPatterns(0);       /* loads input patterns from file */

  printf("\n\n\n\n\n");                 /* gets screen ready for output   */

  printf( "BEGINNING TRAINING:\n\n" );

  for (LCV=0; (LCV < NUM_ITS); ++LCV)   /* loop through a training set    */
    {
      for (Pattern=0; (Pattern<NUM_PATTERNS); ++Pattern)  /* each pattern */
         {
            /* FORWARD PROPAGATION */
            Net = UpDateInputAct( InPatterns, Pattern, Net );
            for (Layer=1; (Layer<NUMLAYERS); ++Layer )
             {
              Net = UpDateLayerAct( Net, Layer );

              /* for non-competitive Hebbian learning, comment out the next line: */
              Net = AllOrNoneLayerActs( Net, Layer );
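              /* AllOrNoneLayerActs() comes from nncompet.c; judging from    */
              /* its name and the comment above, it presumably forces the    */
              /* layer to an all-or-none (winner-take-all style) activation, */
              /* which is what makes this Hebbian variant competitive.       */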
             }

            /* OUTPUT PRINTS */
            /*    NOTE: The last number in DisplayLayer() will need to be */
            /*          adjusted to format different size input layers.   */

            DisplayLayer( Net, 0, 4 );             /* display input layer */
            printf( "   " );
            DisplayLayer( Net, (NUMLAYERS-1), 4 ); /* display output layer*/
            printf( "\n" );                        /* new line            */

            /* LEARNING RULE */
            Net = UpDateWeightandThresh( Net );
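            /* UpDateWeightandThresh() (NNHEBBLN.C) applies the Hebbian     */
            /* weight update.  A classic Hebbian step has the form          */
            /*     dw[i][j] += EPSILON * act_post[i] * act_pre[j]           */
            /* i.e. each weight grows with the product of the activations   */
            /* at its two ends; the exact rule lives in NNHEBBLN.C, which   */
            /* is not shown here, so treat this formula as illustrative.    */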
         }

      printf( "\n\n" );                            /* prepare for next set*/

      if ( LCV > (NUM_ITS-10) )
        {
           getc(stdin);           /* pause after each of the last few epochs */
        }
    }

  if (IN_TEST==1)
    {
      InPatterns = InitInPatterns(1);          /* Loads test input patterns */

      printf( "BEGINNING PATTERN TESTING:\n\n" );

      for (Pattern=0; (Pattern<NUM_PATTERNS); ++Pattern)
         {
            Net = UpDateInputAct( InPatterns, Pattern, Net );
            for (Layer=1; (Layer<NUMLAYERS); ++Layer )
             {
              Net = UpDateLayerAct( Net, Layer );

              /* for non-competitive Hebbian learning, comment out the next line: */
              Net = AllOrNoneLayerActs( Net, Layer );
             }

            /* OUTPUT PRINTS */
            /*    NOTE: The last number in DisplayLayer() will need to be */
            /*          adjusted to format different size input layers.   */

            DisplayLayer( Net, 0, 4 );             /* display input layer */
            printf( "   " );
            DisplayLayer( Net, 1, 5 );             /* display output layer*/
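            /* NOTE: layer 1 is the output layer here only because this    */
            /* configuration uses NUM_HIDDEN_LAYERS 0; with hidden layers, */
            /* use (NUMLAYERS-1) as in the training loop above.            */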
            printf( "\n" );                        /* new line            */
         }

      getc(stdin);
    }

  return 0;
}
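
/* Illustrative sketch only (not part of the nn*.c series): a minimal,
 * self-contained Hebbian update for one fully connected layer, kept in an
 * #if 0 block so it does not affect this build.  The sizes, the learning
 * rate, and the helper name hebbian_update() are assumptions chosen for
 * illustration; the rule this simulator actually uses is the one defined
 * in NNHEBBLN.C.
 */
#if 0
#define N_PRE   4                    /* hypothetical input-layer size       */
#define N_POST  4                    /* hypothetical output-layer size      */
#define LRATE   0.05                 /* matches the EPSILON suggested above */

/* Strengthen every connection in proportion to the product of the          */
/* activations at its two ends -- the basic Hebbian rule.                   */
static void hebbian_update( double w[N_POST][N_PRE],
                            const double pre[N_PRE],
                            const double post[N_POST] )
{
  int i, j;

  for (i = 0; i < N_POST; ++i)
    for (j = 0; j < N_PRE; ++j)
      w[i][j] += LRATE * post[i] * pre[j];
}
#endif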