/********************************************************************************
 * neural.c
 *
 * Neural Network Package
 *
 * Written by Paco Xander Nathan
 * © 1990, Motorola Inc. Public domain source code.
 ********************************************************************************/

#include "applic.h"
#include "neural.h"
#define NN_INPUT 181
#define NN_HIDDEN 38
#define NN_OUTPUT 1
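
/* The net is a fixed three-layer, feed-forward topology: NN_INPUT binary
 * inputs fully connected to NN_HIDDEN hidden neurons, which in turn feed
 * NN_OUTPUT output neurons.
 */
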
typedef enum {
    nnLayerInput = 0, nnLayerHidden, nnLayerOutput
} NNLayerValues;

/* External Data Structures
 */
float
    nnCertainty = 0.0,
    nnOutput = 0.0;

Boolean
    nnClusterFlag = TRUE;

/* Local Data Structures
 */
static float
    nnHidLayer[NN_HIDDEN],
    nnOutLayer[NN_OUTPUT],
    nnInWeight[NN_INPUT][NN_HIDDEN],
    nnOutWeight[NN_HIDDEN][NN_OUTPUT];

/* Initialize the network
 */
void
NNInit ()
{
    /* Set default values
     */
    nnCertainty = 0.0;
    nnClusterFlag = TRUE;
    NNSetAllWeights(0.0);
}

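/* Note that a freshly initialized net has every weight at 0.0, so
 * NNActivate() reports zero certainty until at least one pattern has been
 * taught with NNLearn().
 */
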
/* Sets the weight of all connections between all neurons in all layers
 * to the given value
 */
void
NNSetAllWeights (value)
    register float value;
{
    register short i, j;

    /* Weights between input and hidden
     */
    for (i = 0; i < NN_INPUT; i++)
        for (j = 0; j < NN_HIDDEN; j++)
            nnInWeight[i][j] = value;

    /* Weights between hidden and output
     */
    for (i = 0; i < NN_HIDDEN; i++)
        for (j = 0; j < NN_OUTPUT; j++)
            nnOutWeight[i][j] = value;
}

/* Sets the weight between two neurons to the given value
 */
void
NNSetWeight (from, to, layer, value)
    register short from, to, layer;
    register float value;
{
    switch (layer) {
    case nnLayerHidden:
        if ((from < NN_INPUT) && (to < NN_HIDDEN))
            nnInWeight[from][to] = value;
        break;

    case nnLayerOutput:
        if ((from < NN_HIDDEN) && (to < NN_OUTPUT))
            nnOutWeight[from][to] = value;
        break;

    default:
        break;
    }
}

/* Teach the net a thing or two
 */
void
NNLearn (bit, nBits, theOutput)
    register Boolean *bit;
    register WORD nBits;
    register WORD theOutput;
{
    register WORD theBit, sumBits;
    register float distWeight = 1.0;

    /* Use the distributed weight of the input vector to set up
     * the hidden layer
     */
    for (theBit = 0, sumBits = 0; theBit < nBits; theBit++)
        sumBits += bit[theBit];

    if (sumBits > 0)
        distWeight = 1.0 / sumBits;

    ApplSpinCurs(FALSE);

    for (theBit = 0; theBit < nBits; theBit++)
        if (bit[theBit])
            NNSetWeight(theBit, theOutput, nnLayerHidden, distWeight);

    /* Set up the output layer
     */
    NNSetWeight(theOutput, 0, nnLayerOutput, (float) theOutput);
}

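/* Worked example: a training vector with four bits set gives each of those
 * four input-to-hidden weights the value 0.25, so replaying the identical
 * pattern through NNActivate() sums to exactly 1.0 at the taught hidden
 * neuron (nnCertainty == 100.0), while a pattern sharing only three of the
 * four bits sums to 0.75.
 */
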
/* Sets a neural network in motion by sequentially activating each neuron
 */
void
NNActivate (bit, nBits)
    register Boolean *bit;
    register WORD nBits;
{
    register float sum, highest = 0.0;
    register WORD i, j, closest = -1;

    /* Activate hidden layer
     */
    for (i = 0; i < NN_HIDDEN; i++) {
        for (j = 0, sum = 0.0; j < nBits; j++)
            sum += bit[j] * nnInWeight[j][i];
        nnHidLayer[i] = sum;
        ApplSpinCurs(FALSE);
    }

    /* Perform a cluster function. It inhibits (sets to 0) all neurons in the
     * cluster except the one which is closest to the value 1.0. That neuron
     * is set to 1.0. The net's certainty value is assigned the certainty to
     * which the closest neuron felt it matched the pattern.
     */
    if (nnClusterFlag) {
        for (i = 0; i < NN_HIDDEN; i++)
            if (nnHidLayer[i] > highest) {
                closest = i;
                highest = nnHidLayer[i];
            }

        /* All are equally likely - choose the first
         */
        if (closest == -1)
            closest = 0;

        nnCertainty = nnHidLayer[closest] * 100.0;

        /* Cause just enough feedback to the neuron which is closest to being
         * "on". That is, set it "on". All others are given negative feedback
         * to force them to zero. (Set them to zero.)
         */
        for (i = 0; i < NN_HIDDEN; i++)
            if (i == closest)
                nnHidLayer[i] = 1.0;
            else
                nnHidLayer[i] = 0.0;
    }

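    /* (This cluster pass amounts to a winner-take-all rule: only the
     * best-matching hidden neuron survives to drive the output layer.)
     */
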
    /* Activate output layer
     */
    for (i = 0; i < NN_OUTPUT; i++) {
        for (j = 0, sum = 0.0; j < NN_HIDDEN; j++) {
            sum += nnHidLayer[j] * nnOutWeight[j][i];
            ApplSpinCurs(FALSE);
        }
        nnOutLayer[i] = sum;
    }

    /* For the purposes of this application, we only need one
     * public output...
     */
    nnOutput = nnOutLayer[0];
}
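
/* Usage sketch - illustrative only, not part of the original package. It
 * assumes applic.h supplies Boolean, WORD, TRUE, and ApplSpinCurs(), and
 * the NNExample name and test pattern below are made up for demonstration.
 * Teaching a pattern as hidden class 1 and replaying it should yield
 * nnCertainty == 100.0 and nnOutput == 1.0.
 */
#ifdef NN_EXAMPLE
static void
NNExample ()
{
    Boolean pattern[NN_INPUT];
    short i;

    /* An arbitrary 181-bit test pattern: every third bit on */
    for (i = 0; i < NN_INPUT; i++)
        pattern[i] = (i % 3 == 0);

    NNInit();
    NNLearn(pattern, NN_INPUT, 1);      /* teach it as hidden neuron 1 */
    NNActivate(pattern, NN_INPUT);      /* sets nnCertainty and nnOutput */
}
#endif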