"Catching Knowledge in Neuro Nets"
by Charles Jorgensen
Full-length version of the code from the article
NEUROSIM.BAS
10 ON ERROR GOTO 30000
20 REM ************************************************************************
30 REM * Experimental Network System written in GW-BASIC 2.02 *
40 REM * C.J. Matheus & C.C. Jorgensen Sept 24, 1986 *
50 REM * *
60 REM * This software may be freely used for educational purposes only. *
70 REM * Reproduction and distribution is allowed provided this statement *
80 REM * and the authors' names are included. *
90 REM * *
100 REM ************************************************************************
110 REM ********************
120 REM SYSTEM PARAMETERS
130 REM ********************
140 MATRIXLOADED = 0 ' flag to test if matrix is loaded
150 PARAMETERS = 6
160 DIM PARAM(PARAMETERS), PARAM$(PARAMETERS)
170 PARAM$(1) = "Number of nodes in network "
180 PARAM(1) = 25
190 PARAM$(2) = "Evaluation procedure (0 = Hopfield, 1 = S. Annealing)"
200 PARAM(2) = 0 ' Evaluation procedure for new states
210 INITTEMP = 8 ' Initial temperature for simulated annealing
220 TEMPDECR = 2 ' temperature decrement per cycle
230 PARAM$(3) = "Percent of nodes firing per cycle "
240 PARAM(3) = 100 ' 100 percent of nodes fire each cycle
250 PARAM$(4) = "Single node threshold value "
260 PARAM(4) = 0 ' threshold that must be exceeded for node to turn on
270 PARAM$(5) = "Learning rule (1=Hopfield, 2=Hebbian, 3=Heb/anti-Heb)"
280 PARAM(5) = 1 ' rule used to adjust connection weights
290 LEARNRULE$(1) = "Hopfield [(2Vi - 1)(2Vj - 1)]"
300 LEARNRULE$(2) = "Hebbian [Vi * Vj]"
310 LEARNRULE$(3) = "Hebb/anti-Hebb [(2Vi - 1) * Vj]"
320 PARAM$(6) = "Input biasing factor "
330 PARAM(6) = 0 ' factor multiplied by the input state and added to the input sum
340 DIM WEIGHT(PARAM(1),PARAM(1)) ' connection weight matrix
350 DIM STATE(PARAM(1)) ' state vector
360 DIM NEWSTATE(PARAM(1)) ' new-state vector
370 DIM INPUTVECTOR(PARAM(1)) ' input state vector (for input biasing)
400 REM *********************
410 REM *** INITIAL SETUP ***
420 REM *********************
430 CLS
440 PRINT " Experimental Network System"
450 PRINT " by"
460 PRINT " C.J. Matheus & C.C. Jorgensen"
470 PRINT
480 PRINT
490 PRINT
500 PRINT " Initial Network Description"
510 PRINT " ---------------------------"
520 GOSUB 11000 ' describe network
530 PRINT
540 INPUT "Change initial network <NO>"; ANSWER$
550 IF LEFT$(ANSWER$,1) = "y" OR LEFT$(ANSWER$,1) = "Y" THEN GOSUB 4000
560 FOR I = 1 TO PARAM(1)
570 READ STATE(I)
580 NEXT I
590 CLS
600 PRINT "Initial ";
610 GOSUB 6000 ' print initial pattern
700 REM *********************
710 REM *** ***
720 REM *** Main Loop ***
730 REM *** ***
740 REM *********************
750 WHILE COMMAND <> 9
760 GOSUB 11000 ' describe network
770 PRINT "Options: 1 = learn"
780 PRINT " 2 = recall"
790 PRINT " 3 = change parameters"
800 PRINT " 4 = display/change state vector"
810 PRINT " 5 = save state vector in file"
820 PRINT " 6 = display weight matrix"
830 PRINT " 7 = load matrix from file"
840 PRINT " 8 = save matrix to file"
850 PRINT " 9 = quit"
860 PRINT
870 INPUT "Option choice"; COMMAND
880 ON COMMAND GOSUB 2000,3000,4000,5000,7000,8000,9000,10000
890 WEND
900 GOTO 32050
1000 REM *********************
1010 REM **** ****
1020 REM **** SUBROUTINES ****
1030 REM **** ****
1040 REM *********************
1050 REM
2000 REM ********************
2010 REM LEARN SUBROUTINE
2020 REM ********************
2030 CLS
2040 PRINT "Learn ";
2050 GOSUB 5040 ' Display and change state
2060 LOCATE 11,1
2070 PRINT "Loading pattern into weight matrix:"
2080 PRINT "Using "; LEARNRULE$(PARAM(5)) ;" learning rule."
2090 ON PARAM(5) GOSUB 2180,2320,2460
2100 CLS
2110 PRINT "Learned ";
2120 GOSUB 6000
2130 MATRIXLOADED = 1 ' turn matrix loaded flag on
2140 RETURN
2150 REM *****************************************
2160 REM Learning Rule Subroutines
2170 REM *****************************************
2180 REM *************************************
2190 REM Hopfield learning rule (2Vi-1)(2Vj-1)
2200 REM *************************************
2210 FOR I = 1 TO PARAM(1) - 1
2220 LOCATE 11,40
2230 PRINT I
2240 FOR J = I + 1 TO PARAM(1)
2250 LOCATE 11,45
2260 PRINT J
2270 WEIGHT(I,J) = WEIGHT(I,J) + (2 * STATE(I) - 1) * (2 * STATE(J) - 1)
2280 WEIGHT(J,I) = WEIGHT(I,J)
2290 NEXT J
2300 NEXT I
2310 RETURN
2320 REM *************************************
2330 REM Hebbian learning rule: Vi*Vj
2340 REM *************************************
2350 FOR I = 1 TO PARAM(1) - 1
2360 LOCATE 11,40
2370 PRINT I
2380 FOR J = I + 1 TO PARAM(1)
2390 LOCATE 11,45
2400 PRINT J
2410 WEIGHT(I,J) = WEIGHT(I,J) + STATE(I) * STATE(J)
2420 WEIGHT(J,I) = WEIGHT(I,J)
2430 NEXT J
2440 NEXT I
2450 RETURN
2460 REM ********************************************************
2470 REM Hebbian/anti-Hebbian learning rule: (2Vi - 1) * Vj
2480 REM ********************************************************
2490 REM This rule takes more time since the matrix is not necessarily symmetric
2500 FOR I = 1 TO PARAM(1)
2510 LOCATE 11,40
2520 PRINT I
2530 FOR J = 1 TO PARAM(1)
2540 IF I = J THEN GOTO 2580
2550 LOCATE 11,45
2560 PRINT J
2570 WEIGHT(I,J) = WEIGHT(I,J) + (2 * STATE(I) - 1) * STATE(J)
2580 NEXT J
2590 NEXT I
2600 RETURN
3000 REM ********************
3010 REM RECALL SUBROUTINE
3020 REM ********************
3030 CLS
3040 IF MATRIXLOADED THEN GOTO 3090
3050 BEEP
3060 PRINT "Weight matrix is empty!"
3070 PRINT "Run either the 'learn' or 'load matrix' option before 'recall'."
3080 GOTO 3420
3090 GOSUB 5040 ' display/change state vector
3100 FOR I = 1 TO PARAM(1)
3110 INPUTVECTOR(I) = (2 * STATE(I) - 1)
3120 NEXT I
3130 DEF FNEVALUATE(INDEX) = PARAM(3) - (100 * RND(1))
3140 IF PARAM(2) = 0 THEN GOTO 3240
3150 REM Simulated annealing options
3160 PRINT
3170 PRINT "Simulated Annealing: Initial temperature <"; INITTEMP ;">";
3180 INPUT ANSWER
3190 IF ANSWER THEN INITTEMP = ANSWER
3200 PRINT " Temperature decrement <"; TEMPDECR ;">";
3210 INPUT ANSWER
3220 IF ANSWER THEN TEMPDECR = ANSWER
3230 TEMP = INITTEMP
3240 ANSWER$ = "YES"
3250 WHILE LEFT$(ANSWER$, 1) <> "N" AND LEFT$(ANSWER$, 1) <> "n"
3260 LOCATE 12,1
3270 PRINT SPACE$(79)
3280 LOCATE 12,1
3290 PRINT "Evaluating nodes";
3300 ON PARAM(2) + 1 GOSUB 3430,3610 ' Evaluate subroutines
3310 FOR I = 1 TO PARAM(1)
3320 STATE(I) = NEWSTATE(I)
3330 NEXT I
3340 PRINT
3350 GOSUB 6000 ' print state vector
3360 INPUT "Cycle again <YES>"; ANSWER$
3370 LOCATE CSRLIN - 1, 1
3380 PRINT SPACE$(30)
3390 WEND
3400 CLS
3410 GOSUB 6000 ' Print state vector
3420 RETURN
3430 REM ***********************************
3440 REM Evaluation Subroutines
3450 REM ***********************************
3460 REM *******************************
3470 REM Straight threshold function
3480 REM *******************************
3490 FOR I= 1 TO PARAM(1)
3500 IF FNEVALUATE(I) <= 0 THEN GOTO 3590
3510 SUM = 0
3520 PRINT ".";
3530 FOR J = 1 TO PARAM(1)
3540 SUM = SUM + WEIGHT(J,I) * STATE(J)
3550 NEXT J
3560 REM E = SUMj(Vj * Wij) + (INPUT * INPUT-BIAS) - THRESHOLD
3570 SUM = SUM - PARAM(4) + (INPUTVECTOR(I) * PARAM(6))
3580 IF SUM > 0 THEN NEWSTATE(I) = 1 ELSE NEWSTATE(I) = 0
3590 NEXT I
3600 RETURN
3610 REM **************************
3620 REM Simulated Annealing
3630 REM **************************
3640 PRINT " (temp =" TEMP ;")";
3650 FOR I= 1 TO PARAM(1)
3660 IF FNEVALUATE(I) <= 0 THEN GOTO 3760
3670 SUM = 0
3680 PRINT ".";
3690 FOR J = 1 TO PARAM(1)
3700 SUM = SUM + WEIGHT(J,I) * STATE(J)
3710 NEXT J
3720 REM E = SUMj(Vj * Wij) + (INPUT * INPUT-BIAS) - THRESHOLD
3730 SUM = SUM - PARAM(4) + (INPUTVECTOR(I) * PARAM(6))
3740 PROB = 1 / (1 + EXP(-SUM/ TEMP))
3750 IF PROB > RND(1) THEN NEWSTATE(I) = 1 ELSE NEWSTATE(I) = 0
3760 NEXT I
3770 TEMP = TEMP - TEMPDECR
3780 IF TEMP <= 0 THEN TEMP = .5
3790 RETURN
4000 REM ********************
4010 REM CHANGE PARAMETERS
4020 REM ********************
4030 CLS
4040 PRINT
4050 PRINT "System parameters:"
4060 PRINT
4070 FOR I = 1 TO PARAMETERS
4080 PRINT I;") "; PARAM$(I);" = "; PARAM(I)
4090 NEXT I
4100 PRINT
4110 INPUT "Parameter # to change <none>"; PNUM
4120 IF PNUM > PARAMETERS OR PNUM = 0 THEN 4150
4130 INPUT "New value"; PARAM(PNUM)
4140 GOTO 4030
4150 CLS
4160 RETURN
5000 REM ********************
5010 REM CHANGE STATE
5020 REM ********************
5030 CLS
5040 GOSUB 6000 ' display pattern
5050 INPUT "Change pattern <NO>"; ANSWER$
5060 IF LEFT$(ANSWER$,1) <> "Y" AND LEFT$(ANSWER$,1) <> "y" THEN GOTO 5510
5070 PRINT
5080 INPUT "Get pattern from FILE or CHANGE current pattern <FILE>"; ANSWER$
5090 IF LEFT$(ANSWER$,1) = "C" OR LEFT$(ANSWER$,1) = "c" THEN GOTO 5250
5100 PRINT
5110 PRINT "Pattern files available:"
5120 PRINT
5130 FILES "*.pat"
5140 PRINT
5150 INPUT "File to load <none>"; FILENAME$
5160 IF FILENAME$ = "" OR FILENAME$ = "none" OR FILENAME$ = "NONE" THEN GOTO 5000
5170 IF INSTR(FILENAME$,".") = 0 THEN FILENAME$ = FILENAME$ + ".pat"
5180 OPEN FILENAME$ FOR INPUT AS #1
5190 FOR I = 1 TO PARAM(1)
5200 INPUT #1, STATE(I)
5210 NEXT I
5220 CLOSE #1
5230 CLS
5240 GOTO 5000
5250 REM *** Create new pattern ***
5260 CLS
5270 GOSUB 6000 ' Display state
5280 ROWLEN = SQR(PARAM(1))
5290 XINPUT = ROWLEN + 4
5300 LOCATE 12,1
5310 PRINT "To change the current state of each node respond to the prompts"
5320 PRINT "with either a 0 or 1 to turn on or off the state of the node "
5330 PRINT "pointed to by the uparrow; all other responses leave the state"
5340 PRINT "unchanged."
5350 FOR I = 1 TO PARAM(1)
5360 Y = (((I - 1) MOD ROWLEN)+ 1) * 2
5370 X = INT((I - 1) / ROWLEN ) + 3
5380 LOCATE X + 1,Y
5390 PRINT "^"
5400 LOCATE XINPUT, 1
5410 INPUT "Node state <unchanged>"; STATE$
5420 IF STATE$ = "1" THEN STATE(I) = 1
5430 IF STATE$ = "0" THEN STATE(I) = 0
5440 LOCATE X,Y
5450 IF STATE(I) THEN PRINT "*" ELSE PRINT " "
5460 LOCATE X + 1,Y
5470 IF STATE(I + ROWLEN) THEN PRINT "*" ELSE PRINT " "
5480 NEXT I
5490 CLS
5500 GOTO 5040
5510 LOCATE CSRLIN - 1, 1
5520 PRINT SPACE$(40)
5530 LOCATE CSRLIN - 2, 1
5540 RETURN
6000 REM ********************
6010 REM PRINT SUBROUTINE
6020 REM ********************
6030 ROWLEN = SQR(PARAM(1))
6040 PRINT "Pattern:"
6050 FOR I = 1 TO ROWLEN
6060 PRINT
6070 FOR J = 1 TO ROWLEN
6080 IF STATE( ROWLEN * (I - 1) + J ) THEN PRINT " *"; ELSE PRINT " ";
6090 NEXT J
6100 NEXT I
6110 PRINT
6120 PRINT
6130 RETURN
7000 REM ********************
7010 REM SAVE STATE FILE
7020 REM ********************
7030 GOSUB 5000
7040 PRINT
7050 PRINT "Pattern files:"
7060 PRINT
7070 FILES "*.pat"
7080 PRINT
7090 INPUT "File name to save state in <none>"; FILENAME$
7100 IF FILENAME$ = "" OR FILENAME$ = "none" THEN GOTO 7250
7110 IF INSTR(FILENAME$,".") = 0 THEN FILENAME$ = FILENAME$ + ".pat"
7120 OPEN FILENAME$ FOR OUTPUT AS #1
7130 CLS
7140 PRINT
7150 PRINT
7160 PRINT "Saving state";
7170 FOR I = 1 TO PARAM(1)
7180 PRINT #1, STATE(I)
7190 PRINT ".";
7200 NEXT I
7210 PRINT
7220 PRINT
7230 CLOSE #1
7240 PRINT "State saved in "; FILENAME$
7250 RETURN
8000 REM *********************
8010 REM DISPLAY WEIGHT MATRIX
8020 REM *********************
8030 PRINT "Weight Matrix:"
8040 FOR I = 1 TO PARAM(1)
8050 PRINT
8060 FOR J = 1 TO PARAM(1)
8070 PRINT WEIGHT(I,J);
8080 NEXT J
8090 NEXT I
8100 PRINT
8110 INPUT "Press CR to continue"; ANSWER$
8120 CLS
8130 RETURN
9000 REM ********************
9010 REM LOAD MATRIX FILE
9020 REM ********************
9030 CLS
9040 PRINT "Available matrix files:"
9050 PRINT
9060 FILES "*.mat"
9070 PRINT
9080 INPUT "File to load <none>"; FILENAME$
9090 IF FILENAME$ = "" OR FILENAME$ = "none" OR FILENAME$ = "NONE" THEN GOTO 9270
9100 IF INSTR(FILENAME$,".") = 0 THEN FILENAME$ = FILENAME$ + ".mat"
9110 OPEN FILENAME$ FOR INPUT AS #1
9120 CLS
9130 PRINT
9140 PRINT
9150 PRINT "Loading matrix:"
9160 FOR I = 1 TO PARAM(1)
9170 LOCATE 3,17
9180 PRINT I
9190 FOR J = 1 TO PARAM(1)
9200 INPUT #1, WEIGHT(I,J)
9210 NEXT J
9220 NEXT I
9230 CLOSE #1
9240 PRINT
9250 PRINT FILENAME$; " loaded into matrix."
9260 MATRIXLOADED = 1 ' turn matrix loaded flag on
9270 RETURN
10000 REM ********************
10010 REM SAVE MATRIX FILE
10020 REM ********************
10030 CLS
10040 PRINT "Matrix files:"
10050 FILES "*.mat"
10060 PRINT
10070 INPUT "File name to save matrix under <none>"; FILENAME$
10080 IF FILENAME$ = "" THEN GOTO 10250
10090 IF INSTR(FILENAME$,".") = 0 THEN FILENAME$ = FILENAME$ + ".mat"
10100 OPEN FILENAME$ FOR OUTPUT AS #1
10110 CLS
10120 PRINT
10130 PRINT
10140 PRINT "Saving Matrix:"
10150 FOR I = 1 TO PARAM(1)
10160 LOCATE 3,16
10170 PRINT I
10180 FOR J = 1 TO PARAM(1)
10190 PRINT #1, WEIGHT(I,J)
10200 NEXT J
10210 NEXT I
10220 CLOSE #1
10230 PRINT
10240 PRINT "Matrix saved in "; FILENAME$
10250 RETURN
11000 REM *********************
11010 REM *** DESCRIBE NET ***
11020 REM *********************
11030 PRINT
11040 IF PARAM(2) THEN PRINT "Sim Annealing"; ELSE PRINT "Hopfield-like";
11050 PRINT " network: size ="; PARAM(1) ;"threshold ="; PARAM(4) ;
11060 PRINT " input bias ="; PARAM(6) ;" fire% ="; PARAM(3)
11070 PRINT SPACE$(23);
11080 PRINT "learning rule = "; LEARNRULE$(PARAM(5))
11090 PRINT
11100 RETURN
20000 REM *********************
20010 REM *** ***
20020 REM *** data ***
20030 REM *** ***
20040 REM *********************
20050 REM Initial pattern: "o"
20060 DATA 1, 1, 1, 1, 1
20070 DATA 1, 0, 0, 0, 1
20080 DATA 1, 0, 0, 0, 1
20090 DATA 1, 0, 0, 0, 1
20100 DATA 1, 1, 1, 1, 1
30000 REM *********************
30010 REM *** ***
30020 REM *** ERROR HANDLER ***
30030 REM *** ***
30040 REM *********************
30050 IF ERR <> 53 THEN 30150
30060 IF ERL = 5130 THEN RESUME 5140
30070 IF ERL = 7070 THEN RESUME 7080
30080 IF ERL = 9060 THEN RESUME 9070
30090 IF ERL = 10050 THEN RESUME 10060
30100 PRINT "File not found."
30110 IF ERL = 5180 THEN RESUME 5140
30120 IF ERL = 7120 THEN RESUME 7080
30130 IF ERL = 9110 THEN RESUME 9070
30140 IF ERL = 10100 THEN RESUME 10060
30150 IF ERL <> 5470 THEN GOTO 30180
30160 PRINT " "
30170 RESUME 5480
30180 PRINT "Error #"; ERR ;"at line"; ERL
30190 PRINT "Continuing at main loop..."
30200 CLOSE
30210 RESUME 700
32000 REM *********************
32010 REM *** ***
32020 REM *** END PROGRAM ***
32030 REM *** ***
32040 REM *********************
32050 CLOSE
32060 ON ERROR GOTO 0 ' turn off error trapping
32767 END
README file for NEUROSIM.BAS
------------------------------------
NEUROSIM is written in GW-BASIC 2.02
------------------------------------
An Experimental Network System
conceived and written by: C.J. Matheus & C.C. Jorgensen
This program is intended to provide the user with a feel for some of the
various aspects of programming neural networks. The user is presented
with several parameters which may be altered to produce different versions
of Hopfield-like or Boltzmann-like networks. Also included are three
different types of learning rules: Hopfield (2*Vi - 1)(2*Vj - 1),
Hebbian (Vi * Vj), and Hebbian/anti-Hebbian (2*Vi - 1) * Vj. While this
system is intended to be purely educational, it does have the capabilities
of some full-fledged network systems and is limited primarily by its size
and speed.
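
As a side note for modern readers (this is not part of the original BASIC
listing), the three learning rules amount to only a few lines in a
present-day language. The Python sketch below mirrors the LEARN subroutines
in the program above for a 0/1 state vector; the function name and argument
layout are illustrative only:

def learn(weights, v, rule):
    # Add one pattern v into the weight matrix in place.
    #   rule 1: Hopfield        (2Vi - 1)(2Vj - 1)   symmetric
    #   rule 2: Hebbian          Vi * Vj             symmetric
    #   rule 3: Hebb/anti-Hebb  (2Vi - 1) * Vj       not necessarily symmetric
    n = len(v)
    for i in range(n):
        for j in range(n):
            if i == j:
                continue                    # no self-connections
            if rule == 1:
                weights[i][j] += (2 * v[i] - 1) * (2 * v[j] - 1)
            elif rule == 2:
                weights[i][j] += v[i] * v[j]
            else:
                weights[i][j] += (2 * v[i] - 1) * v[j]

# Example: store one 4-node pattern with the Hopfield rule.
w = [[0] * 4 for _ in range(4)]
learn(w, [1, 0, 0, 1], rule=1)

Rules 1 and 2 produce a symmetric matrix, which is why the BASIC code only
visits the upper triangle and mirrors it; rule 3 need not be symmetric and
therefore visits every i,j pair.
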
The basic network consists of 25 nodes (or neurons), totally interconnected
through a weight matrix W. For visual purposes the state vector of the
network is laid out as a 5 by 5 matrix. Patterns are created in the state
vector (usually alphabetic character representations) and then loaded into
the weight matrix through a "learn"ing procedure or recalled from the matrix
through the "recall" process.
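
For readers following along in another language, one "recall" cycle of the
Hopfield-style evaluation is a plain threshold update: each node sums its
weighted inputs, adds the input bias times its cue value, subtracts the
threshold, and turns on if the result is positive. The Python sketch below
is illustrative only (with the fire percentage left at 100 so every node is
evaluated) and follows the straight-threshold subroutine in the listing:

def recall_cycle(weights, v, cue, threshold=0.0, bias=0.0):
    # One synchronous cycle: every node is re-evaluated from the old state.
    n = len(v)
    new_v = list(v)
    for i in range(n):
        total = sum(weights[j][i] * v[j] for j in range(n))
        total += cue[i] * bias - threshold
        new_v[i] = 1 if total > 0 else 0
    return new_v

# The cue vector is the +1/-1 coding of the starting pattern, as in the program.
start = [1, 0, 0, 1]
cue = [2 * s - 1 for s in start]
weights = [[0.0] * 4 for _ in range(4)]   # would normally come from learning
print(recall_cycle(weights, start, cue))

Cycling this update until the state stops changing is what the "recall"
option does interactively.
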
When the program is run the default network configuration is described: a
Hopfield-like 25 node network with threshold = 0, input bias = 0, and fire
percent = 100. The default learning rule is the Hopfield rule. These
parameters can be changed initially or at any other time using the "change
parameters" option.
The network is initially loaded with a pattern representing the letter 'O':
Initial Pattern:
    * * * * *
    *       *
    *       *
    *       *
    * * * * *
This pattern (or any other state pattern) can be changed with the
"display/change state vector" option (most other options also allow you to
change the pattern). The weight matrix is initially empty and must be
taught or loaded before recall is allowed.
After describing the network configuration and displaying the initial
pattern the program enters the main option loop. The options available
are:
1) learn
takes the pattern in the state vector and stores it
into the weight matrix according to the current learning rule.
2) recall
uses the current state vector as the initial "cue" pattern
and recalls the closest and/or strongest pattern retrievable
from the weight matrix. Recall is not allowed unless
the matrix has first been loaded with either the "learn" or
"load matrix" option.
3) change parameters
allows the user to change the 6 major system parameters:
1) Number of nodes in network
the number of nodes is initially set to 25.
It is not recommended that this number be
changed unless you are very familiar with the
system and willing to possibly make some
programming changes since the system has not
been thoroughly tested on different size
networks. If you do change this parameter
the number chosen should be a square due to
the way the program displays the state vector.
Also the pattern and matrix files for one
size network will NOT necessarily work for
other size networks.
2) Evaluation procedure
if this is set to 0 then the evaluation
process will simulate a Hopfield-like network;
set to 1 it will perform evaluation using a
simulated annealing process similar to that
used by a Boltzmann machine (a sketch of this
stochastic update follows the option list).
3) Percent of nodes firing per cycle
this parameter allows the user to specify
what percentage of the nodes in the network
will be evaluated for firing each cycle.
When this value is 100 all nodes fire each
cycle, and the network becomes synchronous
and deterministic. By decreasing this
value operation becomes asynchronous (i.e. nodes
no longer fire in order, one after another) and
the system is no longer deterministic (i.e. the
same initial pattern may settle into different
final states on different occasions). The
rationale for this parameter is that it is
intended to simulate part of the asynchronous
behavior of biological neurons, and also it
helps to keep the system from getting caught
in weak local minima as opposed to the desired
stronger nearby minima.
4) Single node threshold value
by increasing the threshold you increase
the amount of input that a node must receive
from other nodes in order to turn on. The
effect this has on recall varies depending
upon the weight matrix, but it seems to be
most useful in conjunction with the Hebbian
rules which otherwise have difficulty retrieving
the original patterns without noise. This
parameter can also make the network "forget"
certain memories and recall instead their
linear superpositions.
5) Learning rule
select one of the following:
Hopfield (2Vi - 1)(2Vj - 1)
Hebbian Vi * Vj
Hebbian/anti-Hebbian (2Vi - 1) * Vj
It is helpful to draw up the truth tables
for these rules to see how they affect the
weights between nodes of different activation.
The Hopfield rule is the most effective for
associative recognition, which is the primary
use for a network of this size and design.
6) Input bias
this parameter determines how much
influence the initial "cue" pattern should
have on the settling of the network. When
set to 0 the settling process is only
influenced by the input pattern on the first
cycle. Setting it to one or more biases each
node towards being in its initial cue state
on all future cycles (the greater the factor the
greater the biasing).
4) display/change state vector
displays the current state vector and allows it to be
changed by either loading an existing pattern file or
changing the current pattern. Changing the current pattern
involves the use of a simple visual pattern editor in which
you can change the state of each individual node in the network.
5) save state vector in file
saves the current state vector in a file named by the user
(only one pattern per file; the file layout is
sketched after the option list).
6) display weight matrix
displays the individual weights stored in the weight matrix.
The standard weight matrix size is 25 by 25 with entry Wij
representing the connection weight between the ith and jth
nodes.
7) load matrix from file
lists the names of existing weight matrix files and
allows the user to select one to be loaded into the weight
matrix.
8) save matrix in file
saves the current weight matrix in a file named by
the user. This is often useful since the learning process
can be tedious.
9) quit
exits from the program. This can also be accomplished at
any time using ^C or BREAK (provided your PC is set up for
breaking).
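
The simulated-annealing evaluation mentioned under "Evaluation procedure"
above replaces the hard threshold with a probability that depends on a
temperature which falls each cycle. The Python sketch below is again
illustrative rather than the original code; it follows the annealing
subroutine in the listing and includes the fire-percentage gate of
parameter 3:

import math, random

def anneal_cycle(weights, v, cue, temp, threshold=0.0, bias=0.0, fire_pct=100):
    # One stochastic cycle: each node is considered with probability
    # fire_pct/100 and turns on with probability 1 / (1 + exp(-E / temp)).
    n = len(v)
    new_v = list(v)
    for i in range(n):
        if random.random() * 100 >= fire_pct:
            continue                          # node does not fire this cycle
        energy = sum(weights[j][i] * v[j] for j in range(n))
        energy += cue[i] * bias - threshold
        p_on = 1.0 / (1.0 + math.exp(-energy / temp))
        new_v[i] = 1 if random.random() < p_on else 0
    return new_v

# The temperature starts at the initial value and drops each cycle,
# never reaching zero (the program floors it at 0.5):
temp, decrement = 8.0, 2.0
temp = temp - decrement
if temp <= 0:
    temp = 0.5

# Example call with a 4-node toy network (zero weights, cue from [1, 0, 0, 1]):
print(anneal_cycle([[0.0] * 4 for _ in range(4)], [1, 0, 0, 1],
                   [1, -1, -1, 1], temp))

At high temperature the update is close to a coin flip; as the temperature
falls it approaches the hard threshold rule, which is what lets the network
escape weak local minima early on and then settle.
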
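For reference, the pattern and matrix files the program writes are plain
text: the save options PRINT # one number per line and the load options read
them back with INPUT #, so any whitespace-separated list of numbers in the
right order will do. A short Python sketch of that layout (the helper and
file names below are made up for illustration):

def save_pattern(filename, v):
    # One value per line, which GW-BASIC's INPUT # reads back without trouble.
    with open(filename, "w") as f:
        for x in v:
            f.write(" %s \n" % x)

def load_pattern(filename, count):
    with open(filename) as f:
        return [int(tok) for tok in f.read().split()][:count]

save_pattern("letter_o.pat", [1, 0, 0, 1])
print(load_pattern("letter_o.pat", 4))
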
The program is designed to be relatively simple to use. All prompts have
assigned defaults which are indicated between brackets (e.g. <NO>); the
default will be assumed if the user does not provide a value (i.e. the user
simply hits the carriage return).
The general procedure for using the system is as follows:
* Enter basic and run NEUROSIM
* Configure the network by selecting values for the various
parameters (not necessary if the default Hopfield-like network is used).
* Teach the network one or more patterns using the "learn" option
(After creating a pattern and teaching it to the network it is
usually a good idea to save that pattern in a file (using the
"save state vector" option) which can then be retrieved later
for setting up a pattern for recall).
OR
Load a weight matrix from an existing file.
* Select the recall option and create a pattern to be recalled by the
network. To see how well a network recalls its patterns it is best to
create patterns that are slightly distorted from the originally taught
patterns. If the original patterns were stored in files, then they
can be loaded into the state vector and distorted using the pattern
editor.
* Change some parameter(s) and notice the effect it has on recall
and/or learning.
* Save the weight matrix if you intend to continue experimentation.
It is hoped that through experimentation with this program you will obtain
a fuller appreciation for the functioning of neural networks than could be
obtained from simply reading about them.
Enjoy! (9-25-86 CJM)