/*--------------------------------------------------------------------------*
 *  Gregory Stevens                                                 7/5/93  *
 *                                 NNWHAT.C                                 *
 *               (Feed-Forward, Back-Propagation, Supervised)               *
 *                                                                          *
 *  This is a program to be used with the nn*.c series for implementing     *
 *  the "what" feature detector algorithm described in the paper called     *
 *  "Why are What and Where Processed By Separate Cortical Visual Systems?" *
 *  by J.G. Rueckl and Kyle R. Cave.  The following constant settings must  *
 *  be made:                                                                *
 *    NNPARAMS.C : INPUT_LAYER_SIZE   25  (the retina: 5x5)                 *
 *                 OUTPUT_LAYER_SIZE   8  (8 possible patterns)             *
 *                 NUM_HIDDEN_LAYERS   1                                    *
 *                 HL_SIZE_1           8  (this can be changed)             *
 *                                                                          *
 *    NNINPUTS.C : NUM_PATTERNS       72  (8 patterns in 9 positions)++     *
 *                                                                          *
 *    NNSTRUCT.C : InitNet()  ...should set output nodes as logistic...     *
 *                                                                          *
 *    NNBKPROP.C : EPSILON          0.25 (recommended...this is what I used)*
 *                                                                          *
 *  Everything else can be left unchanged.                                  *
 *                                                                          *
 *  ++ NOTE: I would have used the full 9 patterns, 9 positions, but ran    *
 *           into inexplicable memory errors on the allocation of pattern   *
 *           76.  I am using Borland C++ version 3.0 on an IBM-compatible   *
 *           486 machine with a math coprocessor and 4 megs of RAM, and     *
 *           have no idea why there is a memory allocation error.  Even     *
 *           with only 72 input patterns I need the huge memory model.      *
 *--------------------------------------------------------------------------*/
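/* For quick reference, the settings listed above as they would look if the
   companion files define them as plain #defines (a sketch assuming that
   form; check the nn*.c files themselves before editing):

       #define INPUT_LAYER_SIZE   25        (NNPARAMS.C)
       #define OUTPUT_LAYER_SIZE   8        (NNPARAMS.C)
       #define NUM_HIDDEN_LAYERS   1        (NNPARAMS.C)
       #define HL_SIZE_1           8        (NNPARAMS.C)
       #define NUM_PATTERNS       72        (NNINPUTS.C)
       #define EPSILON          0.25        (NNBKPROP.C)
*/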
#include "nnbkprop.c"           /* to chain it to the nn*.c utilities    */
#include <stdio.h>              /* for printf() and getc()               */
#include <math.h>               /* for the exp() for logistic units      */

#define NUM_ITS 2000            /* iterations before it stops            */
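
/* A minimal sketch of the logistic unit the header asks InitNet() to select
   for the output nodes: activation = 1 / (1 + exp(-(net input - threshold))).
   The real activation code lives in the nn*.c utilities; the exact handling
   of the threshold there is an assumption, and this helper is not called
   anywhere in this file.                                                    */
static double LogisticSketch( double net_input, double threshold )
{
  return ( 1.0 / (1.0 + exp( -(net_input - threshold) )) );
}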

/* MAIN PROGRAM */
int main( void )
{
  int         Pattern;                 /* for looping through patterns    */
  int         Layer;                   /* for looping through layers      */
  int         LCV;                     /* for looping training sets       */
  NNETtype    Net;
  PATTERNtype InPatterns, OutPattern;

  Net        = InitNet( NUMNODES );    /* initializes the network         */
  InPatterns = InitInPatterns(0);      /* loads input patterns from file  */
  OutPattern = InitOutPatterns();      /* loads output patterns from file */

  for (LCV=0; (LCV<NUM_ITS); ++LCV)    /* loop through a training set     */
  {
    for (Pattern=0; (Pattern<NUM_PATTERNS); ++Pattern)
    {
      /* FORWARD PROPAGATION */
      Net = UpDateInputAct( InPatterns, Pattern, Net );
      for (Layer=1; (Layer<NUMLAYERS); ++Layer)
      {
        Net = UpDateLayerAct( Net, Layer );
      }

      /* OUTPUT PRINTS */
      printf( "Pat: %d ", Pattern );
      printf( "Itr: %d\n", LCV );
      DisplayLayer( Net, 0, 5 );       /* display input layer             */
      printf( "\n" );
      DisplayLayer( Net, 2, 9 );       /* display output layer            */
      printf( "\n\n" );

      if (LCV>50)                      /* once past iteration 50, pause   */
        getc(stdin);                   /*   for a keypress per pattern    */

      /* BACKWARD PROPAGATION */
      Net = UpDateWeightandThresh( Net, OutPattern, Pattern );
    }
  }

  return 0;
}