/*
 * $Id: NetBackProp.c 1.7 1995/03/21 10:51:08 projects Exp $
 *
 * Function     NetBackProp
 * Programmer   Nicholas d'Alterio
 *              Gideon Denby
 * Date         20/02/95
 *
 * Synopsis:    This function carries out the back propagation algorithm
 *              to amend a set of weights in a neural network, given a
 *              set of nodes which have been through a feed-forward stage.
 *              It works with any network size.
 *
 * NOTE         It assumes that the nodes are fully connected.
 *
 * $Log: NetBackProp.c $
 * Revision 1.7  1995/03/21  10:51:08  projects
 * Small error in last change fixed
 *
 * Revision 1.6  1995/03/21  10:34:03  projects
 * Made code more readable for loops
 *
 * Revision 1.5  1995/03/20  23:37:25  daltern
 * Added gain factor
 *
 * Revision 1.4  1995/03/16  19:26:28  daltern
 * Added code for momentum factor. Program now has an additional 3D
 * array passed to it to store previous delta weights. If the
 * momentum factor is 0 then it reverts to old behaviour
 *
 * Revision 1.3  1995/03/14  23:34:28  daltern
 * Commented and cleaned up
 *
 *
 */

#include "Neural.h"

void NetBackProp( float ***Weight, float ***Delta_weight, float **Node, NET netI,
                  float *Output, float learn_rate, float mom_fac, float gain )

{
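
/*
 * Parameter notes, inferred from how the arguments are used below
 * rather than from any original specification:
 *
 *   Weight[l][s][e]        weight from node s on layer l to node e on
 *                          layer l+1
 *   Delta_weight[l][s][e]  previous weight change, kept for the
 *                          momentum term
 *   Node[l][n]             node activations; index 0 on each layer is
 *                          taken to be the bias node
 *   netI                   network geometry ( NumLayers, LayerSize[] )
 *   Output                 target output vector, indexed from 0
 *   learn_rate             learning rate for each weight change
 *   mom_fac                momentum factor; 0 restores the pre-1.4
 *                          behaviour
 *   gain                   gain of the activation function used in the
 *                          feed-forward stage
 */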

  register int s_node, e_node, layer;

  int   output_layer;

  int   nodes_on_layer;
  int   nodes_on_next_layer;
  int   nodes_on_prev_layer;

  float delta;
  float upper_error;

/*
 * Loop from the top of the net down.
 */

  output_layer = netI.NumLayers-1;

  for ( layer = output_layer; layer > 0; layer-- ) {

    nodes_on_layer      = netI.LayerSize[layer]+1;
    nodes_on_prev_layer = netI.LayerSize[layer-1]+1;

/*
 * LayerSize[layer+1] only exists below the output layer, so guard the
 * read that previously ran off the end of the array on the first pass.
 */

    if ( layer < output_layer ) {
      nodes_on_next_layer = netI.LayerSize[layer+1]+1;
    }

/*
 * Calculate all the error terms for this layer. Put them in the
 * corresponding positions in the node array, since its values are no
 * longer needed.
 */

/*
 * Check whether this is the output layer and perform the appropriate
 * error calculation.
 */

    if ( layer == output_layer ) {

/*
 * Error term for the output layer.
 */
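
/*
 * The o*(1-o) factor below is the derivative of the logistic (sigmoid)
 * activation, which the feed-forward stage is assumed to have used, so
 * this computes the usual output-layer delta
 *
 *     delta_k = gain * o_k * ( 1 - o_k ) * ( t_k - o_k )
 *
 * where t_k is the target output and o_k the actual output.
 */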

      for ( e_node = 1; e_node < nodes_on_layer; e_node++ ) {

/*
 * Calculate the error term.
 */

        delta = Node[layer][e_node] * gain *
                ( 1 - Node[layer][e_node] ) *
                ( Output[e_node-1] - Node[layer][e_node] );

        Node[layer][e_node] = delta;

      }   /* end for e_node */

    } else {

/*
 * Error term for the lower layers.
 */
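
/*
 * Again assuming a logistic activation, this is the standard
 * back-propagated error for a hidden node j:
 *
 *     delta_j = gain * o_j * ( 1 - o_j ) * sum_k( delta_k * w_jk )
 *
 * where the sum runs over the nodes k on the layer above.
 */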

      for ( s_node = 0; s_node < nodes_on_layer; s_node++ ) {

/*
 * Sum the contributions from the errors in the next layer up. Note
 * that Weight[layer] was already updated on the previous pass of the
 * outer loop, so the error is propagated through the new weights, a
 * slight departure from textbook back propagation. A delta is also
 * stored for the bias node ( s_node 0 ); it is never used by the
 * weight update below, but it is harmless.
 */

        upper_error = 0.0;
        for ( e_node = 1; e_node < nodes_on_next_layer; e_node++ ) {

          upper_error += Node[layer+1][e_node] *
                         Weight[layer][s_node][e_node];

        }   /* end for e_node */

/*
 * Calculate the error term.
 */

        delta = Node[layer][s_node] * gain *
                ( 1 - Node[layer][s_node] ) * upper_error;

        Node[layer][s_node] = delta;

      }   /* end for s_node */

    }   /* end if : calc error depending on layer */

/*
 * Update all the weights in this layer.
 * NOTE Node[layer][e_node] is the stored delta term.
 */
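
/*
 * Each weight change is the delta rule with momentum,
 *
 *     dw(t) = learn_rate * o_s * delta_e  +  mom_fac * dw(t-1)
 *     w    += dw(t)
 *
 * where o_s is the activation of the source node on the layer below
 * and delta_e is the error term stored in Node[layer][e_node].
 */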

/*
 * Loop for each node on this layer.
 */

    for ( e_node = 1; e_node < nodes_on_layer; e_node++ ) {

/*
 * Loop for each node on the previous layer connected to the current
 * node.
 */

      for ( s_node = 0; s_node < nodes_on_prev_layer; s_node++ ) {

        Delta_weight[layer-1][s_node][e_node] =
                     ( learn_rate * Node[layer-1][s_node] *
                       Node[layer][e_node] ) +
                     ( mom_fac * Delta_weight[layer-1][s_node][e_node] );

        Weight[layer-1][s_node][e_node] +=
                     Delta_weight[layer-1][s_node][e_node];

      }   /* end for s_node */

    }   /* end for e_node */

  }   /* end for layer */

  return;

}   /* end function NetBackProp */
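
/*
 * Example usage ( not part of the original file ): a minimal sketch of
 * how NetBackProp might be driven for a 2-3-1 network. It assumes that
 * the NET structure in Neural.h provides the NumLayers and LayerSize
 * members used above, with LayerSize assignable as an int pointer, and
 * that Node[layer][0] is a bias entry set to 1 by the feed-forward
 * stage. The activations set by hand below would normally be left
 * behind by that stage. Compile with -DNETBACKPROP_EXAMPLE to build it.
 */

#ifdef NETBACKPROP_EXAMPLE

#include <stdlib.h>

int main( void )
{

  int   sizes[3]  = { 2, 3, 1 };    /* nodes per layer, excluding bias */
  float Output[1] = { 1.0 };        /* target for the single output    */

  NET   netI;
  float **Node, ***Weight, ***Delta_weight;
  int   l, s;

  netI.NumLayers = 3;
  netI.LayerSize = sizes;           /* assumes LayerSize is an int *   */

/*
 * One activation row per layer; index 0 is the bias node.
 */

  Node = malloc( netI.NumLayers * sizeof(float *) );
  for ( l = 0; l < netI.NumLayers; l++ ) {
    Node[l] = calloc( sizes[l]+1, sizeof(float) );
    Node[l][0] = 1.0;
  }

/*
 * Weight[l][s][e] links layer l to layer l+1, so NumLayers-1 slabs,
 * each ( sizes[l]+1 ) x ( sizes[l+1]+1 ). calloc starts both the
 * weights and the previous delta weights at zero.
 */

  Weight       = malloc( (netI.NumLayers-1) * sizeof(float **) );
  Delta_weight = malloc( (netI.NumLayers-1) * sizeof(float **) );
  for ( l = 0; l < netI.NumLayers-1; l++ ) {
    Weight[l]       = malloc( (sizes[l]+1) * sizeof(float *) );
    Delta_weight[l] = malloc( (sizes[l]+1) * sizeof(float *) );
    for ( s = 0; s <= sizes[l]; s++ ) {
      Weight[l][s]       = calloc( sizes[l+1]+1, sizeof(float) );
      Delta_weight[l][s] = calloc( sizes[l+1]+1, sizeof(float) );
    }
  }

/*
 * Activations that a feed-forward pass would normally have stored.
 */

  Node[0][1] = 0.1;  Node[0][2] = 0.9;
  Node[1][1] = 0.5;  Node[1][2] = 0.4;  Node[1][3] = 0.6;
  Node[2][1] = 0.7;

  NetBackProp( Weight, Delta_weight, Node, netI,
               Output, 0.25, 0.9, 1.0 );

  return 0;

}   /* end main */

#endif  /* NETBACKPROP_EXAMPLE */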