home *** CD-ROM | disk | FTP | other *** search
/ Liren Large Software Subsidy 9 / 09.iso / e / e065 / 1.ddi / DNNANCUM.INS < prev    next >
Encoding:
Text File  |  1991-08-28  |  4.5 KB  |  159 lines

  1. inst4.0        ! 07-Aug-90  (dnnancum.ins)  DNNA InstaNet
  2. !****************************************************************
  3. !*                                *
  4. !*    Digital Neural Network Architecture Builder        *
  5. !*    with Norm-Cum-Delta Learning Rule            *
  6. !****************************************************************
  7.  
  8. !    *** check that input / output PE count is non-zero
  9.  
  10. ?&In    1            !compare # of input PEs (menu field &In) against 1
  11. >bge    CheckOut        !branch if &In >= 1
  12. @Err    "DNNA MUST have at least one input PE"
  13. :CheckOut
  14. ?&Out    1            !compare # of output PEs against 1
  15. >bge    OutOK            !branch if &Out >= 1
  16. @Err    "DNNA MUST have at least one output PE"
  17. :OutOK
  18. ?&Hd1    1            !compare # of hidden layer 1 PEs against 1
  19. >bge    HidOK            !branch if &Hd1 >= 1
  20. @Err    "DNNA MUST have at least one Hidden Layer 1 PE"
  21. :HidOK
  22.  
  23. !    *** Load the Control Strategy and LRS if needed
  24.  
  25. @LdCS    "backprop"        !control strategy
  26. @LdLR    "ncum_hid"        !L/R schedule for the hidden layers
  27. @LdLR    "ncum_out"        !L/R schedule for the output layer
  28.  
  29. =netn    "InstaNet (tm) Digital Neural Network Arch. version 2.0 07-Aug-90"    !network title
  30. =DLnF    0            !learn  re-display off
  31. =DRcF    0            !recall re-display off
  32.  
  33. !    *** Build the Input Layer ***
  34.  
  35. @LLdf                !load default layer to mi_layer structure
  36. =LDln    "In"            !layer name
  37. =Lpes    &In            !copy # of input PEs from menu
  38. =Ltrn    "Linear"        !linear transfer function (input buffer)
  39. =x    100            !place to put layer on screen
  40. =y     80            !initial vertical position on screen
  41. #Incl    "stdnwgtf.iif"        !standard # weight fields
  42. @LAdd                !add the input layer
  43.  
  44. !    *** Build the first hidden Layer ***
  45.  
  46. @LLdf                !start with default layer again
  47. =LDln    "Hidden 1"        !layer name
  48. =Lpes    &Hd1            !Proper number of PEs
  49. =Lsum    "DNNA"            !DNNA sum function
  50. =Ltrn    "DNNA"            !DNNA transfer function
  51. =Llrn    "Norm-Cum-Delta"    !Norm-Cum Delta learning rule
  52. =Llrs    "ncum_hid"        !L/R Schedule
  53. +y    60            !up higher on display
  54. #Incl    "stdnwgtf.iif"        !standard # weight fields
  55. @LAdd                !add hidden layer 1
  56.  
  57. !    *** Connect Hidden Layer to Bias & Input Layers ***
  58.  
  59. =SPEl    LayN            !current layer
  60. @SlPE                !select it as destination (sb already)
  61. =NPEl    -1            !near to bias term (source)
  62. @NrPE                !make the bias term the near (source) PE
  63. =cnwt    1.0            !connection weight
  64. =cnty    WVar            !variable
  65. =cnsc    WAbs            !absolute
  66. @LCFl                !fully connect to bias element
  67. =NPEl    0            !input layer
  68. @NrPE                !make the input layer the near (source) layer
  69. @LCFl                !fully connect to input layer
  70. =n0    LayN            !save current layer # for the next layer's source
  71.  
  72. !    *** Build the Second hidden Layer ***
  73.  
  74. ?&Hd2    0
  75. >ble    NoHid2            !skip adding second hidden layer
  76.  
  77. @LLdf                !start with default layer again
  78. =LDln    "Hidden 2"        !layer name
  79. =Lpes    &Hd2            !Proper number of PEs
  80. =Lsum    "DNNA"            !DNNA sum function
  81. =Ltrn    "DNNA"            !DNNA transfer function
  82. =Llrn    "Delta-Rule"        !Standard Delta learning rule
  83. =Llrn    "Norm-Cum-Delta"    !Norm-Cum Delta learning rule
  84. =Llrs    "ncum_hid"        !L/R Schedule
  85. +y    60            !up higher on display
  86. #Incl    "stdnwgtf.iif"        !standard # weight fields
  87. @LAdd
  88.  
  89. !    *** Connect Hidden Layer 2 to Bias & Input Layers ***
  90.  
  91. =SPEl    LayN            !current layer
  92. @SlPE                !select it as destination (sb already)
  93. =NPEl    -1            !near to bias term (source)
  94. @NrPE
  95. @LCFl                !fully connect to bias element
  96. =NPEl    n0            !previous layer
  97. @NrPE
  98. @LCFl                !fully connect to input layer
  99. =n0    LayN            !save current layer for later
  100. :NoHid2
  101.  
  102. !    *** Build the output layer & connect it to prior layer & bias term ***
  103.  
  104. @LLdf                !load default layer to mi_layer structure
  105. =LDln    "Out"            !layer name
  106. =Lpes    &Out            !copy # of input PEs from menu
  107. =Lsum    "DNNA"            !DNNA sum function
  108. =Ltrn    "DNNA"            !DNNA transfer function
  109. =Llrn    "Delta-Rule"        !Standard  Delta learning rule
  110. =Llrn    "Norm-Cum-Delta"    !Norm-Cum Delta learning rule
  111. =Llrs    "ncum_out"        !L/R Schedule
  112. +y    60
  113. #Incl    "stdnwgtf.iif"        !standard # weight fields
  114. @LAdd                !add the output layer
  115. =n7    LayN            !save for stdprobe
  116.  
  117. =SPEl    LayN            !current layer
  118. @SlPE                !select it as destination (sb already)
  119. =NPEl    -1            !near to bias term (source)
  120. @NrPE
  121. @LCFl                !fully connect to bias element
  122. =NPEl    n0            !previous layer
  123. @NrPE
  124. @LCFl                !fully connect to input layer
  125.  
  126. !    *** Select Control Strategy & L/R Schedule ***
  127.  
  128. @LLsl                !load super layer
  129. =Lctl    "backprop"        !backprop control strategy
  130. =Llrs    "ncum_out"        !backprop L/R Schedule
  131. =Llnn    "train"            !name of learn input
  132. =Lrcn    "test"            !name of recall output
  133. #Incl    "stdioset.iif"        !standard I/O settings
  134. =Lscl     -1.0            !input  low-value
  135. =Loff     1.0            !input  high-value
  136. =Llow     0.2            !output low-value
  137. =Lhgh     0.8            !output high-value
  138. =DLnN     1            !learn count for Check Points
  139. =MWLF     1            !turn on weight limiting
  140. =WtVl     0.99            !max value for weight limiting
  141. @SVsl                !save super layer back
  142. !
  143. =n5    0.0            !probe parameter for stdprobe.iif (presumably range low) - TODO confirm
  144. =n6    1.0            !probe parameter for stdprobe.iif (presumably range high) - TODO confirm
  145. =n4    &Out            !# of output PEs, used by the standard probe
  146. #Incl    "stdprobe.iif"        !Standard probe include file
  147. !
  148. =jogl    -.1            !lower limit for jog
  149. =jogh    +.1            !upper limit for jog
  150. =seed    257            !starting seed number
  151. @seed                !set the seed
  152. @Nini                !initialize the network
  153.  
  154. @LLsl                !load super layer
  155. =Lepc     0            !set learn counter to zero
  156. @SVsl                !save super layer back
  157. @EOF                !end of network-build script
  158.  
  159.