home *** CD-ROM | disk | FTP | other *** search
/ Liren Large Software Subsidy 9 / 09.iso / e / e065 / 1.ddi / DNNASAMP.INS < prev    next >
Encoding:
Text File  |  1991-08-28  |  3.3 KB  |  117 lines

  1. inst4.0        ! 07-Aug-90  (dnnasamp.ins)  DNNA InstaNet
  2. !****************************************************************
  3. !*                                *
  4. !*    Digital Neural Network Architecture Builder        *
  5. !*        sample XOR problem                *
  6. !****************************************************************
  7.  
  8.  
  9. !    *** Load the Control Strategy and LRS if needed
  10.  
  11. @LdCS    "backprop"        !load the backprop control strategy
  12. @LdLR    "backprop"        !load the backprop learn/recall (L/R) schedule
  13.  
  14. =netn    "InstaNet (tm) Digital Neural Network Arch. version 2.0 07-Aug-90"
  15.  
  16. =DRcF    1            !recall re-display on (redraw network during recall)
  17. =DRcN    1            !show each recall step
  18.  
  19. !    *** Build the Input Layer ***
  20.  
  21. @LLdf                !load default layer into mi_layer structure
  22. =LDln    "In"            !layer name
  23. =Lpes    2            !2 PEs, one per XOR input
  24. =Ltrn    "Linear"        !linear transfer: input layer acts as a buffer
  25. =LDsp    6            !PE spacing on the display
  26. =x    100            !screen x position for this layer
  27. =y     80            !screen y position for this layer
  28. #Incl    "stdnwgtf.iif"        !standard # weight fields
  29. @LAdd                !add the input layer
  30. =x    130            !shift x so hidden & output layers sit over center
  31.  
  32. !    *** Build the first hidden Layer ***
  33.  
  34. @LLdf                !start with default layer again
  35. =LDln    "Hidden 1"        !layer name
  36. =Lpes    3            !three hidden PEs
  37. =Lsum    "DNNA"            !DNNA sum function
  38. =Ltrn    "DNNA"            !DNNA transfer function
  39. =Llrn    "Delta-Rule"        !standard Delta learning rule
  40. +y    60            !raise y so this layer sits above the input layer
  41. #Incl    "stdnwgtf.iif"        !standard # weight fields
  42. @LAdd                !add the hidden layer
  43.  
  44. !    *** Connect Hidden Layer to Bias & Input Layers ***
  45.  
  46. =SPEl    LayN            !current (hidden) layer as destination
  47. @SlPE                !select it as destination (should be selected already)
  48. =NPEl    -1            !near to bias term (source)
  49. @NrPE
  50. =cnwt    1.0            !connection weight
  51. =cnty    WVar            !variable weight type
  52. =cnsc    WAbs            !absolute weight scale
  53. @LCFl                !fully connect to bias element
  54. =NPEl    0            !input layer as source
  55. @NrPE
  56. @LCFl                !fully connect to input layer
  57. =n0    LayN            !save current (hidden) layer number for later connects
  58.  
  59.  
  60. !    *** Build the output layer & connect it to prior layer & bias term ***
  61.  
  62. @LLdf                !load default layer to mi_layer structure
  63. =LDln    "Out"            !layer name
  64. =Lpes    1            !1 output PE
  65. =Lsum    "DNNA"            !DNNA sum function
  66. =Ltrn    "DNNA"            !DNNA transfer function
  67. =Llrn    "Delta-Rule"        !standard Delta learning rule
  68. +y    60            !raise y so this layer sits above the hidden layer
  69. #Incl    "stdnwgtf.iif"        !standard # weight fields
  70. @LAdd                !add the output layer
  71. =n7    LayN            !save output layer number for stdprobe
  72.  
  73. =SPEl    LayN            !current (output) layer as destination
  74. @SlPE                !select it as destination (should be selected already)
  75. =NPEl    -1            !near to bias term (source)
  76. @NrPE
  77. @LCFl                !fully connect to bias element
  78. =NPEl    n0            !previous (hidden) layer, saved above, as source
  79. @NrPE
  80. @LCFl                !fully connect to previous hidden layer (n0) -- not the input layer
  81.  
  81.  
  82. !    *** Select Control Strategy & L/R Schedule ***
  83.  
  84. =LrnN    2000            !learn counter for LearnN
  85. @LLsl                !load super layer
  86. =Lax1    1            !epoch size of 1
  87. =Lctl    "backprop"        !backprop control strategy
  88. =Llrs    "backprop"        !backprop L/R Schedule
  89. =Llnn    "sample"        !name of learn input
  90. =Lrcn    "sample"        !name of recall output
  91. #Incl    "stdioset.iif"        !standard I/O settings
  92. =Lscl     -1.0            !input  low-value
  93. =Loff     1.0            !input  high-value
  94. =Llow     0.2            !output low-value
  95. =Lhgh     0.8            !output high-value
  96. =DLnN     1            !learn count for Check Points
  97. =MWLF     1            !turn on weight limiting
  98. =WtVl     0.99            !max value for weight limiting
  99. @SVsl                !save the super layer back
  100. !    *** Probe setup, jog limits, seed & network initialization ***
  101. =n5    0.0            !probe range low value -- presumably read by stdprobe.iif; verify
  102. =n6    1.0            !probe range high value -- presumably read by stdprobe.iif; verify
  103. =n4    &Out            !reference to the "Out" layer for the probe -- TODO confirm
  104. #Incl    "stdprobe.iif"        !Standard probe include file
  105. !
  106. =jogl    -.1            !lower limit for jog
  107. =jogh    +.1            !upper limit for jog
  108. =seed    257            !starting seed number
  109. @seed                !set the seed
  110. @Nini                !initialize the network (NOTE: assumed to randomize weights -- confirm)
  111.  
  112. @LLsl                !load super layer
  113. =Lepc     0            !reset the learn (epoch) counter to zero
  114. @SVsl                !save it back
  115. @EOF
  116.  
  117.