home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
PC World Komputer 1997 February
/
PCWK0297.iso
/
technika
/
nnmodel
/
neural.h
< prev
next >
Wrap
C/C++ Source or Header
|
1995-03-30
|
3KB
|
112 lines
/*
 * neural.h -- public data structures and API for a back-propagation
 * neural network (input / hidden / output layers; optional straight-through
 * input-to-output connections when NN_STHRU is defined).
 */
#ifndef __CNEURAL_H__ /* NOTE(review): identifiers starting with two underscores
                         are reserved for the implementation (C std 7.1.3);
                         consider renaming the guard, e.g. CNEURAL_H */
#define __CNEURAL_H__
/* Bit flags for NEURAL.m_istate (combined with | ) -- see struct below. */
#define NN_DYNAMIC 1      /* dynamic (training-time) arrays are allocated */
#define NN_DATAMAT 2      /* a DATAMAT has been attached -- presumably via NSetDM; confirm */
#define NN_PARAMSLOADED 4 /* m_params has been loaded */
#define NN_STHRU // Straight through connections (input->output weights compiled in)
/*
 * NEURAL -- complete state of one network.
 *
 * Layout note: field order matters if instances are dumped/loaded verbatim
 * (m_version suggests on-disk persistence -- see DumpNeural/LoadNetwork);
 * do not reorder fields.
 *
 * PARAMS and DATAMAT are project types declared elsewhere; this header is
 * not self-contained -- include their declarations first. TODO confirm.
 *
 * Naming: m_*w = weight matrices, m_*v = activation vectors, m_*theta =
 * bias/threshold terms (presumably -- verify against the implementation).
 */
struct _neural {
long m_version; // version
int m_istate; // state bit flags (NN_DYNAMIC | NN_DATAMAT | NN_PARAMSLOADED)
int m_ninputs; // neuron counts per layer
int m_nhidden;
int m_noutputs;
float *m_ni; /* declare input neurons */
float *m_nout; // output of output layer
float *m_olastoutputv; // output of output (previous pass)
// declare hidden layer neurons
float **m_hinputw; // weight of input to hidden
float *m_houtputv; // output of hidden
float *m_hlastoutputv; // output of hidden (previous pass)
float *m_htheta; // hidden-layer bias/threshold terms
// declare output neurons
float **m_oinputw; // weight of hidden to output
float *m_otheta; // output-layer bias/threshold terms
#ifdef NN_STHRU
float **m_iinputw; // weight of input to output
#endif
// Now for the dynamic variables (training-time state; cf. NN_DYNAMIC flag)
PARAMS *m_params; // training parameters (loaded -> NN_PARAMSLOADED)
DATAMAT *m_dm; // attached training data matrix (set via NSetDM)
// declare hidden layer neurons
float **m_hlastdelta; // last delta weight of input/hidden (momentum term)
float *m_hlastvar; // last VAR
float *m_hlearn; // hidden learning rates
float *m_htlearn;
// declare output neurons
float **m_olastdelta; // last delta weight of hidden/output
#ifdef NN_STHRU
float **m_ilastdelta; // last delta weight of input/output
#endif
float *m_olastvar; // last var weight of output
float *m_otraining; // training targets for the output layer -- TODO confirm
float *m_olearn; // output learning rates
float *m_otlearn;
// Conjugate-gradient optimizer workspace (used by Nfrprmn/Nlinmin et al.)
float *m_startp;
int m_NDIM; // problem dimension (total weight count) -- presumably; verify
float *m_pcom;
float *m_xicom;
float *m_xt;
float *m_g,*m_h,*m_xi; // CG direction/gradient vectors
float m_sumerr2; // accumulated squared error over the training set
long m_cnt; // iteration/presentation counter
float m_stats[7];
int m_itmax; // # of iterations in CG optim
};
typedef struct _neural NEURAL;
/* ---- Lifecycle and persistence -------------------------------------- */
/* FIX: NCreateNeural() with empty parens was a non-prototype declaration
 * (unspecified arguments); (void) makes it a real prototype so the
 * compiler checks call sites. */
NEURAL *NCreateNeural(void);                 /* allocate a new network; caller frees via NDeleteNeural */
void NDeleteNeural(NEURAL *pN);              /* release a network created by NCreateNeural/LoadNetwork */
int NImportNetwork (NEURAL *pN, FILE *fd);   /* read network state from an open stream */
NEURAL *LoadNetwork (char *filename);        /* load a network from file; NULL on failure -- TODO confirm */
void DumpNeural(NEURAL *pN, FILE *fd);       /* write network state to an open stream */

/* ---- Topology and training ------------------------------------------ */
void NAddHidden(NEURAL *pN);                 /* grow the hidden layer -- presumably by one neuron; verify */
void NFeedForward(NEURAL *pN);               /* forward pass: inputs -> m_nout */
int NBackProp1(NEURAL *pN,int cnt);          /* one back-propagation step */
void NClearDelta(NEURAL *pN);                /* zero the momentum/delta arrays */
void NInitializeNetwork(NEURAL *pN);         /* initialize weights/state before training */
void NNewTrainingSet(NEURAL *pN, int t,int flag);
int NAI(NEURAL *pN, int flag,int a);
float NCalcRsquare(NEURAL *pN);              /* R-squared fit statistic over the training data */
float NCheckTol(NEURAL *pN,float *min, float *max, int *nmin, int *nmax);
int NQuickTrain(NEURAL *pN,int mode,int increment);

/* ---- Conj Grad routines (Numerical-Recipes-style minimization) ------- */
void Nfrprmn(NEURAL *pN,float *p, float ftol, int *iter, float *fret);
float Nbrent(NEURAL *pN,float ax, float bx, float cx, float tol,float *xmin);
void Nmnbrak(NEURAL *pN,float *ax, float *bx, float *cx, float *fa, float *fb,float *fc);
float Nf1dim(NEURAL *pN,float x);
void Nlinmin(NEURAL *pN,float *p, float *xi, float *fret);
/* NOTE(review): "DIff" is a typo in the exported name; kept for ABI/source
 * compatibility with existing callers. */
void NforwardDIffGradient (NEURAL *pN,float *x0, float *g);
float NErrorFunction(NEURAL *pN,float* x);   /* objective minimized by the CG routines */
void Nrerror(char *error_text);              /* fatal-error reporter */
void NCGTrain(NEURAL *pN);                   /* train via conjugate gradient */

/* ---- Data binding and evaluation ------------------------------------ */
void NSetDM( NEURAL *pN, DATAMAT *a);        /* attach a training data matrix (sets NN_DATAMAT) -- verify */
void NInterrogate(NEURAL *pN,float *Ivec,float *Ovec); /* evaluate: Ivec in, Ovec out */
/* Legacy per-neuron accessor API -- intentionally commented out (superseded
   by the N* functions above); retained for reference only:
void SetInput(const int neuron, float f);
void SetRInput(const int neuron, float f);
float GetInput(const int neuron);
float GetOutput(const int neuron);
float GetTrain(const int neuron);
float GetRInput(const int neuron);
float GetROutput(const int neuron);
float GetRTrain(const int neuron);
*/
#endif // __CNEURAL_H__