#include #include #include #include #if(defined(WIN32)) #include int APIENTRY DllMain (HINSTANCE hInstance, DWORD fwdReason, PVOID pvReserved) { return TRUE; } #if(defined(_USRDLL)) #define MLPHEADER #endif #if(defined(_LIB)) #define MLPHEADER #endif #endif #include "mlp_gen.h" #include "mlp_inter.h" /* This file contains routines which can be called for fortran or C code Their name follow the rules: - only lowercase characters - start with mlp - end with _ for being compatible with fortran calling -> to call them from fortran use the name WITHOUT _ -> to call them from C use the name WITH _ - all arguments passed by address J.Schwindling 14-NOV-98 */ /***********************************************************/ /* mlpsetnet */ /* */ /* to set the structure of a neural network */ /* inputs: int *nl = number of layers */ /* int *nn = number of neurons */ /* */ /* return value (int) = error value: */ /* 0: no error */ /* 1: N layers > NLMAX */ /* 2: N layers < 2 */ /* -111: not enough memory */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /* Modified: J.Schwindling 05-Oct-99 call MLP_SetNet */ /***********************************************************/ DLLEXPORT int mlpsetnet_ (int *nl, int *nn) { return MLP_SetNet(nl,nn); } /***********************************************************/ /* mlplearnalloc */ /* */ /* to allocate memory for learning */ /* should be called after mlpsetnet */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /* Modified: J.Schwindling 31-Jan-99 return value */ /***********************************************************/ DLLEXPORT int mlplearnalloc_() { return LearnAlloc(); } /***********************************************************/ /* mlplearnfree */ /* */ /* to free memory for learning */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /***********************************************************/ DLLEXPORT void mlplearnfree_() { LearnFree(); } /***********************************************************/ /* mlpsetlearn */ /* */ /* to define 
learning method and learning parameters   */
/*                                                         */
/* inputs:     int *lmet = learning method:                */
/*                  1 = stochastic minimization            */
/*                  2 = steepest descent fixed steps       */
/*                  3 = steepest descent with L.search     */
/*                  4 = Ribiere-Polak Conjugate Grad.      */
/*                  5 = Fletcher-Reeves Conj. Grad.        */
/*                  6 = BFGS                               */
/*                  7 = Hybrid method                      */
/*             float *eta = learning parameter             */
/*                          (used by methods 1 and 2)      */
/*             float *decay = decrease factor (per epoch)  */
/*                            of learning parameter        */
/*             float *epsilon = momentum term (meth. 1,2)  */
/*             float *Delta = flat spot elimination param. */
/*                            (methods 1 or 2)             */
/*             int *nreset = frequency to reset to         */
/*                           steepest gradient (meths 4->7)*/
/*             float *tau = governs precision of line      */
/*                          search (meths 4->7)            */
/*             float *lambda = size of regularisation term */
/*                             (method 7)                  */
/*                                                         */
/* return value (int) = error value:                       */
/*                      0: no error                        */
/*                      1: method > 7                      */
/*                   -111: not enough memory               */
/*                                                         */
/* Author: J.Schwindling   14-Apr-99                       */
/***********************************************************/

DLLEXPORT int mlpsetlearn_ (int *lmet, float *eta, float *decay,
                            float *epsilon, float *Delta, int *nreset,
                            float *tau, float *lambda)
{
   /* only learning methods 1..7 exist */
   if(*lmet>7) return 1;

   /* copy the caller's settings into the global LEARN record,
      promoting the single-precision tuning parameters to dbl */
   LEARN.Meth    = *lmet;
   LEARN.Nreset  = *nreset;
   LEARN.eta     = (dbl) *eta;
   LEARN.Decay   = (dbl) *decay;
   LEARN.epsilon = (dbl) *epsilon;
   LEARN.delta   = (dbl) *Delta;
   LEARN.Tau     = (dbl) *tau;
   LEARN.Lambda  = (dbl) *lambda;

   /* (re)allocate the learning work arrays for the chosen method */
   return LearnAlloc();
}

/***********************************************************/
/* mlpsetnpat                                              */
/*                                                         */
/* to set the number of examples                           */
/* inputs:     int *ifile = 0: learning examples           */
/*                          1: test examples               */
/*             int *npat = number of examples              */
/*             int *ipond = 0: no weighting                */
/*                          1: weighting                   */
/*             int *nin: number of input variables         */
/*             int *nout: number of output quantities      */
/*                                                         */
/* return value (int) = error value:                       */
/*                      0: no error                        */
/*                                                         */
/* Author: J.Schwindling   14-Nov-98                       */
/* Modified: J.Schwindling 27-Jan-00  nin, nout            */
/***********************************************************/ DLLEXPORT int mlpsetnpat_ (int *ifile, int *npat, int *ipond, int *nin, int *nout) { int ierr; PAT.Npat[*ifile] = *npat; PAT.Iponde = *ipond; PAT.Nin = *nin; PAT.Nout = *nout; ierr = AllocPatterns(*ifile,*npat,*nin,*nout,0); return(ierr); } /***********************************************************/ /* mlppatnum */ /* */ /* to set the number of examples */ /* inputs: int *ifile = 0: learning examples */ /* 1: test examples */ /* int *npat = number of examples */ /* */ /* return value (int) = error value: */ /* 0: no error */ /* */ /* Author: J.Schwindling 26-APR-99 */ /***********************************************************/ int mlppatnum_ (int *ifile, int *npat) { PAT.Npat[*ifile] = *npat; return(0); } /***********************************************************/ /* mlpaddnpat */ /* */ /* to add to the examples */ /* inputs: int *ifile = 0: learning examples */ /* 1: test examples */ /* int *npat = number of examples */ /* int *ipond = 0: no weighting */ /* 1: weighting */ /* */ /* return value (int) = error value: */ /* 0: no error */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ int mlpaddnpat_ (int *ifile, int *npat, int *ipond) { PAT.Iponde = *ipond; return AllocPatterns(*ifile,*npat,PAT.Nin,PAT.Nout,1); } /***********************************************************/ /* mlpgetnpat */ /* */ /* to get the number of examples, the number of */ /* inputs/outputs, the weighting mode */ /* inputs: int *ifile = 0: learning examples */ /* 1: test examples */ /* outputs: int *npat = number of examples */ /* int *ninp = number of inputs */ /* int *nout = number of outputs */ /* int *ipond = 0: no weighting */ /* 1: weighting */ /* */ /* Author: J.Schwindling 26-Apr-99 */ /***********************************************************/ DLLEXPORT void mlpgetnpat_(int *ifile, int *npat, int *ninp, int *nout, int *ipond) { *ninp = PAT.Nin; *nout = 
PAT.Nout; *ipond = PAT.Iponde; *npat = PAT.Npat[*ifile]; } /***********************************************************/ /* mlpprw */ /* */ /* print weights on screen */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ void mlpprw_() { PrintWeights(); } /***********************************************************/ /* mlpinitw */ /* */ /* initialize weights to random values */ /* */ /* inputs: int *mode = 0: random weights */ /* 1: call srand(1) before */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /* Modified: J.Schwindling 28-Jun-99 srandom(1) */ /***********************************************************/ DLLEXPORT void mlpinitw_(int *mode) { if(*mode == 1) { #if(defined(WIN32)) srand(1); #else srandom(1); #endif } InitWeights(); } /***********************************************************/ /* mlpprff */ /* */ /* writes fortran function in file mlpfun.f */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ void mlpprff_() { MLP_PrFFun("mlpfun.f"); } /***********************************************************/ /* mlpsaveff */ /* */ /* writes fortran function in a file */ /* */ /* inputs: char *filename: name of the output file */ /* */ /* return value (int) = 0: no error */ /* */ /* Author: J.Schwindling 30-Nov-99 */ /***********************************************************/ DLLEXPORT int mlpsaveff_(char *filename) { return MLP_PrFFun(filename); } /***********************************************************/ /* mlpsavecf */ /* */ /* writes C function in a file */ /* */ /* inputs: char *filename: name of the output file */ /* */ /* return value (int) = 0: no error */ /* */ /* Author: J.Schwindling 30-Nov-99 */ /***********************************************************/ DLLEXPORT int mlpsavecf_(char *filename) { return MLP_PrCFun(filename); } /***********************************************************/ /* mlpversion */ /* */ /* return value 
(float) = version of the MLPfit package */ /* */ /* Author: J.Schwindling 11-Jun-98 */ /***********************************************************/ float mlpversion_() { return MLPfitVersion; } /***********************************************************/ /* mlpprfun */ /* */ /* writes fortran or C function in file mlpfun.f or .c */ /* */ /* inputs: int *lang = 1: fortran */ /* = 2: C */ /* */ /* Author: J.Schwindling 21-May-99 */ /***********************************************************/ void mlpprfun_(int *lang, char *finame) { if(*lang==1) { MLP_PrFFun(finame); printf("\nWriting file %s\n",finame); } else { MLP_PrCFun(finame); } } /***********************************************************/ /* mlploadw */ /* */ /* load weights from file filename */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ DLLEXPORT int mlploadw_(char *filename) { int idummy; return LoadWeights(filename,&idummy); } /***********************************************************/ /* mlpsavew */ /* */ /* save weights to file filename */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ DLLEXPORT int mlpsavew_(char *filename) { return SaveWeights(filename,0); } /***********************************************************/ /* mlpfree */ /* */ /* frees memory of the examples */ /* */ /* Author: J.Schwindling 14-Nov-98 */ /***********************************************************/ DLLEXPORT void mlpfree_() { int i; if(ExamplesMemory==0) return; for(i=0; i<2; i++) { FreePatterns(i); } free(PAT.Rin); free(PAT.Rans); free(PAT.Pond); ExamplesMemory = 0; LearnFree(); } /***********************************************************/ /* mlpsetpat */ /* */ /* to set an example */ /* inputs: int *ifile = 0: learning example */ /* 1: test example */ /* int *ipat = example number (1 -> N) */ /* float *rin = inputs */ /* float *rans = answers */ /* float *pond = weight */ /* */ /* return value 
(int) = error value: */ /* 0: no error */ /* */ /* Author: J.Schwindling 14-Nov-98 */
/***********************************************************/
/* NOTE(review): the body of mlpsetpat_ below is corrupted in this
   recovered source -- the text between '<' and '>' characters was
   stripped, eating the copy loops that store rin/rans/pond into PAT
   and the start of the mlpgetw banner.  Left byte-identical; recover
   from the original MLPfit distribution before compiling. */
DLLEXPORT int mlpsetpat_ (int *ifile, int *ipat, float *rin, float *rans, float *pond) { int in; for(in=0; in N) */ /* int *neuron: neuron number (1 -> N) */ /* output: float *weights: weights */ /* */ /* return value (int) : error code = 0: no error */ /* 1: wrong layer */ /* 2: wrong neuron */ /* */ /* Author: J.Schwindling 31-May-99 */
/***********************************************************/
/* mlpgetw_: copies the weights feeding neuron *neuron of layer *layer
   (both 1-based from the caller's point of view) into weights[],
   including the bias entry at index 0 (hence the <= bound).
   NOTE(review): *neuron < 1 is not rejected here, although the sibling
   mlpgettf_ does check it -- a non-positive neuron index reads
   out of bounds; confirm intended contract. */
DLLEXPORT int mlpgetw_(int *layer, int *neuron, float *weights) { int i; if(*layer<2) return 1; if(*layer>NET.Nlayer) return 1; if(*neuron>NET.Nneur[*layer-1]) return 2; for(i=0; i<=NET.Nneur[*layer-2]; i++) { weights[i] = (float) NET.Weights[*layer-1][*neuron-1][i]; } return 0; }
/***********************************************************/ /* mlpgetinput */ /* */ /* gets input for a given file / variable */ /* */ /* input: int *ifile : 0 = learn file */ /* 1 = test file */ /* int *input : input number (1->N) */ /* output: float *rin: input values */ /* */ /* Author: J.Schwindling 04-JAN-2000 */
/***********************************************************/
/* NOTE(review): mlpgetinput_'s copy loop is corrupted (stripped between
   '<' and '>'), and the corruption also swallowed part of the
   mlpgetoutput banner that follows.  Left byte-identical. */
DLLEXPORT void mlpgetinput_(int *ifile, int *input, float *rin) { int i; for(i=0;iN) */ output: float *rout: output values */ /* */ /* Author: J.Schwindling 07-JAN-2000 */
/***********************************************************/
/* NOTE(review): mlpgetoutput_'s body is corrupted and has been fused
   with an (unnamed here) Fortran-source-writer routine whose header and
   opening statements were stripped; only its tail -- fprintf calls
   emitting FORTRAN code for the output layer, the SIGMOID helper and
   the closing fclose(W) -- survives.  Left byte-identical. */
DLLEXPORT void mlpgetoutput_(int *ifile, int *output, float *rout) { int i; for(i=0;i +(%lf) * OUT%d\n", (double) NET.Weights[il][in][jn],jn); } fprintf(W,"C\n"); for(in=0; in +(%lf) * OUT%d\n", (double) NET.Weights[il][0][jn],jn); fprintf(W,"C\n"); fprintf(W," END\n"); fprintf(W," REAL FUNCTION SIGMOID(X)\n"); fprintf(W," SIGMOID = 1./(1.+EXP(-X))\n"); fprintf(W," END\n"); fclose(W); }
/***********************************************************/ /* mlpreadf */ /* */ /* to read learn.pat or test.pat file */ /* */ /* Author: J.Schwindling 
26-Apr-99                        */
/* Modified: J.S. 21-Sep-99 add filename and return value  */
/* Modified: J.S. 22-Dec-1999 FreePatterns                 */
/***********************************************************/

DLLEXPORT int mlpreadf_(int *ifile, char *filename)
{
   /* numbers of patterns/inputs/outputs reported by
      ReadPatterns; already stored in PAT, so discarded */
   int idummy1, idummy2, idummy3;

   /* drop any pattern set already loaded for this file
      before reading the new one */
   if(PAT.Npat[*ifile] != 0)
      {
      FreePatterns(*ifile);
      }
   return ReadPatterns(filename, *ifile, &idummy1, &idummy2, &idummy3);
}

/***********************************************************/
/* mlpsettf                                                */
/*                                                         */
/* to set the transfer function of a given neuron          */
/*                                                         */
/* inputs:     int *layer: layer number (2 -> Nlayer)      */
/*             int *neuron: neuron number (1 -> Nneur)     */
/*             int *tfunc = 0: neuron not activated        */
/*                        = 1: linear neuron               */
/*                        = 2: sigmoid neuron              */
/*                                                         */
/* return code (int) = 0: no error                         */
/*                   = 1: wrong layer number               */
/*                   = 2: wrong neuron number              */
/*                   = 3: wrong transfer function          */
/*                                                         */
/* Author: J.Schwindling   07-May-99                       */
/***********************************************************/
/* (deliberately disabled in the original source; kept for reference)
int mlpsettf_(int *layer, int *neuron, int *tfunc)
{
   if(*layer<2 || *layer>NET.Nlayer) return 1;
   if(*neuron<1 || *neuron>NET.Nneur[*layer-1]) return 2;
   if(*tfunc<0 || *tfunc>2) return 3;
   SetTransFunc(*layer,*neuron,*tfunc);
   return 0;
}
*/

/***********************************************************/
/* mlpgettf                                                */
/*                                                         */
/* to get the transfer function of a given neuron          */
/*                                                         */
/* inputs:     int *layer: layer number (2 -> Nlayer)      */
/*             int *neuron: neuron number (1 -> Nneur)     */
/* output      int *tfunc = 0: neuron not activated        */
/*                        = 1: linear neuron               */
/*                        = 2: sigmoid neuron              */
/*                                                         */
/* return code (int) = 0: no error                         */
/*                   = 1: wrong layer number               */
/*                   = 2: wrong neuron number              */
/*                   = 3: wrong transfer function          */
/*                                                         */
/* Author: J.Schwindling   07-May-99                       */
/***********************************************************/

int mlpgettf_(int *layer, int *neuron, int *tfunc)
{
   /* validate the 1-based layer and neuron indices */
   if(*layer<2 || *layer>NET.Nlayer) return 1;
   if(*neuron<1 || *neuron>NET.Nneur[*layer-1]) return 2;
   *tfunc = NET.T_func[*layer-1][*neuron-1];
return 0; }
/***********************************************************/
/* mlpnorm */ /* */ /* to normalize the inputs to mean = 0, rms = 1 */ /* */ /* Author: J.Schwindling 18-May-99 */
/***********************************************************/
/* Flags the normalization in DIVERS and rescales the stored inputs. */
void mlpnorm_() { DIVERS.Norm = 1; NormalizeInputs(); }
/***********************************************************/
/* mlpstat */ /* */ /* returns statistics */ /* */ /* inputs: ifile (int *): file number */ /* outputs: mean (float *): mean values */ /* sigma (float *): sigmas */ /* minimum (float *): minimum values */ /* maximum (float *): maximum values */ /* */ /* Author: J.Schwindling 21-Dec-1999 */
/***********************************************************/
/* Computes per-input statistics over the examples of file *ifile via
   MLP_StatInputs into temporary dbl arrays, then (presumably -- the tail
   of this routine is cut off in the recovered source, mid "for(i=0; i")
   copies them back into the caller's float arrays and frees them.
   NOTE(review): the four malloc results are used unchecked; on
   allocation failure MLP_StatInputs would dereference NULL.
   NOTE(review): function is truncated below -- recover the remainder
   from the original MLPfit distribution before compiling. */
DLLEXPORT void mlpstat_(int *ifile, float *mean, float *sigma, float *minimum, float *maximum) { dbl *Mean, *Sigma, *Minimum, *Maximum; int Nexamples, Ninputs, i; Nexamples = PAT.Npat[*ifile]; Ninputs = PAT.Nin; Mean = (dbl *) malloc(Ninputs * sizeof(dbl)); Sigma = (dbl *) malloc(Ninputs *sizeof(dbl)); Minimum = (dbl *) malloc(Ninputs * sizeof(dbl)); Maximum = (dbl *) malloc(Ninputs *sizeof(dbl)); MLP_StatInputs(Nexamples, Ninputs, PAT.Rin[*ifile], Mean, Sigma, Minimum, Maximum); for(i=0; i