StEmcUtil/neuralNet/NeuNet.cxx

//
// Neural Network classes :
//
// J.P. Ernenwein, Universite de Haute Alsace
// rnenwein@in2p3.fr
// last changes : 25/10/2001 for ROOT V3.01
//
//
// TNNFormula : used by the following classes
// TNNTree    : trees to prepare the NN work
// TNNKernel  : feed forward kernel
// TNNControlE: tool to control the training
// TNNUtils   : utilities to use a trained kernel with standard TTree's
//
///////////////////////////////////////////////////////////////////
//
//Begin_Html
/*
<img src="histex.gif">
<img src="chist.gif">
<img src="ccurves.gif">
*/
//End_Html
// NeuNet is a set of classes to do a NN study of a problem
//
// the NN is a feed-forward network with 1 or more hidden layers.
//
//-----------------------------------------------------------------
// New since 12/1999 :
//
// the Kernel is in double precision.
// a momentum parameter and a flat spot elimination parameter are
// available for learning (default values : 0)
//
// New for ROOT V3.01 :
//
// 1) Some pointers are initialised to 0 in TNNKernel and TNNUtils.
//    This was necessary because libNeW isn't linked any more,
//    and the initialisation is now random if it is not explicit.
// 2) TNNFormula has a Refresh method used by the AddTree method.
// 3) The TNNUtils SetFormula(Text_t *form, Bool_t clip=1) method
//    now has a clip parameter, so that clipping can be disabled.
//-----------------------------------------------------------------
//
//
// With these classes you can, from one or more TTree's (e.g. mytree) :
//
// - extract the training and validation events, with the relevant variables
//   (you can select the events with cuts) : you are building the TNNTree's :
//   one for training, one for validation (if you want).
//
// - construct a TNNKernel object : you choose the number of inputs, hidden layers,
//   outputs, learning parameters, ...
//
// - associate the TNNTree's to the TNNKernel object.
//
// - loop on training cycles, with or without validation :
//   method TrainNCycles(TNNControlE *conte, Int_t period=5, Int_t nCycles=10)
//   of the kernel.
//   A TNNControlE object makes it possible to plot the
//   errors between computed output and teaching output.
//
// - after training (but also during it), you can associate the kernel
//   to TTree's, so that the outputs of the kernel are visible by the
//   TTree, to use for example mytree->Draw("v1:OutputU[0]"),
//   OutputU[i] being the standard name given to the values of outputs.
//   The tools are provided by the class TNNUtils.
//
// example of macros in order to use the classes : nn.C and nngrap.C :
//
// //////////////////// macro nn.C >>>>>>>>>>>>>>>>>>>
//
// {
// //////////// this is a demo macro for the NeuNet classes.
// //////////// its name is nn.C; use the macro prodtree.C beforehand
// //////////// to produce the trees needed by this macro.
// //////////// THIS example consists of 2 "processes" : signal and background.
// //////////// 5 variables are used to define the processes, these variables
// //////////// have gaussian distributions (see the plots), and the NN
// //////////// discriminates using these variables : v1 v2 v3 v4 v5.
// ////////////
// //////////// => the kernel has 5 input units, I have chosen 8 hidden units,
// //////////// and 1 output unit : 1 -> it's signal, 0 -> it's background.
//
//
// //load the ROOT shared libraries (new since v2.23)
// gSystem->Load("$ROOTSYS/lib/libProof.so");
// gSystem->Load("$ROOTSYS/lib/libTree.so");
// gSystem->Load("$ROOTSYS/lib/libTreePlayer.so");
// gSystem->Load("$ROOTSYS/lib/libTreeViewer.so");
// //load the NN shared library
// gSystem->Load("~rnenwein/root/NN/libNeuNet.so");
// // file containing the "signal"
// TFile f1("NNsignal.root");
// // we get the tree
// TTree *t1=(TTree*)f1.Get("NNsignal");
//
// // file containing the "background"
// TFile f2("NNbackg.root");
// // we get the tree
// TTree *t2=(TTree*)f2.Get("NNbackg");
//
// gROOT->cd();
//
// // allocation of the kernel : here we have one hidden layer with 8 units,
// //                            replace "8" by "3:4:5" to have 3 hidden
// //                            layers with 3, 4 and 5 units for example.
// TNNKernel *k= new TNNKernel("Test NN",5,"8",1);
//
// k->SetLearnParam(0.2); // the learning parameter (<1)
// k->SetInitParam(-2,2); // bounds for the initialisation of weights
//
// // set the use of biases in the kernel (default)
// // to inhibit it type k->SetUseBiases(0);
// k->SetUseBiases();
//
// k->Init();   // initialisation of the kernel
// k->PrintS(); // printing of the network structure
//
// TNNTree valid; // declaration of validation tree
// TNNTree train; // declaration of training tree
//
// // to deal with valid and train trees, we need files (as buffers)
// valid.SetFile("testvalid.root");
// train.SetFile("testtrain.root");
//
// // we will get some variables from the trees t1 and t2, in order
// // to build the valid and train trees; we use TNNFormula's for that.
// // first, we set a tree structure recognized by the formula's :
// valid.SetFormulaTree(t1);
// train.SetFormulaTree(t1);
//
// // second, we define the formula's : here we have 5 input neurons, so
// // the formula has 5 parts; we could also use ::sqrt(v1) for example ...
// valid.SetInFormula("v1:v2:v3:v4:v5");
// train.SetInFormula("v1:v2:v3:v4:v5");
//
// // clip the values computed by the input formula's to [0,1] (the default);
// // if you don't want to do that, type SetClip(0) :
// train.SetClip();
// valid.SetClip();
//
// // set some cuts for the filling of the valid and train trees;
// // the default is "", so no cut; in my examples there are stupid cuts
// train.SetCut("v1>-1.&&v4>-1.");
// valid.SetCut("v1>-1.&&v4>-1.");
//
//
// // at the end we can fill the valid and train trees; the formula's are
// // used for input computation, and the second field in AddTree is the values
// // of the output units. Here we have only one; for more we would
// // put : "1:0:0" for example, or "0.5:0.5" ...
// train.AddTree(t1,"1",0,3999);
// valid.AddTree(t1,"1",4000,5000);
// train.AddTree(t2,"0",0,3999);
// valid.AddTree(t2,"0",4000,5000);
//
// // REMARK 1: we sometimes need to use output values different for
// // EACH event (e.g. for a fit).
// In this case we can define an output
// // formula which will get values in the trees t1 and t2, and will compute
// // the outputs (as the input formula does) :
// // the methods are : SetOutFormula(Text_t *formula)
// // and train.AddTree(t1,0,3000); WITHOUT THE SECOND PARAMETER
// // (and SetOutClip(Bool_t trueForClip=1) for clipping)
//
// // REMARK 2: SetCut is active for input and output
//
//
// // methods to see what we have done :
// train.Infos();
// valid.Infos();
//
//
// ////////////////////////////////////////////////////////////////
// // for results exploitation :
// ///////////////////////////////////////////////////////////////
// // we use the TNNUtils class : an object tu1 is associated to t1 and
// // k : in this way t1 recognizes the outputs of the NN, with the name :
// // OutputU[i], and you can draw with the standard method Draw().
// // The principle is the following:
// // a new branch is created for t1, but in another file.
//
// TNNUtils *tu1=new TNNUtils();
// tu1->SetTree(t1); // the tree
// tu1->SetNewBranchFile("testnewbr1.root"); // the file
// tu1->SetFormula("v1:v2:v3:v4:v5"); // the formula to compute the NN inputs
// tu1->SetKernel(k); // the NN kernel
//
// TNNUtils *tu2=new TNNUtils(); // idem
// tu2->SetTree(t2);
// tu2->SetNewBranchFile("testnewbr2.root");
// tu2->SetFormula("v1:v2:v3:v4:v5");
// tu2->SetKernel(k);
//
// // histos to plot signal and background :
//
// TH1F h1tr("histo1 train","NN output",50,0.,1.);
// h1tr.SetFillColor(45);
// TH1F h2tr("histo2 train","NN output",50,0.,1.);
//
// TH1F h1va("histo1 valid","NN output",50,0.,1.);
// TH1F h2va("histo2 valid","NN output",50,0.,1.);
//
// // canvas to plot histos
//
// chist = new TCanvas("chist","NN output",600,100,600,350);
// chist->SetFillColor(42);
//
// // canvas to plot curves
//
// ccurves = new TCanvas("ccurves","NN output",600,500,600,350);
// ccurves->SetFillColor(40);
//
// ////////////////////////////////////////////////////////////////
// // the beginning of the work :
// // we first associate the train tree to the kernel
//
// k->SetTrainTree(&train);
// printf(" nbr evts for training : %i \n",k->GetNTrainEvents());
//
// // we then associate the valid tree to the kernel
//
// k->SetValidTree(&valid);
// printf(" nbr evts for validation : %i \n",k->GetNValidEvents());
//
// // we will use a canvas to plot the output errors during training :
// TNNControlE conte;
//
// // here we train on 70 cycles, with a plot of the error every 5 cycles :
//
// k->TrainNCycles(&conte,5,70);
//
// ///////////////////////////////////////////////////////////////
// // now the kernel is trained
// // you can export it to an ascii file with
// // k->Export("name_of_file")
// // you can import another one with
// // k->Import("name_of_file")
// // in this case the structure of the NN k will be adapted
// // to the new numbers
// ////////////////////////////////////////////////////////////////
//
// // results exploitation : call the macro nngrap.C
// printf("now you can call the macro nngrap.C : .x nngrap.C\n");
//
// }
//
//
//
//
// //////////////////// macro nngrap.C >>>>>>>>>>>>>>>>>>>
// {
// //////////// this is a demo macro for the NeuNet classes.
// //////////// its name is nngrap.C; use the macro nn.C
// //////////// before this one.
// ////////////////////////////////////////////////////////////////
// // results exploitation
// ///////////////////////////////////////////////////////////////
// // IMPORTANT : the following line fills the new branches, and you
// // have to do it again if you train the kernel further
//
// tu1->FillNB();tu2->FillNB();
//
// /////////////////////////////////////////////////////
// ///// now it is standard ROOT :
// chist->cd();chist->Clear();
// t1->Draw("OutputU[0]>>histo1 train","","",4000,0); // training events
// t2->Draw("OutputU[0]>>histo2 train","","",4000,0);
// t1->Draw("OutputU[0]>>histo1 valid","","",1000,4000); // other events
// t2->Draw("OutputU[0]>>histo2 valid","","",1000,4000);
// h1va.Sumw2();
// h2va.Sumw2();
// h1tr.Scale(100./h1tr.Integral()); // rescale --> 100 events per histo
// h2tr.Scale(100./h2tr.Integral());
// h1va.Scale(100./h1va.Integral());
// h2va.Scale(100./h2va.Integral());
// Float_t maxihtr=(Float_t)TMath::Max(h1tr.GetMaximum(),h2tr.GetMaximum());
// Float_t maxihva=(Float_t)TMath::Max(h1va.GetMaximum(),h2va.GetMaximum());
// maxihtr=TMath::Max(maxihtr,maxihva);
// h1tr.SetMaximum(maxihtr+::sqrt(maxihtr));
//
// h1tr.Draw();h2tr.Draw("same");
// h1va.Draw("esame");h2va.Draw("esame");
//
// ccurves->cd();ccurves->Clear();
// // rejection versus efficiency graph :
// tu1->XY(tu1->HIntegral(&h1tr,1),tu1->HIntegral(&h2tr,0),2)->Draw("ALP");
// tu1->XY(tu1->HIntegral(&h1va,1),tu1->HIntegral(&h2va,0),3)->Draw("LP");
//
// TText xleg(0.4,0.02,"signal efficiency");xleg.SetNDC();xleg.Draw();
// TText yleg(0.05,0.2,"background rejection");yleg.SetNDC();yleg.SetTextAngle(90);yleg.Draw();
//
//
// }
//
//
///////////////////////////////////////////////////////////////////

#include "NeuNet.h"

///////////////////////////////////////////////////////////////////
//
// TNNFormula
//
///////////////////////////////////////////////////////////////////

ClassImp(TNNFormula)


TNNFormula::TNNFormula(Text_t *name, Text_t *formula, Text_t *cut, TTree *tree):TNamed(name,"NN Formula")
{
  // constructor
  fNValues=0;
  fTTCut=0;
  fTTFormula=0;
  fClip=1;
  fTree=0;
  fFormula=0;
  fCut=0;
  fRefresh=0;
  SetTree(tree);
  SetFormula(formula);
  SetCut(cut);
}


TNNFormula::~TNNFormula()
{
  // destructor
  Int_t i;

  for(i=0;i<fNValues;i++) delete fTTFormula[i];
  if(fNValues){delete [] fTTFormula;fTTFormula=0;}
  if(fFormula){delete fFormula;fFormula=0;}
  if(fCut){delete fCut;fCut=0;}
  if(fTTCut){delete fTTCut;fTTCut=0;}
}
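
// A minimal standalone usage sketch for TNNFormula (illustrative only; the
// tree t1 and the variables v1..v5 are the ones of the nn.C demo above,
// and buf is a hypothetical local buffer) :
//
//   TNNFormula form("f","v1:v2:v3","v4>0.",t1); // 3 input values, one cut
//   Float_t buf[3];
//   if (form.Find(0,buf)) printf("%f %f %f\n",buf[0],buf[1],buf[2]);
//
// note that a tree must be set before SetFormula or SetCut can be used,
// as the guards in the methods below enforce.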

void TNNFormula::SetFormula(Text_t *formula)
{
  if(!fTree){printf("A Tree must be set before !\n");return;}
  if(!RMBlanks(formula)){printf("EMPTY INPUT FORMULA !\n");return;}
  if(!fRefresh)
  {
    if(fFormula){delete fFormula;fFormula=0;}
    fFormula=new TStringLong(formula);
  }
  Int_t i,j;
  TString *oneUnit;

  for(i=0;i<fNValues;i++)delete fTTFormula[i];
  if(fNValues){delete [] fTTFormula;fTTFormula=0;}

  fNValues=1;
  for (i=0;formula[i];i++)if(formula[i]==':')fNValues++;

  fTTFormula = new TTreeFormula*[fNValues];

  oneUnit=new TString();
  j=0;
  for (i=0;formula[i];i++)
  {
    if (formula[i]!=':')
      oneUnit->Append(formula[i]);
    else
    {
      fTTFormula[j] = new TTreeFormula("Input",oneUnit->Data(),fTree);
      delete oneUnit;oneUnit=new TString();
      j++;
    }
  }
  fTTFormula[j] = new TTreeFormula("Input",oneUnit->Data(),fTree);
  delete oneUnit;

  printf("Formula = >>%s<< with %3i values\n",formula,fNValues);
}

void TNNFormula::SetCut(Text_t *cutarg)
{
  if(!fTree){printf("A Tree must be set before !\n");return;}
  Text_t cut[500];
  strcpy(cut,cutarg);
  if(!fRefresh)
  {
    if(fCut){delete fCut;fCut=0;}
    fCut=new TStringLong(cut);
  }
  if (fTTCut){delete fTTCut; fTTCut=0;}
  if (RMBlanks(cut))
    fTTCut = new TTreeFormula("NNFormula Selection",cut,fTree);

  printf("Cut applied in TNNFormula set to >>%s<< \n",cut);
}

void TNNFormula::SetTree(TTree *tree)
{
  // set the current TTree, and update formula and cut if there are any

  if(!tree){printf("No Tree !\n");return;}

  Int_t i;
  fTree=tree;
  for (i=0;i<fNValues;i++) fTTFormula[i]->SetTree(fTree);
  if(fTTCut)fTTCut->SetTree(fTree);
}

Bool_t TNNFormula::Find(Int_t iEvent, Float_t *values)
{
  // Finds and selects with the TNNFormula cut, and fills the array values.
  // this function gets event iEvent in the tree pointed to by fTree.
  // if the cut isn't satisfied, Find returns FALSE.

  if(!fTTFormula){printf("Empty input string !\n");return 0;}
  Int_t i;

  // fill the array

  // fTree->LoadTree(fTree->GetEventNumber(iEvent));// load event
  fTree->LoadTree(iEvent);// load event

  for (i=0;i<fNValues;i++)
    values[i]=Clip(fTTFormula[i]->EvalInstance(0));

  // return true or false according to the fTTCut value

  if (fTTCut)
    return (Bool_t) fTTCut->EvalInstance(0);
  else
    return (Bool_t) 1;
}
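
// A sketch of the typical event loop built on Find (illustrative; form is a
// TNNFormula set up as above, and nSel is a hypothetical counter) :
//
//   Float_t values[3];
//   Int_t nSel=0;
//   for (Int_t i=0;i<(Int_t)t1->GetEntries();i++)
//     if (form.Find(i,values)) nSel++; // values holds the (clipped) inputs
//   printf("%i events pass the cut\n",nSel);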

void TNNFormula::Find(Int_t iEvent)
{
  // Finds and selects with the TNNFormula cut, and prints the values.
  // this function gets event iEvent in the tree pointed to by fTree.

  if(!fTTFormula){printf("Empty input string !\n");return;}
  Int_t i;

  // fTree->LoadTree(fTree->GetEventNumber(iEvent));// load event
  fTree->LoadTree(iEvent);// load event

  for (i=0;i<fNValues;i++) printf("%6.2f ",Clip(fTTFormula[i]->EvalInstance(0)));
  if(fTTCut)
  {
    if(fTTCut->EvalInstance(0)) printf(" SELECTED\n"); else printf(" NOT SELECTED\n");
  }
  else printf("\n");
}

Int_t TNNFormula::RMBlanks(Text_t *str)
{
  // remove blanks in a string
  if(!strlen(str)) return 0;
  TStringLong temp(str);
  Int_t posi;
  while((posi=(Int_t)temp.Index(" "))!=-1){temp.Remove(posi,1);}
  strcpy(str,temp.Data());
  return strlen(str);
}

void TNNFormula::SetClip(Bool_t trueForClip)
{
  fClip=trueForClip;
}

Float_t TNNFormula::Clip(Float_t x)
{
  if (!fClip) return x;
  if (x<0) return 0.;
  if (x>1) return 1.;
  return x;
}

///////////////////////////////////////////////////////////////////
//
// Neural Network class TNNTree
//
///////////////////////////////////////////////////////////////////

ClassImp(TNNTree)

TNNTree::TNNTree(Text_t *name):TNamed(name,"Neural Network")
{
  // constructor
  fTree=0;
  fNTrees=0;
  fFile=0;
  fInfos=0;
  fFName=0;
  fInput=0;
  fOutput=0;
  fNInput=0;
  fNOutput=0;
}

TNNTree::~TNNTree()
{
  // destructor
  DeleteTree();
  if (fFName) delete [] fFName;
}


void TNNTree::AddTree(TTree *tree, Int_t begin, Int_t end)
{
  // function to add a piece of a given Tree to fTree
  // this function uses the input and output TNNFormula's
  // you must have called before :
  //   SetFile(Text_t *fName) to choose a file for saving the tree
  //   SetFormulaTree(TTree *tree) to define the formula's
  //   SetInFormula(Text_t *formula)
  //   SetOutFormula(Text_t *formula)

  if(!fFName){printf("NO FILE SELECTED !\n");return;}
  if(!tree){printf("Bad pointer on TTree !\n");return;}
  if(fFormula.Length()==0){printf("EMPTY INPUT FORMULA, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(!fFormula.GetNValues()){printf("NO INPUT UNIT, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(fOutFormula.Length()==0){printf("EMPTY OUTPUT FORMULA, GIVE AN OUTPUT FORMULA FIRST !\n");return;}
  if(!fOutFormula.GetNValues()){printf("NO OUTPUT UNIT, GIVE AN OUTPUT FORMULA FIRST !\n");return;}

  SetFormulaTree(tree);RefreshInFormula();RefreshOutFormula();

  if(fTree&&fFormula.GetNValues()!=fNInput) DeleteTree();
  if(fTree&&fOutFormula.GetNValues()!=fNOutput) DeleteTree();

  fNInput=fFormula.GetNValues();
  fNOutput=fOutFormula.GetNValues();
  CheckRange(&begin,&end,(Int_t)(tree->GetEntries())-1);

  Int_t i,nSelected,range;

  if (!fTree) CreateTree(); // create the Tree

  fFormula.SetTree(tree);
  fOutFormula.SetTree(tree);
  nSelected=(Int_t)(fTree->GetEntries());
  for (i=begin;i<=end;i++)if(fFormula.Find(i,fInput)){fOutFormula.Find(i,fOutput);fTree->Fill();}
  fFile->Write();
  nSelected=(Int_t)(fTree->GetEntries())-nSelected;
  range=end-begin+1;

  fInfos[fNTrees]=new Text_t[strlen(tree->GetName())+60];
  sprintf(fInfos[fNTrees],"%s ,range [%7i,%7i]=%7i, %7i selected",tree->GetName(),begin,end,range,nSelected);
  printf("%s \n",fInfos[fNTrees]);
  fNTrees++;
}
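
// The two AddTree overloads correspond to the two ways of defining the
// teaching outputs (cf. REMARK 1 in the header comments). A sketch, using
// the trees of the nn.C demo :
//
//   train.AddTree(t1,"1",0,3999); // fixed outputs, decoded from the string "1"
//   train.AddTree(t1,0,3999);     // outputs computed per event by SetOutFormula(...)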

void TNNTree::AddTree(TTree *tree, Text_t *out, Int_t begin, Int_t end)
{
  // function to add a piece of a given Tree to fTree
  // this function uses the input TNNFormula only
  // you must have called before :
  //   SetFile(Text_t *fName) to choose a file for saving the tree
  //   SetFormulaTree(TTree *tree) to define the input formula
  //   SetInFormula(Text_t *formula)
  // and the values of the outputs are in the string out : "0.1:0.6:0.2"

  if(!fFName){printf("NO FILE SELECTED !\n");return;}
  if(!tree){printf("Bad pointer on TTree !\n");return;}
  if(fFormula.Length()==0){printf("EMPTY INPUT FORMULA, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(!fFormula.GetNValues()){printf("NO INPUT UNIT, GIVE AN INPUT FORMULA FIRST !\n");return;}
  SetFormulaTree(tree);RefreshInFormula();
  if(fTree&&fFormula.GetNValues()!=fNInput) DeleteTree();
  Int_t newNO=NumberOut(out);
  if(fTree&&newNO!=fNOutput) DeleteTree();

  fNInput=fFormula.GetNValues();
  fNOutput=newNO;
  CheckRange(&begin,&end,(Int_t)(tree->GetEntries())-1);

  Int_t i,nSelected,range;

  if (!fTree) CreateTree(); // create the Tree

  fFormula.SetTree(tree);
  Decode(out);
  nSelected=(Int_t)(fTree->GetEntries());
  for (i=begin;i<=end;i++)if(fFormula.Find(i,fInput))fTree->Fill();
  fFile->Write();
  nSelected=(Int_t)(fTree->GetEntries())-nSelected;
  range=end-begin+1;

  fInfos[fNTrees]=new Text_t[strlen(tree->GetName())+60];
  sprintf(fInfos[fNTrees],"%s ,range [%7i,%7i]=%7i, %7i selected",tree->GetName(),begin,end,range,nSelected);
  printf("%s \n",fInfos[fNTrees]);
  fNTrees++;
}

void TNNTree::Infos()
{
  // prints infos about fTree

  Int_t i;
  if(fFName)printf("%s \n",fFName);
  for (i=0;i<fNTrees;i++)printf("%3i : %s \n",i,fInfos[i]);
}

void TNNTree::SetFile(Text_t *namearg)
{
  // sets the file associated with fTree
  // example : "/home/someone/rootfiles/tree.root"

  if(fTree){printf("File already set and used !\n");return;}
  Text_t name[500];
  strcpy(name,namearg);
  if(!fFName) fFName=new Text_t[500];
  RMBlanks(name);
  strcpy(fFName,name);
  printf("File to record Tree set to >>%s<< \n",fFName);
}

void TNNTree::CheckRange(Int_t *begin, Int_t *end, Int_t indexMax)
{
  Int_t temp;

  if (*begin<0) *begin=0;
  if (*end<0) *end=0;
  if (*begin>indexMax) *begin=indexMax;
  if (*end>indexMax) *end=indexMax;

  if (*begin>*end)
  {
    temp=*begin;
    *begin=*end;
    *end=temp;
  }
}

Int_t TNNTree::RMBlanks(Text_t *str)
{
  // remove blanks in a string
  if(!strlen(str)) return 0;
  TStringLong temp(str);
  Int_t posi;
  while((posi=(Int_t)temp.Index(" "))!=-1){temp.Remove(posi,1);}
  strcpy(str,temp.Data());
  return strlen(str);
}

void TNNTree::GetEvent(Float_t *input, Float_t *output, Int_t iEvent)
{
  // makes a copy of event iEvent into input and output; these addresses are filled
  if(!fTree){printf("No Tree !\n");return;}
  Int_t i;
  fTree->GetEvent(iEvent);
  for(i=0;i<fNInput;i++)input[i]=fInput[i];
  for(i=0;i<fNOutput;i++)output[i]=fOutput[i];
}
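
// A sketch of reading events back from a filled TNNTree (illustrative; the
// buffer sizes must match the numbers of input and output units) :
//
//   Float_t in[5],out[1];
//   train.GetEvent(in,out,0); // copies event 0 into in[] and out[]
//
// this is exactly what TNNKernel::SetTrainTree does in a loop, with the
// kernel's own arrays as destination.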

void TNNTree::DeleteTree()
{
  // delete the tree, its file and its buffers
  if(!fTree) return;
  Int_t i;
  delete fTree;fTree=0;
  fFile->Close(); fFile->Delete(); fFile=0;
  for(i=0;i<fNTrees;i++) delete [] fInfos[i];
  delete [] fInfos; fInfos=0;
  if (fInput) {delete [] fInput; fInput=0;}
  if (fOutput) {delete [] fOutput; fOutput=0;}
  fNTrees=0;
  fNInput=0;
  fNOutput=0;
}


void TNNTree::CreateTree()
{
  Text_t *varnamei,*varnameo,*nunits;

  fFile = new TFile(fFName,"RECREATE","Neural Network");

  nunits=new Text_t[4];
  sprintf(nunits,"%i",fNInput);
  varnamei=new Text_t[16]; // room for "InputU[nnn]/F" plus the terminator
  strcpy(varnamei,"");
  strcat(varnamei,"InputU[");strcat(varnamei,nunits);strcat(varnamei,"]/F");
  strcpy(nunits,"");
  sprintf(nunits,"%i",fNOutput);
  varnameo=new Text_t[16]; // room for "OutputU[nnn]/F" plus the terminator
  strcpy(varnameo,"");
  strcat(varnameo,"OutputU[");strcat(varnameo,nunits);strcat(varnameo,"]/F");
  delete [] nunits;

  fInput=new Float_t[fNInput];
  fOutput=new Float_t[fNOutput];
  fFile->cd();
  fTree = new TTree(fFName,"Neural Network");
  fTree->Branch("InputU",fInput,varnamei);
  fTree->Branch("OutputU",fOutput,varnameo);
  delete [] varnamei;delete [] varnameo;
  gROOT->cd();

  fInfos=new Text_t*[200]; // up to 200 Trees added
}

Int_t TNNTree::NumberOut(Text_t *ttext)
{
  Int_t i,n;
  Text_t text[200];
  strcpy(text,ttext);
  n=1;
  for (i=0;text[i];i++)if(text[i]==':')n++;
  return n;
}

void TNNTree::Decode(Text_t *ttext)
{
  Int_t i,j;
  TString *number;
  Text_t text[200];
  strcpy(text,ttext);

  j=0;
  for (i=0;i<fNOutput;i++)
  {
    number=new TString();
    while(text[j]&&(text[j]!=':')){number->Append(text[j]);j++;}
    j++;
    sscanf(number->Data(),"%f",&fOutput[i]);
    delete number;
  }
}

///////////////////////////////////////////////////////////////////
//
// TNNKernel
// Feed-Forward Neural Network
//
///////////////////////////////////////////////////////////////////

ClassImp(TNNKernel)



TNNKernel::TNNKernel(Text_t *name, Int_t nInput, Text_t *hidden, Int_t nOutput):TNamed(name,"Neural Network")
{
  // constructor

  fValues=0;
  fErrors=0;
  fBiases=0;
  fNUnits=0;
  fW=0;
  fValidTree=0;
  fArrayOut=0;
  fArrayIn=0;
  fTeach=0;
  fEventsList=0;
  fNWeights=0;
  fDW=0;
  fDB=0;
  fNHiddL=0;

  AllocateVW(nInput,hidden,nOutput);

  fUseBiases=1.;
  fLearnParam=0.2;
  fFlatSE=0.;
  fMu=0.;
  fLowerInitWeight=-1.;
  fUpperInitWeight=1.;

  fNTrainEvents=0;
  fNValidEvents=0;
  fNTrainCycles=0;

  TDatime temps;
  fRandom.SetSeed(temps.Convert());
  printf("First Random Seed = %i\n",fRandom.GetSeed());
  printf("Neural Network is created : \n");
  //  PrintS();
}

TNNKernel::TNNKernel()
{
  // constructor with no parameters
  fValues=0;
  fErrors=0;
  fBiases=0;
  fNUnits=0;
  fW=0;
  fValidTree=0;
  fArrayOut=0;
  fArrayIn=0;
  fTeach=0;
  fEventsList=0;
  fNWeights=0;
  fDW=0;
  fDB=0;


  fUseBiases=1.;
  fLearnParam=0.2;
  fFlatSE=0.;
  fMu=0.;
  fLowerInitWeight=-1.;
  fUpperInitWeight=1.;
  fNHiddL=0;

  fNTrainEvents=0;
  fNValidEvents=0;
  fNTrainCycles=0;

  TDatime temps;
  fRandom.SetSeed(temps.Convert());
  printf("First Random Seed = %i\n",fRandom.GetSeed());
}
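
// The hidden layer structure is given as a ":"-separated string, parsed by
// SetHidden below. A sketch of reshaping an existing kernel (illustrative,
// following the "3:4:5" example quoted in the header comments) :
//
//   TNNKernel *k=new TNNKernel(); // empty kernel
//   k->SetKernel(5,"3:4:5",1);    // 5 inputs, 3 hidden layers (3, 4, 5 units), 1 output
//   k->Init();                    // random weights in the SetInitParam bounds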


TNNKernel::~TNNKernel()
{
  // destructor

  DeleteArray();
  FreeVW();
  if(fEventsList) delete [] fEventsList;
}


void TNNKernel::SetHidden(Text_t *ttext)
{
  Int_t i,j;
  TString *number;
  Text_t text[100];
  strcpy(text,ttext);

  fNHiddL=1;
  for (i=0;text[i];i++)if(text[i]==':')fNHiddL++;
  if (fNUnits) delete [] fNUnits;
  fNUnits = new Int_t[fNHiddL+2];

  j=0;
  for (i=1;i<=fNHiddL;i++)
  {
    number=new TString();
    while(text[j]&&(text[j]!=':')){number->Append(text[j]);j++;}
    j++;
    sscanf(number->Data(),"%i",&fNUnits[i]);
    delete number;
    //    printf("%i \n",fNUnits[i]);
  }
}


void TNNKernel::FreeVW()
{
  Int_t i,l;

  // free the values

  if (fValues)
  {
    for (i=0;i<fNHiddL+2;i++)
      {delete [] fValues[i]; delete [] fErrors[i]; delete [] fBiases[i];delete [] fDB[i];}
    delete [] fValues; delete [] fErrors; delete [] fBiases;delete [] fDB;
    fValues=0;fErrors=0;fBiases=0;fDB=0;
  }

  // free the teaching

  if (fTeach)
  {
    delete [] fTeach;
    fTeach=0;
  }

  // free the weights

  if (fW)
  {
    for (i=0;i<fNHiddL+1;i++)
    {
      for(l=0;l<fNUnits[i];l++){delete [] fW[i][l];delete [] fDW[i][l];}
      delete [] fW[i];delete [] fDW[i];
    }
    delete [] fW;delete [] fDW; // free the top level arrays as well
    fW=0;fDW=0;
  }

  // free the units

  if (fNUnits){ delete [] fNUnits; fNUnits=0;}
}

void TNNKernel::AllocateVW(Int_t nInput, Text_t *hidden, Int_t nOutput)
{
  Int_t i,l;

  if(fW){printf("free memory first !\n");return;}

  SetHidden(hidden);
  fNUnits[0]=nInput;
  fNUnits[fNHiddL+1]=nOutput;

  // allocation of values

  fValues = new Float_t*[fNHiddL+2];
  fErrors = new Double_t*[fNHiddL+2];
  fBiases = new Double_t*[fNHiddL+2];
  fDB     = new Double_t*[fNHiddL+2];

  for (i=0;i<fNHiddL+2;i++)
  {
    fValues[i]=new Float_t[fNUnits[i]];
    fErrors[i]=new Double_t[fNUnits[i]];
    fBiases[i]=new Double_t[fNUnits[i]];
    fDB[i]=new Double_t[fNUnits[i]];
  }

  // allocation of teaching

  fTeach=new Float_t[fNUnits[fNHiddL+1]];

  // allocation of weights

  fW=new Double_t**[fNHiddL+1];
  fDW=new Double_t**[fNHiddL+1];

  for (i=0;i<fNHiddL+1;i++)
  {
    fW[i]=new Double_t*[fNUnits[i]];
    fDW[i]=new Double_t*[fNUnits[i]];
    for (l=0;l<fNUnits[i];l++)
    {
      fW[i][l]=new Double_t[fNUnits[i+1]];
      fDW[i][l]=new Double_t[fNUnits[i+1]];
    }
  }
}

void TNNKernel::SetKernel(Int_t nInput, Text_t *hidden, Int_t nOutput)
{
  FreeVW();
  AllocateVW(nInput,hidden,nOutput);
}

void TNNKernel::SetLearnParam(Double_t learnParam,Double_t fse,Double_t mu)
{
  // Sets the learning parameters :
  // the main learning parameter is around 0.2 (in ]0,1])
  // fse is for flat spot elimination, with values in [0,0.25], often 0.1
  // mu is for backprop momentum, with values in [0,1]
  fLearnParam=fabs(learnParam);
  fFlatSE=fabs(fse);
  fMu=fabs(mu);

  if (fLearnParam>1.0) printf("Warning : %6.2f is not a usual value\n",fLearnParam);
  if (fLearnParam==0.0) printf("Warning : 0 is a stupid value\n");
  printf("Learning Parameter set to : %6.2f\n",fLearnParam);
  printf("Flat Spot elimination value set to : %6.2f\n",fFlatSE);
  printf("Momentum set to : %6.2f\n",fMu);
}
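
// A sketch of a typical parameter choice, following the ranges quoted in
// SetLearnParam above (illustrative values; the single-argument call relies
// on the default arguments declared in NeuNet.h, as used in the nn.C demo) :
//
//   k->SetLearnParam(0.2,0.1,0.5); // learning rate 0.2, flat spot elim. 0.1, momentum 0.5
//   k->SetLearnParam(0.2);         // fse and mu keep their default of 0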

void TNNKernel::SetInitParam(Float_t lowerInitWeight, Float_t upperInitWeight)
{
  // Sets the initialisation parameters : max and min weights
  Float_t temp;

  fLowerInitWeight=lowerInitWeight;
  fUpperInitWeight=upperInitWeight;
  if (fLowerInitWeight>fUpperInitWeight)
  {
    temp=fUpperInitWeight;
    fUpperInitWeight=fLowerInitWeight;
    fLowerInitWeight=temp;
  }
  if (fLowerInitWeight==fUpperInitWeight)printf("Warning : the weights initialisation bounds are equal !\n");
  printf("Init Parameters set to :\n");
  printf(" --> Lower bound = %6.2f\n",fLowerInitWeight);
  printf(" --> Upper bound = %6.2f\n",fUpperInitWeight);
}


Float_t TNNKernel::Alea()
{
  return fLowerInitWeight+fRandom.Rndm()*(fUpperInitWeight-fLowerInitWeight);
}

void TNNKernel::Init()
{
  // initialisation of biases and weights.
  // the init parameters can be changed by :
  // SetInitParam(Float_t lowerInitWeight, Float_t upperInitWeight)
  // The default is -1 and 1

  Int_t i,l,c;

  if(!fW){printf("allocate memory first !\n");return;}

  // init of weights

  for (i=0;i<fNHiddL+1;i++)
    for (l=0;l<fNUnits[i];l++)
      for (c=0;c<fNUnits[i+1];c++) fW[i][l][c]=(Double_t)Alea();

  for(i=0;i<fNHiddL+1;i++)for(l=0;l<fNUnits[i];l++)for(c=0;c<fNUnits[i+1];c++)
    fDW[i][l][c]=0.;

  // init of biases

  for (i=0;i<fNHiddL+2;i++)
    for (l=0;l<fNUnits[i];l++) fBiases[i][l]=(Double_t)(Alea())*fUseBiases;

  for(i=0;i<fNHiddL+2;i++)for(l=0;l<fNUnits[i];l++)fDB[i][l]=0.;


  fNTrainCycles=0;
  printf("Initialisation done\n");
}

void TNNKernel::PrintS()
{
  // prints structure of network on screen
  Int_t i,l,c;

  if(!fW){printf("no unit !\n");return;}

  printf("+++++++++ Neural Network %s ++++++++++++\n",GetName());
  for(i=0;i<fNHiddL+2;i++)printf("Layer %1i contains %2i units\n",i,fNUnits[i]);

  if(fUseBiases)printf(">>>>>>> Biases USED");else printf(">>>>>>> Biases DUMMY");

  printf("\n ---------- Biases ---------- \n");
  Int_t maxl=0;
  for(i=0;i<fNHiddL+2;i++)if(fNUnits[i]>=maxl)maxl=fNUnits[i];
  for(i=0;i<fNHiddL+2;i++)printf(" %1i | ",i);printf("\n");
  for(i=0;i<fNHiddL+2;i++)printf("--------|-");printf("\n");
  for(l=0;l<maxl;l++)
  {
    for(i=0;i<fNHiddL+2;i++)
      if(l<fNUnits[i])printf("%6.2f | ",fBiases[i][l]);else printf(" | ");
    printf("\n");
  }


  printf("\n ---------- Weights ----------- \n");
  for(i=0;i<fNHiddL+1;i++)
  {
    printf(" From %1i to %1i : \n",i,i+1);
    printf("%2i |",i);for(l=0;l<fNUnits[i];l++)printf(" %3i |",l);printf("\n");
    printf("===|");for(l=0;l<fNUnits[i];l++)printf("-------");printf("\n");
    printf("%2i |",i+1);for(l=0;l<fNUnits[i];l++)printf("-------");printf("\n");
    for(c=0;c<fNUnits[i+1];c++)
    {
      printf("%2i |",c);
      for(l=0;l<fNUnits[i];l++)printf("%6.2f|",fW[i][l][c]);
      printf("\n");
    }
    printf("\n");
  }

  printf("\n");
  printf("Learning parameter = %6.2f\n",fLearnParam);
  printf("Flat Spot elimination value = %6.2f\n",fFlatSE);
  printf("Momentum = %6.2f\n",fMu);
  printf("Lower initialisation weight = %6.2f\n",fLowerInitWeight);
  printf("Upper initialisation weight = %6.2f\n",fUpperInitWeight);
  printf("Number of events for training = %5i\n",fNTrainEvents);
  printf("Number of events for validation = %5i\n",fNValidEvents);
  printf("Number of cycles done = %3i\n",fNTrainCycles);
  printf("+++++++++++++++++++++++++++++++++++++++++++++++\n");
}
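
// The forward propagation implemented below computes, for each unit c of
// layer i+1 :
//
//   fValues[i+1][c] = Sigmoide( sum_l fW[i][l][c]*fValues[i][l]
//                               + fBiases[i+1][c]*fUseBiases )
//
// Sigmoide is defined in NeuNet.h; assuming the usual logistic form
// 1/(1+exp(-x)), which is consistent with SigPrim being evaluated on the
// activation value in Error() below. Setting fUseBiases to 0 (SetUseBiases(0))
// simply removes the bias term.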

void TNNKernel::Forward()
{
  // general function to propagate the input activation
  // The input activation array must be filled
  Int_t i,l,c;
  Double_t sum;

  if(!fW){printf("no unit !\n");return;}

  for (i=0;i<fNHiddL+1;i++)
    for (c=0;c<fNUnits[i+1];c++)
    {
      sum=0.;
      for(l=0;l<fNUnits[i];l++)sum+=fW[i][l][c]*(Double_t)fValues[i][l];
      fValues[i+1][c]=(Float_t)Sigmoide(sum+fBiases[i+1][c]*fUseBiases);
    }
}

void TNNKernel::LearnBackward()
{
  // gradient backpropagation (updates of biases and weights)

  if(fNTrainEvents<1){printf("No event to train !!!\n");return;}
  if(!fW){printf("no unit !\n");return;}

  Int_t i,l,c;
  Double_t delta;

  // weights

  for (i=0;i<fNHiddL+1;i++)
    for (l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++)
      {
        delta=fLearnParam*fErrors[i+1][c]*(Double_t)fValues[i][l]+fMu*fDW[i][l][c];
        fW[i][l][c]+=delta;
        fDW[i][l][c]=delta;
      }

  // biases

  if(((Bool_t)fUseBiases))
  {
    for (i=1;i<fNHiddL+2;i++)
      for (l=0;l<fNUnits[i];l++)
      {
        delta=fLearnParam*fErrors[i][l]+fMu*fDB[i][l];
        fBiases[i][l]+=delta;
        fDB[i][l]=delta;
      }
  }
}

Double_t TNNKernel::Error()
{
  // function to compute the error between forward propagation and teaching.
  // this error is |teaching-computed|, summed over the NN outputs and divided by their number.
  Int_t i,l,c;
  Double_t sum,error=0,errorOneUnit;
  if(!fW){printf("no unit !\n");return 0;}

  // Error on Output Units

  for(l=0;l<fNUnits[fNHiddL+1];l++)
  {
    errorOneUnit=(Double_t)(fTeach[l]-fValues[fNHiddL+1][l]);
    error+=fabs(errorOneUnit);
    fErrors[fNHiddL+1][l]=errorOneUnit*(SigPrim(fValues[fNHiddL+1][l])+fFlatSE);
  }
  error=error/(Double_t)fNUnits[fNHiddL+1];

  // Error on Hidden Units : sweep backwards through all hidden layers

  for(i=fNHiddL;i>=1;i--)
  {
    for(l=0;l<fNUnits[i];l++)
    {
      sum=0.;
      for(c=0;c<fNUnits[i+1];c++) sum+=fW[i][l][c]*fErrors[i+1][c];
      fErrors[i][l]=sum*(SigPrim((Double_t)fValues[i][l])+fFlatSE);
    }
  }

  return error;
}

Double_t TNNKernel::ErrorO()
{
  // function to compute the error between forward propagation and teaching.
  // this error is |teaching-computed|, summed over the NN outputs and divided by their number.
  // Error on Output Units

  Int_t l;
  Double_t error=0;
  if(!fW){printf("no unit !\n");return 0;}
  for(l=0;l<fNUnits[fNHiddL+1];l++)
    error+=fabs((Double_t)(fTeach[l]-fValues[fNHiddL+1][l]));

  error=error/(Double_t)fNUnits[fNHiddL+1];

  return error;
}
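
// The update applied by LearnBackward above, written out : for each weight,
//
//   delta = fLearnParam * fErrors[i+1][c] * fValues[i][l] + fMu * fDW[i][l][c]
//   fW[i][l][c] += delta ;  fDW[i][l][c] = delta
//
// i.e. plain gradient descent with momentum fMu, where the errors carry a
// factor (SigPrim(value)+fFlatSE) : the fFlatSE offset keeps the gradient
// alive where the sigmoid derivative is nearly 0 (flat spot elimination).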

Double_t TNNKernel::TrainOneCycle()
{
  // one loop on the internal events = one cycle.
  // takes each event from the internal array in an order fixed by an array (fEventsList).
  // It is necessary to call the method Mix() before each call to this function
  // in order to change the presentation order.
  // The learning is done by this function.
  // The private variable fNTrainCycles is incremented.

  if(fNTrainEvents<1){printf("No event to train !!!\n");return 0.;}
  if(!fW){printf("no unit !\n");return 0.;}

  Int_t i;
  Double_t error=0.;

  for(i=0;i<fNTrainEvents;i++)
  {
    GetArrayEvt(fEventsList[i]);
    Forward();
    error+=Error();
    LearnBackward();
  }

  fNTrainCycles++;
  error=error/(Double_t)fNTrainEvents;
  //  printf("cycle %i : E_t = %6.4f ",fNTrainCycles,error);

  return error;
}

Double_t TNNKernel::Valid()
{
  // one loop on the valid events.
  // takes each event from the validation tree.
  // the events are passed through the kernel, and a mean output
  // error is computed.

  if(fNValidEvents<1) return 0.;

  // we will now pass all the validation events through the kernel, and
  // compute the mean error on output
  Double_t error=0.;
  for (Int_t j=0;j<fNValidEvents;j++)
  {
    fValidTree->GetEvent(GetInputAdr(),GetTeachAdr(),j);
    error+=GoThrough(); // forward propagation and error on one event
  }
  error=error/(Double_t)fNValidEvents; // mean
  return error;
}

void TNNKernel::TrainNCycles(TNNControlE *conte, Int_t period, Int_t nCycles)
{
  // method to train on N cycles, with mixing and a plot of the errors
  // on the controller conte.

  if(!conte){printf("no controller !\n");return;}
  Float_t errt,errv;
  for(Int_t i=0;i<nCycles;i++)
  {
    Mix();
    errt=(Float_t)TrainOneCycle();
    errv=(Float_t)Valid();
    printf("cycle %3i > train : %7.3f",fNTrainCycles,errt);
    if(fNValidEvents)printf(" and valid : %7.3f \n",errv);else printf("\n");
    if(!(i%period)||i==(nCycles-1))
    {
      conte->AddTP(fNTrainCycles,errt); // add Train Point
      conte->AddVP(fNTrainCycles,errv); // add Valid Point
      conte->UpdateG();                 // update graphics
    }
  }
}

void TNNKernel::Export(Text_t *fileName)
{
  // Puts the structure in a file
  // WARNING : the weights and biases are stored with 4 digits
  // in the decimal part.
  // Learning parameters are not stored
  Int_t i,l,c;

  if(!fW){printf("no unit !\n");return;}

  FILE *file;
  file=fopen(fileName,"w");

  fprintf(file,"%3i\n",fNHiddL);
  for(i=0;i<fNHiddL+2;i++)fprintf(file,"%3i\n",fNUnits[i]);

  for(i=0;i<fNHiddL+2;i++)
    for(l=0;l<fNUnits[i];l++)fprintf(file,"%8.4f\n",fBiases[i][l]);

  for(i=0;i<fNHiddL+1;i++)
    for(l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++)fprintf(file,"%8.4f\n",fW[i][l][c]);

  fprintf(file,"%5i\n",fNTrainCycles);
  fprintf(file,"%2.0f\n",fUseBiases);

  fclose(file);
}
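
// A sketch of saving and restoring a kernel (illustrative file name) :
//
//   k->Export("mynet.txt");        // ascii dump : structure, biases, weights
//   TNNKernel *k2=new TNNKernel();
//   k2->Import("mynet.txt");       // k2 is reshaped to match the file
//
// only the structure, the weights and biases, fNTrainCycles and fUseBiases
// travel through the file; the learning parameters have to be set again.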

void TNNKernel::Import(Text_t *fileName)
{
  // Gets the structure from a file
  // WARNING : the weights and biases are stored with 4 digits
  // in the decimal part.
  // Learning parameters are not stored.
  Int_t i,l,c,newI,newHL,newO;
  Text_t hidden[100],piece[5];
  FILE *file;
  file=fopen(fileName,"r");

  fscanf(file,"%3i",&newHL);
  fscanf(file,"%3i",&newI);
  strcpy(hidden,"");
  for(i=1;i<newHL;i++)
    {fscanf(file,"%s",piece);strcat(hidden,piece);strcat(hidden,":");}
  fscanf(file,"%s",piece);strcat(hidden,piece);
  fscanf(file,"%3i",&newO);

  printf("New NN set to : %3i %s %3i \n",newI,hidden,newO);
  FreeVW();
  AllocateVW(newI,hidden,newO);
  Float_t tmpfl;
  for(i=0;i<fNHiddL+2;i++)
    for(l=0;l<fNUnits[i];l++){fDB[i][l]=0.;fscanf(file,"%f",&tmpfl);*(fBiases[i]+l)=(Double_t)tmpfl;}

  for(i=0;i<fNHiddL+1;i++)
    for(l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++){fDW[i][l][c]=0.;fscanf(file,"%f",&tmpfl);*(fW[i][l]+c)=(Double_t)tmpfl;}


  fscanf(file,"%5i",&fNTrainCycles);
  fscanf(file,"%f",&tmpfl);fUseBiases=(Double_t)tmpfl;

  fclose(file);
}

void TNNKernel::Mix()
{
  // mixes the events before learning. VERY IMPORTANT.
  // it has to be called before TrainOneCycle();
  // it is NOT called by TrainOneCycle(), you have to do the call yourself

  Int_t i,i1,i2;
  Int_t temp;
  for (i=0;i<3*fNTrainEvents;i++)
  {
    i1=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
    i2=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
    temp=fEventsList[i1];
    fEventsList[i1]=fEventsList[i2];
    fEventsList[i2]=temp;
  }

  //  for (i=0;i<fNTrainEvents;i++)printf("%i \n",fEventsList[i]);
  //  printf("Mixed ... ");
}

void TNNKernel::SetArraySize(Int_t size)
{
  DeleteArray();
  if (fEventsList) delete [] fEventsList;
  if(!size)return;
  Int_t i;
  fNTrainEvents=size;
  fArrayIn = new Float_t*[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++) fArrayIn[i] = new Float_t[fNUnits[0]];

  fArrayOut = new Float_t*[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++) fArrayOut[i] = new Float_t[fNUnits[fNHiddL+1]];

  fEventsList = new Int_t[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++)fEventsList[i]=i;
}

void TNNKernel::DeleteArray()
{
  Int_t i;

  if(fArrayIn)
  {
    for (i=0;i<fNTrainEvents;i++)delete [] fArrayIn[i];
    delete [] fArrayIn;
    fArrayIn=0;
  }

  if(fArrayOut)
  {
    for (i=0;i<fNTrainEvents;i++)delete [] fArrayOut[i];
    delete [] fArrayOut;
    fArrayOut=0;
  }
}
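
// A sketch of a hand-written training loop equivalent to TrainNCycles
// without the graphics (illustrative) :
//
//   for (Int_t ic=0;ic<100;ic++)
//   {
//     k->Mix();                      // reshuffle the presentation order
//     Double_t e=k->TrainOneCycle(); // one pass over all training events
//     printf("cycle %i : %f\n",ic,e);
//   }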

void TNNKernel::SetTrainTree(TNNTree *t)
{
  // method to associate a TNNTree to the kernel :
  // the events of the tree will be transferred into the internal
  // array of the kernel.

  if(!t){printf("no tree !\n");return;}
  Int_t i;

  // allocation

  SetArraySize((Int_t)(t->GetTree()->GetEntries()));
  printf(" nbr evts for training : %i \n",GetNTrainEvents());

  // loop
  // the methods GetInputAdr() and GetTeachAdr()
  // return the addresses of arrays in the kernel, and the method
  // GetEvent fills these addresses with event i of the train tree t;
  // the method Fill(i) copies the filled arrays into the internal array

  for (i=0;i<(Int_t)(t->GetTree()->GetEntries());i++)
  {
    t->GetEvent(GetInputAdr(),GetTeachAdr(),i);
    Fill(i);
  }
}

void TNNKernel::SetValidTree(TNNTree *t)
{
  // method to associate a TNNTree to the kernel :
  // a link will be made between the tree and the kernel.
  // it is not necessary to keep these events in the kernel

  if(!t){printf("no tree !\n");return;}
  fValidTree=t;
  fNValidEvents=(Int_t)(t->GetTree()->GetEntries());
}

///////////////////////////////////////////////////////////////////
//
// TNNControlE
// tool to plot the output error for training and validation
// in a canvas
//
///////////////////////////////////////////////////////////////////


ClassImp(TNNControlE)


TNNControlE::TNNControlE():TCanvas("NN Errors","NN Errors",10,10,550,430)
{
  // constructor
  SetFillColor(41);
  SetGridx();
  SetGridy();
  GetFrame()->SetFillColor(21);
  GetFrame()->SetBorderSize(12);

  fXT = new Float_t[50];
  fYT = new Float_t[50];
  fXV = new Float_t[50];
  fYV = new Float_t[50];

  fGraphT = new TGraph();
  fGraphT->SetFillColor(19);
  fGraphT->SetLineColor(3);
  fGraphT->SetLineWidth(4);
  fGraphT->SetMarkerColor(3);
  fGraphT->SetMarkerStyle(21);
  fGraphT->SetTitle("training");

  fGraphV = new TGraph();
  fGraphV->SetFillColor(19);
  fGraphV->SetLineColor(5);
  fGraphV->SetLineWidth(4);
  fGraphV->SetMarkerColor(5);
  fGraphV->SetMarkerStyle(20);
  fGraphV->SetTitle("validation");

  fNT=0;
  fNV=0;
}

TNNControlE::~TNNControlE()
{
  // destructor
  if(fGraphV){fGraphV->Delete();fGraphV=0;}
  if(fGraphT){fGraphT->Delete();fGraphT=0;}
  delete [] fXT;
  delete [] fYT;
  delete [] fXV;
  delete [] fYV;
}

void TNNControlE::UpdateG()
{
  // update the graphs
  cd();Clear();

  if(!fNT)return;
  fGraphT->DrawGraph(fNT,fXT,fYT,"ALP");

  if (fNV)
  {
    fGraphV->DrawGraph(fNV,fXV,fYV,"LP");
    DrawT("Validation",0.7,0.7,0.,5);
  }
  DrawT("Training",0.7, 0.8, 0., 3);
  DrawT("Number of cycles",0.35, 0.015, 0., 2);
  DrawT("NN Output Error",0.03,0.35,90.,2);
  Update();
}

void TNNControlE::AddTP(Int_t n,Float_t e)
{
  // adds a point to the train curve : n in x and e (error) in y
  fNT++;
  fXT[fNT-1]=(Float_t)n;fYT[fNT-1]=e;
  if((fNT%50))return;
  // if fNT is a multiple of 50 (50, 100, ...), the arrays are full and we
  // prepare bigger ones (+50)
  Int_t i;
  Float_t *x=new Float_t[fNT],*y=new Float_t[fNT];

  for(i=0;i<fNT;i++){x[i]=fXT[i];y[i]=fYT[i];}
  delete [] fXT;fXT = new Float_t[fNT+50];
  delete [] fYT;fYT = new Float_t[fNT+50];
  for(i=0;i<fNT;i++) {fXT[i]=x[i];fYT[i]=y[i];}

  delete [] x;delete [] y;
}
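
// A sketch of feeding the controller by hand, exactly as TrainNCycles does
// (illustrative values) :
//
//   TNNControlE conte;
//   conte.AddTP(10,0.25); // training error 0.25 at cycle 10
//   conte.AddVP(10,0.27); // validation error at the same cycle
//   conte.UpdateG();      // redraw both curves in the canvas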

void TNNControlE::AddVP(Int_t n,Float_t e)
{
  // adds a point to the valid curve : n in x and e (error) in y
  fNV++;
  fXV[fNV-1]=(Float_t)n;fYV[fNV-1]=e;
  if((fNV%50))return;
  // if fNV is a multiple of 50 (50, 100, ...), the arrays are full and we
  // prepare bigger ones (+50)
  Int_t i;
  Float_t *x=new Float_t[fNV],*y=new Float_t[fNV];

  for(i=0;i<fNV;i++){x[i]=fXV[i];y[i]=fYV[i];}
  delete [] fXV;fXV = new Float_t[fNV+50];
  delete [] fYV;fYV = new Float_t[fNV+50];
  for(i=0;i<fNV;i++) {fXV[i]=x[i];fYV[i]=y[i];}

  delete [] x;delete [] y;
}

///////////////////////////////////////////////////////////////////
//
// TNNUtils
//
///////////////////////////////////////////////////////////////////

ClassImp(TNNUtils)

// destructor
TNNUtils::~TNNUtils()
{
  //if(fB) delete fB; //AAPSUAIDE
}


Int_t TNNUtils::UpdateNewBranch()
{
  // creates or updates the new branch for the tree :
  // creates it if there is no branch yet,
  // updates it if the kernel has changed (different number of output units
  // or a different output array address).
  // this function is private and called by FillNB()

  if(!fT){printf("no tree associated!\n");return 0;}
  if(!fK){printf("no kernel associated!\n");return 0;}
  if(!fFName){printf("no file associated to contain the new branch!\n");return 0;}
  if(fOAdr==(UInt_t)(fK->GetOutputAdr())&&fNOut==fK->GetNOutput())return 1;

  if(fB) delete fB;

  Text_t *varname,*noutunits;

  noutunits=new Text_t[4];sprintf(noutunits,"%i",fK->GetNOutput());
  varname=new Text_t[16];strcpy(varname,""); // room for "OutputU[nnn]/F" plus the terminator
  strcat(varname,"OutputU[");strcat(varname,noutunits);strcat(varname,"]/F");
  delete [] noutunits;
  fB=(fT->Branch("OutputU",fK->GetOutputAdr(),varname));
  delete [] varname;
  fB->SetFile(fFName);
  fOAdr=(UInt_t)fK->GetOutputAdr();
  fNOut=fK->GetNOutput();
  return 1;
}


Int_t TNNUtils::FillNB()
{
  // function to call to fill the new branch.
  // The tree, kernel, and formula must have been specified before with
  // SetTree(TTree *t), SetKernel(TNNKernel *k), SetFormula(Text_t *form).

  if(!UpdateNewBranch())return 0;
  if(fForm.GetNValues()!=fK->GetNInput()){printf("input mismatch\n");return 0;}

  fB->Reset();
  for(Int_t i=0;i<(Int_t)fT->GetEntries();i++)
  {
    fForm.Find(i,fK->GetInputAdr());
    fK->GoThrough();
    fB->Fill();
  }
  return 1;
}
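
// A sketch of the rejection-versus-efficiency construction used in nngrap.C
// (h1 is a histogram filled with the signal outputs, h2 with the background
// outputs, as in the demo macros) :
//
//   tu1->FillNB(); // refresh the OutputU branch after (re)training
//   TGraph *g=tu1->XY(tu1->HIntegral(&h1,1), // x : signal efficiency for OutputU>cut
//                     tu1->HIntegral(&h2,0), // y : background rejection for OutputU>cut
//                     2);
//   g->Draw("ALP");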

TH1F* TNNUtils::HIntegral(TH1F *hOrig, Int_t efficiency, Text_t *name, Text_t *title)
{
  // method to compute the running integral of a histogram :
  // if efficiency is set to 1 (default) : for bin i, the sum is that of bin i and the next bins
  //   ---> we compute an efficiency when a cut is applied on the variable (var > cut).
  // if efficiency is set to 0 : for bin i, the sum is that of bin i and the previous bins
  //   ---> we compute a rejection when a cut is applied on the variable (var > cut).

  if (!hOrig){printf("No input histo!\n");return 0;}
  Int_t i,nBins;
  nBins=hOrig->GetNbinsX();

  TH1F *histo= new TH1F(name,title,nBins,0.,1.);
  if(efficiency)
    for(i=0;i<=nBins+1;i++)histo->SetBinContent(i,hOrig->Integral(i,nBins+1));
  else
  {
    for(i=1;i<=nBins+1;i++)histo->SetBinContent(i,hOrig->Integral(0,i-1));
    histo->SetBinContent(0,0);
  }
  return histo;
}

TGraph* TNNUtils::XY(TH1F *hX, TH1F *hY, Int_t color)
{
  // method to make a graph from 2 histos with the same number of bins :
  // with this method we can draw rejection-efficiency curves ...
  // ex : util.XY(util.HIntegral(h1,1),util.HIntegral(h2,0))->Draw("ALP")

  if (!hX||!hY){printf("Missing input histo!\n");return 0;}
  Int_t nBX=hX->GetNbinsX();
  Int_t nBY=hY->GetNbinsX();
  if(nBX!=nBY){printf("histos don't match!\n");return 0;}

  Float_t *X=new Float_t[nBX],*Y=new Float_t[nBY];
  for(Int_t i=1;i<=nBX;i++)
  {
    X[i-1]=hX->GetBinContent(i);
    Y[i-1]=hY->GetBinContent(i);
  }

  TGraph* g=new TGraph(nBX,X,Y); // TGraph copies the arrays
  delete [] X;delete [] Y;
  g->SetLineColor(color);
  g->SetLineWidth(4);
  return g;
}
