StEmcUtil/neuralNet/NeuNet.cxx
#include "NeuNet.h"

ClassImp(TNNFormula)

// Constructor: binds the formula object to a tree, an input formula and a cut.
TNNFormula::TNNFormula(Text_t *name, Text_t *formula, Text_t *cut, TTree *tree):TNamed(name,"NN Formula")
{
  fNValues=0;
  fTTCut=0;
  fTTFormula=0;
  fClip=1;
  fTree=0;
  fFormula=0;
  fCut=0;
  fRefresh=0;
  SetTree(tree);
  SetFormula(formula);
  SetCut(cut);
}

TNNFormula::~TNNFormula()
{
  Int_t i;

  for(i=0;i<fNValues;i++) delete fTTFormula[i];
  if(fNValues){delete [] fTTFormula;fTTFormula=0;}
  if(fFormula){delete fFormula;fFormula=0;}
  if(fCut){delete fCut;fCut=0;}
  if(fTTCut){delete fTTCut;fTTCut=0;}
}

// Parses a ":"-separated formula string into one TTreeFormula per input value.
void TNNFormula::SetFormula(Text_t *formula)
{
  if(!fTree){printf("A Tree must be set first !\n");return;}
  if(!RMBlanks(formula)){printf("EMPTY INPUT FORMULA !\n");return;}
  if(!fRefresh)
  {
    if(fFormula){delete fFormula;fFormula=0;}
    fFormula=new TStringLong(formula);
  }
  Int_t i,j;
  TString *oneUnit;

  for(i=0;i<fNValues;i++)delete fTTFormula[i];
  if(fNValues){delete [] fTTFormula;fTTFormula=0;}

  fNValues=1;
  for (i=0;formula[i];i++)if(formula[i]==':')fNValues++;

  fTTFormula = new TTreeFormula*[fNValues];

  oneUnit=new TString();
  j=0;
  for (i=0;formula[i];i++)
  {
    if (formula[i]!=':')
      oneUnit->Append(formula[i]);
    else
    {
      fTTFormula[j] = new TTreeFormula("Input",oneUnit->Data(),fTree);
      delete oneUnit;oneUnit=new TString();
      j++;
    }
  }
  fTTFormula[j] = new TTreeFormula("Input",oneUnit->Data(),fTree);
  delete oneUnit;

  printf("Formula = >>%s<< with %3i values\n",formula,fNValues);
}

// Sets the selection cut; an empty cut string removes any previous selection.
void TNNFormula::SetCut(Text_t *cutarg)
{
  if(!fTree){printf("A Tree must be set first !\n");return;}
  Text_t cut[500];
  strcpy(cut,cutarg);
  if(!fRefresh)
  {
    if(fCut){delete fCut;fCut=0;}
    fCut=new TStringLong(cut);
  }
  if (fTTCut){delete fTTCut; fTTCut=0;}
  if (RMBlanks(cut))
    fTTCut = new TTreeFormula("NNFormula Selection",cut,fTree);

  printf("Cut applied in TNNFormula set to >>%s<< \n",cut);
}

void TNNFormula::SetTree(TTree *tree)
{
  if(!tree){printf("No Tree !\n");return;}

  Int_t i;
  fTree=tree;
  for (i=0;i<fNValues;i++) fTTFormula[i]->SetTree(fTree);
  if(fTTCut)fTTCut->SetTree(fTree);
}

// Evaluates the input formula for event iEvent, fills the values array
// (clipped to [0,1] if clipping is on) and returns kTRUE if the cut is passed.
Bool_t TNNFormula::Find(Int_t iEvent, Float_t *values)
{
  if(!fTTFormula){printf("Empty input string !\n");return 0;}
  Int_t i;

  fTree->LoadTree(iEvent);

  for (i=0;i<fNValues;i++)
    values[i]=Clip(fTTFormula[i]->EvalInstance(0));

  if (fTTCut)
    return (Bool_t) fTTCut->EvalInstance(0);
  else
    return (Bool_t) 1;
}

// Prints the formula values for event iEvent and whether the cut selects it.
void TNNFormula::Find(Int_t iEvent)
{
  if(!fTTFormula){printf("Empty input string !\n");return;}
  Int_t i;

  fTree->LoadTree(iEvent);

  for (i=0;i<fNValues;i++) printf("%6.2f ",Clip(fTTFormula[i]->EvalInstance(0)));
  if(fTTCut)
    if(fTTCut->EvalInstance(0)) printf(" SELECTED\n"); else printf(" NOT SELECTED\n");
  else printf("\n");
}

// Removes all blanks from str in place and returns the new length.
Int_t TNNFormula::RMBlanks(Text_t *str)
{
  if(!strlen(str)) return 0;
  TStringLong temp(str);
  Int_t posi;
  while((posi=(Int_t)temp.Index(" "))!=-1){temp.Remove(posi,1);}
  strcpy(str,temp.Data());
  return strlen(str);
}

void TNNFormula::SetClip(Bool_t trueForClip)
{
  fClip=trueForClip;
}

// Clips x to the [0,1] range used by the network units.
Float_t TNNFormula::Clip(Float_t x)
{
  if (!fClip) return x;
  if (x<0) return 0.;
  if (x>1) return 1.;
  return x;
}
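
// Illustrative sketch (not part of the original file): a TNNFormula can be
// used on its own to turn tree branches into clipped network inputs. The
// branch names "e1", "e2", "p" and the tree pointer myTree are hypothetical.
//
//   TNNFormula form("form","e1:e2:e1/p","p>1.",myTree);  // 3 inputs, one cut
//   Float_t in[3];
//   if (form.Find(7,in)) { /* event 7 passed the cut, in[] holds the inputs */ }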

ClassImp(TNNTree)

TNNTree::TNNTree(Text_t *name):TNamed(name,"Neural Network")
{
  fTree=0;
  fNTrees=0;
  fFile=0;
  fInfos=0;
  fFName=0;
  fInput=0;
  fOutput=0;
  fNInput=0;
  fNOutput=0;
}

TNNTree::~TNNTree()
{
  DeleteTree();
  if (fFName) delete [] fFName;
}

// Adds the events [begin,end] of a TTree to the internal training tree:
// the input and output formulas are evaluated for every selected event.
void TNNTree::AddTree(TTree *tree, Int_t begin, Int_t end)
{
  if(!fFName){printf("NO FILE SELECTED !\n");return;}
  if(!tree){printf("Bad pointer on TTree !\n");return;}
  if(fFormula.Length()==0){printf("EMPTY INPUT FORMULA, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(!fFormula.GetNValues()){printf("NO INPUT UNIT, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(fOutFormula.Length()==0){printf("EMPTY OUTPUT FORMULA, GIVE AN OUTPUT FORMULA FIRST !\n");return;}
  if(!fOutFormula.GetNValues()){printf("NO OUTPUT UNIT, GIVE AN OUTPUT FORMULA FIRST !\n");return;}

  SetFormulaTree(tree);RefreshInFormula();RefreshOutFormula();

  if(fTree&&fFormula.GetNValues()!=fNInput) DeleteTree();
  if(fTree&&fOutFormula.GetNValues()!=fNOutput) DeleteTree();

  fNInput=fFormula.GetNValues();
  fNOutput=fOutFormula.GetNValues();
  CheckRange(&begin,&end,(Int_t)(tree->GetEntries())-1);

  Int_t i,nSelected,range;

  if (!fTree) CreateTree();

  fFormula.SetTree(tree);
  fOutFormula.SetTree(tree);
  nSelected=(Int_t)(fTree->GetEntries());
  for (i=begin;i<=end;i++)if(fFormula.Find(i,fInput)){fOutFormula.Find(i,fOutput);fTree->Fill();}
  fFile->Write();
  nSelected=(Int_t)(fTree->GetEntries())-nSelected;
  range=end-begin+1;

  fInfos[fNTrees]=new Text_t[strlen(tree->GetName())+60];
  sprintf(fInfos[fNTrees],"%s ,range [%7i,%7i]=%7i, %7i selected",tree->GetName(),begin,end,range,nSelected);
  printf("%s \n",fInfos[fNTrees]);
  fNTrees++;
}

// Same as above, but the output units are fixed to the values decoded from
// the "out" string (e.g. "1:0") instead of being computed from a formula.
void TNNTree::AddTree(TTree *tree, Text_t *out, Int_t begin, Int_t end)
{
  if(!fFName){printf("NO FILE SELECTED !\n");return;}
  if(!tree){printf("Bad pointer on TTree !\n");return;}
  if(fFormula.Length()==0){printf("EMPTY INPUT FORMULA, GIVE AN INPUT FORMULA FIRST !\n");return;}
  if(!fFormula.GetNValues()){printf("NO INPUT UNIT, GIVE AN INPUT FORMULA FIRST !\n");return;}
  SetFormulaTree(tree);RefreshInFormula();
  if(fTree&&fFormula.GetNValues()!=fNInput) DeleteTree();
  Int_t newNO=NumberOut(out);
  if(fTree&&newNO!=fNOutput) DeleteTree();

  fNInput=fFormula.GetNValues();
  fNOutput=newNO;
  CheckRange(&begin,&end,(Int_t)(tree->GetEntries())-1);

  Int_t i,nSelected,range;

  if (!fTree) CreateTree();

  fFormula.SetTree(tree);
  Decode(out);
  nSelected=(Int_t)(fTree->GetEntries());
  for (i=begin;i<=end;i++)if(fFormula.Find(i,fInput))fTree->Fill();
  fFile->Write();
  nSelected=(Int_t)(fTree->GetEntries())-nSelected;
  range=end-begin+1;

  fInfos[fNTrees]=new Text_t[strlen(tree->GetName())+60];
  sprintf(fInfos[fNTrees],"%s ,range [%7i,%7i]=%7i, %7i selected",tree->GetName(),begin,end,range,nSelected);
  printf("%s \n",fInfos[fNTrees]);
  fNTrees++;
}

// Prints the file name and one summary line per tree added so far.
void TNNTree::Infos()
{
  Int_t i;
  if(fFName)printf("%s \n",fFName);
  for (i=0;i<fNTrees;i++)printf("%3i : %s \n",i,fInfos[i]);
}

// Sets the name of the file that will hold the internal tree.
void TNNTree::SetFile(Text_t *namearg)
{
  if(fTree){printf("File already set and used !\n");return;}
  Text_t name[500];
  strcpy(name,namearg);
  if(!fFName) fFName=new Text_t[500];
  RMBlanks(name);
  strcpy(fFName,name);
  printf("File to record Tree set to >>%s<< \n",fFName);
}

// Forces 0 <= begin <= end <= indexMax, swapping the bounds if needed.
void TNNTree::CheckRange(Int_t *begin, Int_t *end, Int_t indexMax)
{
  Int_t temp;

  if (*begin<0) *begin=0;
  if (*end<0) *end=0;
  if (*begin>indexMax) *begin=indexMax;
  if (*end>indexMax) *end=indexMax;

  if (*begin>*end)
  {
    temp=*begin;
    *begin=*end;
    *end=temp;
  }
}

// Removes all blanks from str in place and returns the new length.
Int_t TNNTree::RMBlanks(Text_t *str)
{
  if(!strlen(str)) return 0;
  TStringLong temp(str);
  Int_t posi;
  while((posi=(Int_t)temp.Index(" "))!=-1){temp.Remove(posi,1);}
  strcpy(str,temp.Data());
  return strlen(str);
}

// Copies the input and output units of event iEvent into the user arrays.
void TNNTree::GetEvent(Float_t *input, Float_t *output, Int_t iEvent)
{
  if(!fTree){printf("No Tree !\n");return;}
  Int_t i;
  fTree->GetEvent(iEvent);
  for(i=0;i<fNInput;i++)input[i]=fInput[i];
  for(i=0;i<fNOutput;i++)output[i]=fOutput[i];
}

// Deletes the internal tree, its file and all bookkeeping arrays.
void TNNTree::DeleteTree()
{
  if(!fTree) return;
  Int_t i;
  delete fTree;fTree=0;
  fFile->Close(); fFile->Delete(); fFile=0;
  for(i=0;i<fNTrees;i++) delete [] fInfos[i];
  delete [] fInfos;
  if (fInput) {delete [] fInput; fInput=0;}
  if (fOutput) {delete [] fOutput; fOutput=0;}
  fNTrees=0;
  fNInput=0;
  fNOutput=0;
}

// Creates the internal tree with two branches, InputU[fNInput] and
// OutputU[fNOutput], in the file given to SetFile().
void TNNTree::CreateTree()
{
  Text_t *varnamei,*varnameo,*nunits;

  fFile = new TFile(fFName,"RECREATE","Neural Network");

  nunits=new Text_t[4];
  sprintf(nunits,"%i",fNInput);
  varnamei=new Text_t[13];
  strcpy(varnamei,"");
  strcat(varnamei,"InputU[");strcat(varnamei,nunits);strcat(varnamei,"]/F");
  strcpy(nunits,"");
  sprintf(nunits,"%i",fNOutput);
  varnameo=new Text_t[14];
  strcpy(varnameo,"");
  strcat(varnameo,"OutputU[");strcat(varnameo,nunits);strcat(varnameo,"]/F");
  delete [] nunits;

  fInput=new Float_t[fNInput];
  fOutput=new Float_t[fNOutput];
  fFile->cd();
  fTree = new TTree(fFName,"Neural Network");
  fTree->Branch("InputU",fInput,varnamei);
  fTree->Branch("OutputU",fOutput,varnameo);
  delete [] varnamei;delete [] varnameo;
  gROOT->cd();

  fInfos=new Text_t*[200];
}

// Returns the number of ":"-separated values in the output string.
Int_t TNNTree::NumberOut(Text_t *ttext)
{
  Int_t i,n;
  Text_t text[200];
  strcpy(text,ttext);
  n=1;
  for (i=0;text[i];i++)if(text[i]==':')n++;
  return n;
}

// Decodes a ":"-separated list of numbers (e.g. "1:0") into fOutput.
void TNNTree::Decode(Text_t *ttext)
{
  Int_t i,j;
  TString *number;
  Text_t text[200];
  strcpy(text,ttext);

  j=0;
  for (i=0;i<fNOutput;i++)
  {
    number=new TString();
    while(text[j]&&(text[j]!=':')){number->Append(text[j]);j++;}
    j++;
    sscanf(number->Data(),"%f",&fOutput[i]);
    delete number;
  }
}
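
// Illustrative sketch (not part of the original file): filling a training
// tree from a signal and a background TTree with fixed outputs. The trees,
// the file name and the event ranges are hypothetical; an input formula must
// be set beforehand with the appropriate setter declared in NeuNet.h.
//
//   TNNTree nnt("train");
//   nnt.SetFile("nntrain.root");
//   // ... set the input formula here ...
//   nnt.AddTree(sigTree,"1",0,4999);   // output unit forced to 1 for signal
//   nnt.AddTree(bkgTree,"0",0,4999);   // output unit forced to 0 for background
//   nnt.Infos();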

ClassImp(TNNKernel)

// Constructor: builds a network with nInput input units, hidden layers given
// by a ":"-separated string (e.g. "10:5") and nOutput output units.
TNNKernel::TNNKernel(Text_t *name, Int_t nInput, Text_t *hidden, Int_t nOutput):TNamed(name,"Neural Network")
{
  fValues=0;
  fErrors=0;
  fBiases=0;
  fNUnits=0;
  fW=0;
  fValidTree=0;
  fArrayOut=0;
  fArrayIn=0;
  fTeach=0;
  fEventsList=0;
  fNWeights=0;
  fDW=0;
  fDB=0;
  fNHiddL=0;

  AllocateVW(nInput,hidden,nOutput);

  fUseBiases=1.;
  fLearnParam=0.2;
  fFlatSE=0.;
  fMu=0.;
  fLowerInitWeight=-1.;
  fUpperInitWeight=1.;

  fNTrainEvents=0;
  fNValidEvents=0;
  fNTrainCycles=0;

  TDatime temps;
  fRandom.SetSeed(temps.Convert());
  printf("First Random Seed = %i\n",fRandom.GetSeed());
  printf("Neural Network is created : \n");
}

// Default constructor.
TNNKernel::TNNKernel()
{
  fValues=0;
  fErrors=0;
  fBiases=0;
  fNUnits=0;
  fW=0;
  fValidTree=0;
  fArrayOut=0;
  fArrayIn=0;
  fTeach=0;
  fEventsList=0;
  fNWeights=0;
  fDW=0;
  fDB=0;

  fUseBiases=1.;
  fLearnParam=0.2;
  fFlatSE=0.;
  fMu=0.;
  fLowerInitWeight=-1.;
  fUpperInitWeight=1.;
  fNHiddL=0;

  fNTrainEvents=0;
  fNValidEvents=0;
  fNTrainCycles=0;

  TDatime temps;
  fRandom.SetSeed(temps.Convert());
  printf("First Random Seed = %i\n",fRandom.GetSeed());
}

TNNKernel::~TNNKernel()
{
  DeleteArray();
  FreeVW();
  if(fEventsList) delete [] fEventsList;
}

// Parses the hidden layer description "n1:n2:..." and stores the number of
// units of each hidden layer in fNUnits[1..fNHiddL].
void TNNKernel::SetHidden(Text_t *ttext)
{
  Int_t i,j;
  TString *number;
  Text_t text[100];
  strcpy(text,ttext);

  fNHiddL=1;
  for (i=0;text[i];i++)if(text[i]==':')fNHiddL++;
  if (fNUnits) delete [] fNUnits;
  fNUnits = new Int_t[fNHiddL+2];

  j=0;
  for (i=1;i<=fNHiddL;i++)
  {
    number=new TString();
    while(text[j]&&(text[j]!=':')){number->Append(text[j]);j++;}
    j++;
    sscanf(number->Data(),"%i",&fNUnits[i]);
    delete number;
  }
}

// Frees the unit values, errors, biases, teach array and weight matrices.
void TNNKernel::FreeVW()
{
  Int_t i,l;

  // unit values, errors, biases and bias increments
  if (fValues)
  {
    for (i=0;i<fNHiddL+2;i++)
      {delete [] fValues[i]; delete [] fErrors[i]; delete [] fBiases[i];delete [] fDB[i];}
    delete [] fValues; delete [] fErrors; delete [] fBiases;delete [] fDB;
    fValues=0;
  }

  // teaching values of the output units
  if (fTeach)
  {
    delete [] fTeach;
    fTeach=0;
  }

  // weights and weight increments
  if (fW)
  {
    for (i=0;i<fNHiddL+1;i++)
    {
      for(l=0;l<fNUnits[i];l++){delete [] fW[i][l];delete [] fDW[i][l];}
      delete [] fW[i];delete [] fDW[i];
    }
    delete [] fW;delete [] fDW;
    fW=0;fDW=0;
  }

  // number of units per layer
  if (fNUnits){ delete [] fNUnits; fNUnits=0;}
}

// Allocates the unit arrays and weight matrices for a network with nInput
// inputs, hidden layers described by "hidden" and nOutput outputs.
void TNNKernel::AllocateVW(Int_t nInput, Text_t *hidden, Int_t nOutput)
{
  Int_t i,l;

  if(fW){printf("free memory first !\n");return;}

  SetHidden(hidden);
  fNUnits[0]=nInput;
  fNUnits[fNHiddL+1]=nOutput;

  // unit values, errors, biases and bias increments
  fValues = new Float_t*[fNHiddL+2];
  fErrors = new Double_t*[fNHiddL+2];
  fBiases = new Double_t*[fNHiddL+2];
  fDB = new Double_t*[fNHiddL+2];

  for (i=0;i<fNHiddL+2;i++)
  {
    fValues[i]=new Float_t[fNUnits[i]];
    fErrors[i]=new Double_t[fNUnits[i]];
    fBiases[i]=new Double_t[fNUnits[i]];
    fDB[i]=new Double_t[fNUnits[i]];
  }

  // teaching values of the output units
  fTeach=new Float_t[fNUnits[fNHiddL+1]];

  // weights and weight increments
  fW=new Double_t**[fNHiddL+1];
  fDW=new Double_t**[fNHiddL+1];

  for (i=0;i<fNHiddL+1;i++)
  {
    fW[i]=new Double_t*[fNUnits[i]];
    fDW[i]=new Double_t*[fNUnits[i]];
    for (l=0;l<fNUnits[i];l++)
    {
      fW[i][l]=new Double_t[fNUnits[i+1]];
      fDW[i][l]=new Double_t[fNUnits[i+1]];
    }
  }
}

// Rebuilds the network with a new architecture.
void TNNKernel::SetKernel(Int_t nInput, Text_t *hidden, Int_t nOutput)
{
  FreeVW();
  AllocateVW(nInput,hidden,nOutput);
}

// Sets the learning rate, the flat spot elimination term and the momentum.
void TNNKernel::SetLearnParam(Double_t learnParam,Double_t fse,Double_t mu)
{
  fLearnParam=fabs(learnParam);
  fFlatSE=fabs(fse);
  fMu=fabs(mu);

  if (fLearnParam>1.0) printf("Warning : %6.2f is not a usual value\n",fLearnParam);
  if (fLearnParam==0.0) printf("Warning : a learning parameter of 0 disables learning\n");
  printf("Learning Parameter set to : %6.2f\n",fLearnParam);
  printf("Flat Spot elimination value set to : %6.2f\n",fFlatSE);
  printf("Momentum set to : %6.2f\n",fMu);
}

// Sets the bounds for the random initialisation of weights and biases.
void TNNKernel::SetInitParam(Float_t lowerInitWeight, Float_t upperInitWeight)
{
  Float_t temp;

  fLowerInitWeight=lowerInitWeight;
  fUpperInitWeight=upperInitWeight;
  if (fLowerInitWeight>fUpperInitWeight)
  {
    temp=fUpperInitWeight;
    fUpperInitWeight=fLowerInitWeight;
    fLowerInitWeight=temp;
  }
  if (fLowerInitWeight==fUpperInitWeight)printf("Warning : the weights initialisation bounds are equal !\n");
  printf("Init Parameters set to :\n");
  printf(" --> Lower bound = %6.2f\n",fLowerInitWeight);
  printf(" --> Upper bound = %6.2f\n",fUpperInitWeight);
}

// Returns a random value uniformly distributed in [fLowerInitWeight,fUpperInitWeight].
Float_t TNNKernel::Alea()
{
  return fLowerInitWeight+fRandom.Rndm()*(fUpperInitWeight-fLowerInitWeight);
}

// Initialises weights and biases with random values and resets the
// weight/bias increments and the cycle counter.
void TNNKernel::Init()
{
  Int_t i,l,c;

  if(!fW){printf("allocate memory first !\n");return;}

  // weights
  for (i=0;i<fNHiddL+1;i++)
    for (l=0;l<fNUnits[i];l++)
      for (c=0;c<fNUnits[i+1];c++) fW[i][l][c]=(Double_t)Alea();

  for(i=0;i<fNHiddL+1;i++)for(l=0;l<fNUnits[i];l++)for(c=0;c<fNUnits[i+1];c++)
    fDW[i][l][c]=0.;

  // biases
  for (i=0;i<fNHiddL+2;i++)
    for (l=0;l<fNUnits[i];l++) fBiases[i][l]=(Double_t)(Alea())*fUseBiases;

  for(i=0;i<fNHiddL+2;i++)for(l=0;l<fNUnits[i];l++)fDB[i][l]=0.;

  fNTrainCycles=0;
  printf("Initialisation done\n");
}

// Prints the network structure, biases, weights and learning parameters.
void TNNKernel::PrintS()
{
  Int_t i,l,c;

  if(!fW){printf("no unit !\n");return;}

  printf("+++++++++ Neural Network %s ++++++++++++\n",GetName());
  for(i=0;i<fNHiddL+2;i++)printf("Layer %1i contains %2i units\n",i,fNUnits[i]);

  if(fUseBiases)printf(">>>>>>> Biases USED");else printf(">>>>>>> Biases DUMMY");

  printf("\n ---------- Biases ---------- \n");
  Int_t maxl=0;
  for(i=0;i<fNHiddL+2;i++)if(fNUnits[i]>=maxl)maxl=fNUnits[i];
  for(i=0;i<fNHiddL+2;i++)printf(" %1i | ",i);printf("\n");
  for(i=0;i<fNHiddL+2;i++)printf("--------|-");printf("\n");
  for(l=0;l<maxl;l++)
  {
    for(i=0;i<fNHiddL+2;i++)
      if(l<fNUnits[i])printf("%6.2f | ",fBiases[i][l]);else printf("       | ");
    printf("\n");
  }

  printf("\n ---------- Weights ----------- \n");
  for(i=0;i<fNHiddL+1;i++)
  {
    printf(" From %1i to %1i : \n",i,i+1);
    printf("%2i |",i);for(l=0;l<fNUnits[i];l++)printf(" %3i |",l);printf("\n");
    printf("===|");for(l=0;l<fNUnits[i];l++)printf("-------");printf("\n");
    printf("%2i |",i+1);for(l=0;l<fNUnits[i];l++)printf("-------");printf("\n");
    for(c=0;c<fNUnits[i+1];c++)
    {
      printf("%2i |",c);
      for(l=0;l<fNUnits[i];l++)printf("%6.2f|",fW[i][l][c]);
      printf("\n");
    }
    printf("\n");
  }

  printf("\n");
  printf("Learning parameter = %6.2f\n",fLearnParam);
  printf("Flat Spot elimination value = %6.2f\n",fFlatSE);
  printf("Momentum = %6.2f\n",fMu);
  printf("Lower initialisation weight = %6.2f\n",fLowerInitWeight);
  printf("Upper initialisation weight = %6.2f\n",fUpperInitWeight);
  printf("Number of events for training = %5i\n",fNTrainEvents);
  printf("Number of events for validation = %5i\n",fNValidEvents);
  printf("Number of cycles done = %3i\n",fNTrainCycles);
  printf("+++++++++++++++++++++++++++++++++++++++++++++++\n");
}

// Forward propagation: each unit value is the sigmoid of the weighted sum of
// the previous layer plus (optionally) its bias.
void TNNKernel::Forward()
{
  Int_t i,l,c;
  Double_t sum;

  if(!fW){printf("no unit !\n");return;}

  for (i=0;i<fNHiddL+1;i++)
    for (c=0;c<fNUnits[i+1];c++)
    {
      sum=0.;
      for(l=0;l<fNUnits[i];l++)sum+=fW[i][l][c]*(Double_t)fValues[i][l];
      fValues[i+1][c]=(Float_t)Sigmoide(sum+fBiases[i+1][c]*fUseBiases);
    }
}

// Back propagation step: updates weights (and biases if used) with the
// gradient term plus a momentum term built from the previous increments.
void TNNKernel::LearnBackward()
{
  if(fNTrainEvents<1){printf("No event to train !!!\n");return;}
  if(!fW){printf("no unit !\n");return;}

  Int_t i,l,c;
  Double_t delta;

  // weights
  for (i=0;i<fNHiddL+1;i++)
    for (l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++)
      {
        delta=fLearnParam*fErrors[i+1][c]*(Double_t)fValues[i][l]+fMu*fDW[i][l][c];
        fW[i][l][c]+=delta;
        fDW[i][l][c]=delta;
      }

  // biases
  if(((Bool_t)fUseBiases))
  {
    for (i=1;i<fNHiddL+2;i++)
      for (l=0;l<fNUnits[i];l++)
      {
        delta=fLearnParam*fErrors[i][l]+fMu*fDB[i][l];
        fBiases[i][l]+=delta;
        fDB[i][l]=delta;
      }
  }
}
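
// For reference, the update rule implemented by LearnBackward() above, read
// directly from the code (not quoted from any external documentation), is
//
//   dW[i][l][c] = fLearnParam * fErrors[i+1][c] * fValues[i][l] + fMu * dW_prev[i][l][c]
//   dB[i][l]    = fLearnParam * fErrors[i][l]                   + fMu * dB_prev[i][l]
//
// where fErrors holds the back-propagated error terms filled by Error()
// below, each multiplied by (SigPrim(value)+fFlatSE).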

// Computes the mean absolute output error for the current event and fills the
// error terms of the output and hidden layers used by LearnBackward().
Double_t TNNKernel::Error()
{
  Int_t i,l,c;
  Double_t sum,error=0,errorOneUnit;
  if(!fW){printf("no unit !\n");return 0;}

  // output layer
  for(l=0;l<fNUnits[fNHiddL+1];l++)
  {
    errorOneUnit=(Double_t)(fTeach[l]-fValues[fNHiddL+1][l]);
    error+=fabs(errorOneUnit);
    fErrors[fNHiddL+1][l]=errorOneUnit*(SigPrim(fValues[fNHiddL+1][l])+fFlatSE);
  }
  error=error/(Double_t)fNUnits[fNHiddL+1];

  // hidden layers, from the last one back to the first one
  for(i=fNHiddL;i>=1;i--)
  {
    for(l=0;l<fNUnits[i];l++)
    {
      sum=0.;
      for(c=0;c<fNUnits[i+1];c++) sum+=fW[i][l][c]*fErrors[i+1][c];
      fErrors[i][l]=sum*(SigPrim((Double_t)fValues[i][l])+fFlatSE);
    }
  }

  return error;
}

// Same mean output error as Error(), but without filling the back propagation terms.
Double_t TNNKernel::ErrorO()
{
  Int_t l;
  Double_t error=0;
  if(!fW){printf("no unit !\n");return 0;}
  for(l=0;l<fNUnits[fNHiddL+1];l++)
    error+=fabs((Double_t)(fTeach[l]-fValues[fNHiddL+1][l]));

  error=error/(Double_t)fNUnits[fNHiddL+1];

  return error;
}

// One training cycle: loops over the (shuffled) training events, propagates
// each one forward, accumulates the error and updates the weights.
// Returns the mean output error over the cycle.
Double_t TNNKernel::TrainOneCycle()
{
  if(fNTrainEvents<1){printf("No event to train !!!\n");return 0.;}
  if(!fW){printf("no unit !\n");return 0.;}

  Int_t i;
  Double_t error=0.;

  for(i=0;i<fNTrainEvents;i++)
  {
    GetArrayEvt(fEventsList[i]);
    Forward();
    error+=Error();
    LearnBackward();
  }

  fNTrainCycles++;
  error=error/(Double_t)fNTrainEvents;

  return error;
}

// Mean output error on the validation tree, without any weight update.
Double_t TNNKernel::Valid()
{
  if(fNValidEvents<1) return 0.;

  Double_t error=0.;
  for (Int_t j=0;j<fNValidEvents;j++)
  {
    fValidTree->GetEvent(GetInputAdr(),GetTeachAdr(),j);
    error+=GoThrough();
  }
  error=error/(Double_t)fNValidEvents;
  return error;
}

// Trains for nCycles cycles, printing the training (and validation) error for
// each cycle and updating the control canvas every "period" cycles.
void TNNKernel::TrainNCycles(TNNControlE *conte, Int_t period, Int_t nCycles)
{
  if(!conte){printf("no controller !\n");return;}
  Float_t errt,errv;
  for(Int_t i=0;i<nCycles;i++)
  {
    Mix();
    errt=(Float_t)TrainOneCycle();
    errv=(Float_t)Valid();
    printf("cycle %3i > train : %7.3f",fNTrainCycles,errt);
    if(fNValidEvents)printf(" and valid : %7.3f \n",errv);else printf("\n");
    if(!(i%period)||i==(nCycles-1))
    {
      conte->AddTP(fNTrainCycles,errt);
      conte->AddVP(fNTrainCycles,errv);
      conte->UpdateG();
    }
  }
}

// Writes the network structure, biases, weights, cycle counter and bias flag
// to a plain text file.
void TNNKernel::Export(Text_t *fileName)
{
  Int_t i,l,c;

  if(!fW){printf("no unit !\n");return;}

  FILE *file;
  file=fopen(fileName,"w");

  fprintf(file,"%3i\n",fNHiddL);
  for(i=0;i<fNHiddL+2;i++)fprintf(file,"%3i\n",fNUnits[i]);

  for(i=0;i<fNHiddL+2;i++)
    for(l=0;l<fNUnits[i];l++)fprintf(file,"%8.4f\n",fBiases[i][l]);

  for(i=0;i<fNHiddL+1;i++)
    for(l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++)fprintf(file,"%8.4f\n",fW[i][l][c]);

  fprintf(file,"%5i\n",fNTrainCycles);
  fprintf(file,"%2.0f\n",fUseBiases);

  fclose(file);
}

// Reads back a network written by Export(): rebuilds the architecture, then
// restores the biases, the weights, the cycle counter and the bias flag.
void TNNKernel::Import(Text_t *fileName)
{
  Int_t i,l,c,newI,newHL,newO;
  Text_t hidden[100],piece[5];
  FILE *file;
  file=fopen(fileName,"r");

  fscanf(file,"%3i",&newHL);
  fscanf(file,"%3i",&newI);
  strcpy(hidden,"");
  for(i=1;i<newHL;i++)
    {fscanf(file,"%s",piece);strcat(hidden,piece);strcat(hidden,":");}
  fscanf(file,"%s",piece);strcat(hidden,piece);
  fscanf(file,"%3i",&newO);

  printf("New NN set to : %3i %s %3i \n",newI,hidden,newO);
  FreeVW();
  AllocateVW(newI,hidden,newO);
  Float_t tmpfl;
  for(i=0;i<fNHiddL+2;i++)
    for(l=0;l<fNUnits[i];l++){fDB[i][l]=0.;fscanf(file,"%f",&tmpfl);*(fBiases[i]+l)=(Double_t)tmpfl;}

  for(i=0;i<fNHiddL+1;i++)
    for(l=0;l<fNUnits[i];l++)
      for(c=0;c<fNUnits[i+1];c++){fDW[i][l][c]=0.;fscanf(file,"%f",&tmpfl);*(fW[i][l]+c)=(Double_t)tmpfl;}

  fscanf(file,"%5i",&fNTrainCycles);
  fscanf(file,"%f",&tmpfl);fUseBiases=(Double_t)tmpfl;

  fclose(file);
}

// Shuffles the list of training events by doing 3*fNTrainEvents random swaps.
void TNNKernel::Mix()
{
  Int_t i,i1,i2;
  Int_t temp;
  for (i=0;i<3*fNTrainEvents;i++)
  {
    i1=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
    i2=(Int_t)(fRandom.Rndm()*(Float_t)fNTrainEvents);
    temp=fEventsList[i1];
    fEventsList[i1]=fEventsList[i2];
    fEventsList[i2]=temp;
  }
}

// Allocates the internal arrays holding the training events.
void TNNKernel::SetArraySize(Int_t size)
{
  DeleteArray();
  if (fEventsList) {delete [] fEventsList; fEventsList=0;}
  if(!size)return;
  Int_t i;
  fNTrainEvents=size;
  fArrayIn = new Float_t*[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++) fArrayIn[i] = new Float_t[fNUnits[0]];

  fArrayOut = new Float_t*[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++) fArrayOut[i] = new Float_t[fNUnits[fNHiddL+1]];

  fEventsList = new Int_t[fNTrainEvents];
  for (i=0;i<fNTrainEvents;i++)fEventsList[i]=i;
}

void TNNKernel::DeleteArray()
{
  Int_t i;

  if(fArrayIn)
  {
    for (i=0;i<fNTrainEvents;i++)delete [] fArrayIn[i];
    delete [] fArrayIn;
    fArrayIn=0;
  }

  if(fArrayOut)
  {
    for (i=0;i<fNTrainEvents;i++)delete [] fArrayOut[i];
    delete [] fArrayOut;
    fArrayOut=0;
  }
}

// Copies all events of a TNNTree into the internal training arrays.
void TNNKernel::SetTrainTree(TNNTree *t)
{
  if(!t){printf("no tree !\n");return;}
  Int_t i;

  SetArraySize((Int_t)(t->GetTree()->GetEntries()));
  printf(" nbr evts for training : %i \n",GetNTrainEvents());

  for (i=0;i<(Int_t)(t->GetTree()->GetEntries());i++)
  {
    t->GetEvent(GetInputAdr(),GetTeachAdr(),i);
    Fill(i);
  }
}

// Registers a TNNTree to be used for validation after each training cycle.
void TNNKernel::SetValidTree(TNNTree *t)
{
  if(!t){printf("no tree !\n");return;}
  fValidTree=t;
  fNValidEvents=(Int_t)(t->GetTree()->GetEntries());
}
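
// Illustrative sketch (not part of the original file): a typical training
// session with this kernel, assuming "train" and "valid" are TNNTree objects
// already filled as sketched earlier. Layer sizes, learning parameters and
// cycle counts are arbitrary choices, not recommendations.
//
//   TNNKernel k("kernel",3,"6:4",1);   // 3 inputs, hidden layers of 6 and 4 units, 1 output
//   TNNControlE ce;                    // canvas showing the error evolution
//   k.SetTrainTree(&train);
//   k.SetValidTree(&valid);
//   k.SetLearnParam(0.2,0.,0.);
//   k.Init();
//   k.TrainNCycles(&ce,10,200);        // 200 cycles, update the plot every 10
//   k.Export("weights.txt");           // save the trained network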

ClassImp(TNNControlE)

// Constructor: a canvas with two growable graphs, one for the training error
// and one for the validation error.
TNNControlE::TNNControlE():TCanvas("NN Errors","NN Errors",10,10,550,430)
{
  SetFillColor(41);
  SetGridx();
  SetGridy();
  GetFrame()->SetFillColor(21);
  GetFrame()->SetBorderSize(12);

  fXT = new Float_t[50];
  fYT = new Float_t[50];
  fXV = new Float_t[50];
  fYV = new Float_t[50];

  fGraphT = new TGraph();
  fGraphT->SetFillColor(19);
  fGraphT->SetLineColor(3);
  fGraphT->SetLineWidth(4);
  fGraphT->SetMarkerColor(3);
  fGraphT->SetMarkerStyle(21);
  fGraphT->SetTitle("training");

  fGraphV = new TGraph();
  fGraphV->SetFillColor(19);
  fGraphV->SetLineColor(5);
  fGraphV->SetLineWidth(4);
  fGraphV->SetMarkerColor(5);
  fGraphV->SetMarkerStyle(20);
  fGraphV->SetTitle("validation");

  fNT=0;
  fNV=0;
}

TNNControlE::~TNNControlE()
{
  if(fGraphV){fGraphV->Delete();fGraphV=0;}
  if(fGraphT){fGraphT->Delete();fGraphT=0;}
  delete [] fXT;
  delete [] fYT;
  delete [] fXV;
  delete [] fYV;
}

// Redraws the error graphs and their labels on the canvas.
void TNNControlE::UpdateG()
{
  cd();Clear();

  if(!fNT)return;
  fGraphT->DrawGraph(fNT,fXT,fYT,"ALP");

  if (fNV)
  {
    fGraphV->DrawGraph(fNV,fXV,fYV,"LP");
    DrawT("Validation",0.7,0.7,0.,5);
  }
  DrawT("Training",0.7, 0.8, 0., 3);
  DrawT("Number of cycles",0.35, 0.015, 0., 2);
  DrawT("NN Output Error",0.03,0.35,90.,2);
  Update();
}

// Adds a training point (cycle n, error e); the arrays grow by 50 entries
// each time they fill up.
void TNNControlE::AddTP(Int_t n,Float_t e)
{
  fNT++;
  fXT[fNT-1]=(Float_t)n;fYT[fNT-1]=e;
  if((fNT%50))return;

  // arrays full: copy into larger buffers
  Int_t i;
  Float_t *x=new Float_t[fNT],*y=new Float_t[fNT];

  for(i=0;i<fNT;i++){x[i]=fXT[i];y[i]=fYT[i];}
  delete [] fXT;fXT = new Float_t[fNT+50];
  delete [] fYT;fYT = new Float_t[fNT+50];
  for(i=0;i<fNT;i++) {fXT[i]=x[i];fYT[i]=y[i];}

  delete [] x;delete [] y;
}

// Adds a validation point (cycle n, error e); same growth scheme as AddTP.
void TNNControlE::AddVP(Int_t n,Float_t e)
{
  fNV++;
  fXV[fNV-1]=(Float_t)n;fYV[fNV-1]=e;
  if((fNV%50))return;

  // arrays full: copy into larger buffers
  Int_t i;
  Float_t *x=new Float_t[fNV],*y=new Float_t[fNV];

  for(i=0;i<fNV;i++){x[i]=fXV[i];y[i]=fYV[i];}
  delete [] fXV;fXV = new Float_t[fNV+50];
  delete [] fYV;fYV = new Float_t[fNV+50];
  for(i=0;i<fNV;i++) {fXV[i]=x[i];fYV[i]=y[i];}

  delete [] x;delete [] y;
}

ClassImp(TNNUtils)

TNNUtils::~TNNUtils()
{
}

// (Re)creates the OutputU branch on the associated tree when the kernel
// output address or the number of output units has changed.
Int_t TNNUtils::UpdateNewBranch()
{
  if(!fT){printf("no tree associated!\n");return 0;}
  if(!fK){printf("no kernel associated!\n");return 0;}
  if(!fFName){printf("no file associated to contain the new branch!\n");return 0;}
  if(fOAdr==(UInt_t)(fK->GetOutputAdr())&&fNOut==fK->GetNOutput())return 1;

  if(fB) delete fB;

  Text_t *varname,*noutunits;

  noutunits=new Text_t[4];sprintf(noutunits,"%i",fK->GetNOutput());
  varname=new Text_t[14];strcpy(varname,"");
  strcat(varname,"OutputU[");strcat(varname,noutunits);strcat(varname,"]/F");
  delete [] noutunits;
  fB=(fT->Branch("OutputU",fK->GetOutputAdr(),varname));
  delete [] varname;
  fB->SetFile(fFName);
  fOAdr=(UInt_t)fK->GetOutputAdr();
  fNOut=fK->GetNOutput();
  return 1;
}

// Fills the OutputU branch: for every entry of the associated tree, the input
// formula is evaluated, the network output is computed and the branch is filled.
Int_t TNNUtils::FillNB()
{
  if(!UpdateNewBranch())return 0;
  if(fForm.GetNValues()!=fK->GetNInput()){printf("input mismatch\n");return 0;}

  fB->Reset();
  for(Int_t i=0;i<(Int_t)fT->GetEntries();i++)
  {
    fForm.Find(i,fK->GetInputAdr());
    fK->GoThrough();
    fB->Fill();
  }
  return 1;
}

// Returns the cumulative distribution of hOrig: with efficiency!=0 each bin
// holds the integral from that bin to the upper edge, otherwise the integral
// of all bins below it.
TH1F* TNNUtils::HIntegral(TH1F *hOrig, Int_t efficiency, Text_t *name, Text_t *title)
{
  if (!hOrig){printf("No input histo!\n");return 0;}
  Int_t i,nBins;
  nBins=hOrig->GetNbinsX();

  TH1F *histo= new TH1F(name,title,nBins,0.,1.);
  if(efficiency)
    for(i=0;i<=nBins+1;i++)histo->SetBinContent(i,hOrig->Integral(i,nBins+1));
  else
  {
    for(i=1;i<=nBins+1;i++)histo->SetBinContent(i,hOrig->Integral(0,i-1));
    histo->SetBinContent(0,0);
  }
  return histo;
}

// Builds a TGraph of the bin contents of hY versus those of hX (the two
// histograms must have the same number of bins).
TGraph* TNNUtils::XY(TH1F *hX, TH1F *hY, Int_t color)
{
  if (!hX||!hY){printf("Missing input histo!\n");return 0;}
  Int_t nBX=hX->GetNbinsX();
  Int_t nBY=hY->GetNbinsX();
  if(nBX!=nBY){printf("histos don't match!\n");return 0;}

  Float_t *X=new Float_t[nBX],*Y=new Float_t[nBY];
  for(Int_t i=1;i<=nBX;i++)
  {
    X[i-1]=hX->GetBinContent(i);
    Y[i-1]=hY->GetBinContent(i);
  }

  TGraph* g=new TGraph(nBX,X,Y);
  g->SetLineColor(color);
  g->SetLineWidth(4);
  delete [] X;delete [] Y;
  return g;
}
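
// Illustrative sketch (not part of the original file): combining HIntegral()
// and XY() to draw a signal-efficiency versus background curve from the
// network output. The TNNUtils object u and the histograms hSig and hBkg
// (network output for signal and background, assumed normalised to unit area)
// are hypothetical.
//
//   TH1F *effS = u.HIntegral(hSig,1,"effS","signal efficiency");
//   TH1F *intB = u.HIntegral(hBkg,0,"intB","background below cut");
//   TGraph *curve = u.XY(effS,intB,2);
//   curve->Draw("AL");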