#include <fstream>

#include "TROOT.h"
#include "TSystem.h"
#include "TTree.h"
#include "TMultiLayerPerceptron.h"
#include "TCanvas.h"
#include "TGraph2D.h"

using namespace std;

// Train a neural network (TMultiLayerPerceptron) on one of three field maps:
//   idx = 1 : A_phi (phi component of the potential)
//   idx = 2 : B_r   (radial component of the B field)
//   idx = 3 : B_z   (z component of the B field)
void mlpField(int idx = 1)
{
   // Make sure the MLP library is loaded
   if (!gROOT->GetClass("TMultiLayerPerceptron")) {
      gSystem->Load("libMLP");
   }

   // Put the data in TTrees, one per field component
   Double_t r, z, phi;
   Double_t Br, Bz;

   TTree *tree1 = new TTree("Aphi", "phi component of the potential");
   tree1->Branch("r", &r, "r/D");
   tree1->Branch("z", &z, "z/D");
   tree1->Branch("phi", &phi, "phi/D");
   ifstream input1("aphirect1caso.TXT");
   while (input1 >> r >> z >> phi) tree1->Fill();
   input1.close();

   TTree *tree2 = new TTree("Bfield_r", "radial component of the B field");
   tree2->Branch("r", &r, "r/D");
   tree2->Branch("z", &z, "z/D");
   tree2->Branch("Br", &Br, "Br/D");
   ifstream input2("brrect1caso.TXT");
   while (input2 >> r >> z >> Br) tree2->Fill();
   input2.close();

   TTree *tree3 = new TTree("Bfield_z", "z component of the B field");
   tree3->Branch("r", &r, "r/D");
   tree3->Branch("z", &z, "z/D");
   tree3->Branch("Bz", &Bz, "Bz/D");
   ifstream input3("bzrect2caso.TXT");
   while (input3 >> r >> z >> Bz) tree3->Fill();
   input3.close();

   Int_t n1 = (Int_t)tree1->GetEntries();
   Int_t n2 = (Int_t)tree2->GetEntries();
   Int_t n3 = (Int_t)tree3->GetEntries();

   switch (idx) {
   case 1: {
      // Instantiate the first NN: all events are put in both the
      // training and the test sample ("1","1")
      TMultiLayerPerceptron network1("r,z:8:@phi", tree1, "1", "1");
      network1.Train(1000, "text,graph,update=10");
      network1.Export("Aphi_fct");            // standalone C++ class, usable later
      network1.Export("Aphi_fct", "Python");  // same network as a Python module

      // Plot and compare the original map with the NN response
      ifstream input1b("aphirect1caso.TXT");
      Float_t *r1   = new Float_t[n1];
      Float_t *z1   = new Float_t[n1];
      Float_t *phi1 = new Float_t[n1];
      Float_t *NN1  = new Float_t[n1];
      Int_t i = 0;
      Double_t in[2] = {0};
      while (input1b >> r1[i] >> z1[i] >> phi1[i]) {
         in[0] = r1[i];
         in[1] = z1[i];
         NN1[i] = network1.Evaluate(0, in);
         i++;
      }
      input1b.close();
      TCanvas *c = new TCanvas;
      c->Divide(2, 1);
      c->cd(1);
      TGraph2D *g1a = new TGraph2D(n1, r1, z1, phi1);
      g1a->SetNameTitle("g1a", "Original");
      g1a->Draw("TRI");
      c->cd(2);
      TGraph2D *g1b = new TGraph2D(n1, r1, z1, NN1);
      g1b->SetNameTitle("g1b", "Neural Net");
      g1b->Draw("TRI");
      break;
   }
   case 2: {
      // Instantiate the second NN: with no selection given, half of the
      // entries go to the training sample and half to the test sample
      TMultiLayerPerceptron network2("r,z:10:@Br", tree2);
      network2.Train(1000, "text,graph,update=10");
      network2.Export("Br_fct");  // standalone C++ class, usable later

      // Plot and compare the original map with the NN response
      ifstream input2b("brrect1caso.TXT");
      Float_t *r2  = new Float_t[n2];
      Float_t *z2  = new Float_t[n2];
      Float_t *Br2 = new Float_t[n2];
      Float_t *NN2 = new Float_t[n2];
      Int_t i = 0;
      Double_t in[2] = {0};
      while (input2b >> r2[i] >> z2[i] >> Br2[i]) {
         in[0] = r2[i];
         in[1] = z2[i];
         NN2[i] = network2.Evaluate(0, in);
         i++;
      }
      input2b.close();
      TCanvas *c = new TCanvas;
      c->Divide(2, 1);
      c->cd(1);
      TGraph2D *g2a = new TGraph2D(n2, r2, z2, Br2);
      g2a->SetNameTitle("g2a", "Original");
      g2a->Draw("TRI");
      c->cd(2);
      TGraph2D *g2b = new TGraph2D(n2, r2, z2, NN2);
      g2b->SetNameTitle("g2b", "Neural Net");
      g2b->Draw("TRI");
      break;
   }
   case 3: {
      // Instantiate the third NN: "Entry$%3" selects 2/3 of the entries
      // for training, the remaining 1/3 for testing
      TMultiLayerPerceptron network3("r,z:20:@Bz", tree3, "Entry$%3");
      network3.Train(1000, "text,graph,update=10");
      network3.Export("Bz_fct");  // standalone C++ class, usable later

      // Plot and compare the original map with the NN response
      ifstream input3b("bzrect2caso.TXT");
      Float_t *r3  = new Float_t[n3];
      Float_t *z3  = new Float_t[n3];
      Float_t *Bz3 = new Float_t[n3];
      Float_t *NN3 = new Float_t[n3];
      Int_t i = 0;
      Double_t in[2] = {0};
      while (input3b >> r3[i] >> z3[i] >> Bz3[i]) {
         in[0] = r3[i];
         in[1] = z3[i];
         NN3[i] = network3.Evaluate(0, in);
         i++;
      }
      input3b.close();
      TCanvas *c = new TCanvas;
      c->Divide(2, 1);
      c->cd(1);
      TGraph2D *g3a = new TGraph2D(n3, r3, z3, Bz3);
      g3a->SetNameTitle("g3a", "Original");
      g3a->Draw("TRI");
      c->cd(2);
      TGraph2D *g3b = new TGraph2D(n3, r3, z3, NN3);
      g3b->SetNameTitle("g3b", "Neural Net");
      g3b->Draw("TRI");
      break;
   }
   }
}
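
// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the original macro).
// After running e.g. mlpField(1), network1.Export("Aphi_fct") writes
// Aphi_fct.h / Aphi_fct.cxx containing a standalone class that reproduces
// the trained network without any ROOT dependency. Assuming those generated
// files exist, the network could then be evaluated roughly as below; the
// function name useAphi and the sample (r, z) point are made up for the
// example, and the exact generated signature should be checked in Aphi_fct.h.
//
//   #include "Aphi_fct.cxx"
//
//   void useAphi() {
//      Aphi_fct nn;                           // generated standalone network
//      double aphi = nn.Value(0, 1.5, 0.2);   // output 0 at r = 1.5, z = 0.2
//      printf("A_phi(1.5, 0.2) = %g\n", aphi);
//   }
// ---------------------------------------------------------------------------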