SND@LHC Software
shipDigiReco.py
1from __future__ import print_function
2from __future__ import division
3import os,ROOT,shipVertex,shipDet_conf
4import global_variables
5if global_variables.realPR == "Prev":
6 import shipPatRec_prev as shipPatRec # The previous version of the pattern recognition
7else: import shipPatRec
8import shipunit as u
9import rootUtils as ut
10from array import array
11import sys
12from math import fabs
13stop = ROOT.TVector3()
14start = ROOT.TVector3()
15
17 " convert FairSHiP MC hits / digitized hits to measurements"
18 def __init__(self,fout,fgeo):
19 self.fn = ROOT.TFile.Open(fout,'update')
20 self.sTree = self.fn.cbmsim
21 if self.sTree.GetBranch("FitTracks"):
22 print("remove RECO branches and rerun reconstruction")
23 self.fn.Close()
24 # make a new file without reco branches
25 f = ROOT.TFile(fout)
26 sTree = f.cbmsim
27 if sTree.GetBranch("FitTracks"): sTree.SetBranchStatus("FitTracks",0)
28 if sTree.GetBranch("goodTracks"): sTree.SetBranchStatus("goodTracks",0)
29 if sTree.GetBranch("VetoHitOnTrack"): sTree.SetBranchStatus("VetoHitOnTrack",0)
30 if sTree.GetBranch("Particles"): sTree.SetBranchStatus("Particles",0)
31 if sTree.GetBranch("fitTrack2MC"): sTree.SetBranchStatus("fitTrack2MC",0)
32 if sTree.GetBranch("EcalClusters"): sTree.SetBranchStatus("EcalClusters",0)
33 if sTree.GetBranch("EcalReconstructed"): sTree.SetBranchStatus("EcalReconstructed",0)
34 if sTree.GetBranch("Pid"): sTree.SetBranchStatus("Pid",0)
35 if sTree.GetBranch("Digi_StrawtubesHits"): sTree.SetBranchStatus("Digi_StrawtubesHits",0)
36 if sTree.GetBranch("Digi_SBTHits"): sTree.SetBranchStatus("Digi_SBTHits",0)
37 if sTree.GetBranch("digiSBT2MC"): sTree.SetBranchStatus("digiSBT2MC",0)
38 if sTree.GetBranch("Digi_TimeDetHits"): sTree.SetBranchStatus("Digi_TimeDetHits",0)
39 if sTree.GetBranch("Digi_UpstreamTaggerHits"): sTree.SetBranchStatus("Digi_UpstreamTaggerHits",0)
40 if sTree.GetBranch("Digi_MuonHits"): sTree.SetBranchStatus("Digi_MuonHits",0)
41
42 rawFile = fout.replace("_rec.root","_raw.root")
43 recf = ROOT.TFile(rawFile,"recreate")
44 newTree = sTree.CloneTree(0)
45 for n in range(sTree.GetEntries()):
46 sTree.GetEntry(n)
47 rc = newTree.Fill()
48 sTree.Clear()
49 newTree.AutoSave()
50 f.Close()
51 recf.Close()
52 os.system('cp '+rawFile +' '+fout)
53 self.fn = ROOT.TFile(fout,'update')
54 self.sTree = self.fn.cbmsim
55# check that all containers are present, otherwise create dummy version
56 self.dummyContainers={}
57 branch_class = {"vetoPoint":"vetoPoint","ShipRpcPoint":"ShipRpcPoint","TargetPoint":"TargetPoint",\
58 "strawtubesPoint":"strawtubesPoint","EcalPointLite":"ecalPoint",\
59 "TimeDetPoint":"TimeDetPoint","muonPoint":"muonPoint","UpstreamTaggerPoint":"UpstreamTaggerPoint"}
60 for x in branch_class:
61 if not self.sTree.GetBranch(x):
62 self.dummyContainers[x+"_array"] = ROOT.TClonesArray(branch_class[x])
63 self.dummyContainers[x] = self.sTree.Branch(x,self.dummyContainers[x+"_array"],32000,-1)
64 setattr(self.sTree,x,self.dummyContainers[x+"_array"])
65 self.dummyContainers[x].Fill()
66#
67 if self.sTree.GetBranch("GeoTracks"): self.sTree.SetBranchStatus("GeoTracks",0)
68# prepare for output
69# event header
70 self.header = ROOT.FairEventHeader()
71 self.eventHeader = self.sTree.Branch("ShipEventHeader",self.header,32000,-1)
72# fitted tracks
73 self.fGenFitArray = ROOT.TClonesArray("genfit::Track")
74 self.fGenFitArray.BypassStreamer(ROOT.kFALSE)
75 self.fitTrack2MC = ROOT.std.vector('int')()
76 self.goodTracksVect = ROOT.std.vector('int')()
77 self.mcLink = self.sTree.Branch("fitTrack2MC",self.fitTrack2MC,32000,-1)
78 self.fitTracks = self.sTree.Branch("FitTracks", self.fGenFitArray,32000,-1)
79 self.goodTracksBranch = self.sTree.Branch("goodTracks",self.goodTracksVect,32000,-1)
80 self.fTrackletsArray = ROOT.TClonesArray("Tracklet")
81 self.Tracklets = self.sTree.Branch("Tracklets", self.fTrackletsArray,32000,-1)
82#
83 self.digiStraw = ROOT.TClonesArray("strawtubesHit")
84 self.digiStrawBranch = self.sTree.Branch("Digi_StrawtubesHits",self.digiStraw,32000,-1)
85 self.digiSBT = ROOT.TClonesArray("vetoHit")
86 self.digiSBTBranch=self.sTree.Branch("Digi_SBTHits",self.digiSBT,32000,-1)
87 self.vetoHitOnTrackArray = ROOT.TClonesArray("vetoHitOnTrack")
88 self.vetoHitOnTrackBranch=self.sTree.Branch("VetoHitOnTrack",self.vetoHitOnTrackArray,32000,-1)
89 self.digiSBT2MC = ROOT.std.vector('std::vector< int >')()
90 self.mcLinkSBT = self.sTree.Branch("digiSBT2MC",self.digiSBT2MC,32000,-1)
91 self.digiTimeDet = ROOT.TClonesArray("TimeDetHit")
92 self.digiTimeDetBranch=self.sTree.Branch("Digi_TimeDetHits",self.digiTimeDet,32000,-1)
93 self.digiUpstreamTagger = ROOT.TClonesArray("UpstreamTaggerHit")
94 self.digiUpstreamTaggerBranch=self.sTree.Branch("Digi_UpstreamTaggerHits",self.digiUpstreamTagger,32000,-1)
95 self.digiMuon = ROOT.TClonesArray("muonHit")
96 self.digiMuonBranch=self.sTree.Branch("Digi_muonHits",self.digiMuon,32000,-1)
97# for the digitizing step
98 self.v_drift = global_variables.modules["Strawtubes"].StrawVdrift()
99 self.sigma_spatial = global_variables.modules["Strawtubes"].StrawSigmaSpatial()
100# optional if present, splitcalCluster
101 if self.sTree.GetBranch("splitcalPoint"):
102 self.digiSplitcal = ROOT.TClonesArray("splitcalHit")
103 self.digiSplitcalBranch=self.sTree.Branch("Digi_SplitcalHits",self.digiSplitcal,32000,-1)
104 self.recoSplitcal = ROOT.TClonesArray("splitcalCluster")
105 self.recoSplitcalBranch=self.sTree.Branch("Reco_SplitcalClusters",self.recoSplitcal,32000,-1)
106
107# setup ecal reconstruction
108 self.caloTasks = []
109 if self.sTree.GetBranch("EcalPoint") and not self.sTree.GetBranch("splitcalPoint"):
110# Creates, exports and fills calorimeter structure
111 dflag = 10 if global_variables.debug else 0
112 ecalGeo = global_variables.ecalGeoFile + 'z' + str(global_variables.ShipGeo.ecal.z) + ".geo"
113 if not ecalGeo in os.listdir(os.environ["FAIRSHIP"]+"/geometry"):
114 shipDet_conf.makeEcalGeoFile(global_variables.ShipGeo.ecal.z, global_variables.ShipGeo.ecal.File)
115 ecalFiller=ROOT.ecalStructureFiller("ecalFiller", dflag,ecalGeo)
116 ecalFiller.SetUseMCPoints(ROOT.kTRUE)
117 ecalFiller.StoreTrackInformation()
118 self.caloTasks.append(ecalFiller)
119 #GeV -> ADC conversion
120 ecalDigi=ROOT.ecalDigi("ecalDigi",0)
121 self.caloTasks.append(ecalDigi)
122 #ADC -> GeV conversion
123 ecalPrepare=ROOT.ecalPrepare("ecalPrepare",0)
124 self.caloTasks.append(ecalPrepare)
125 # Maximums locator
126 ecalMaximumFind=ROOT.ecalMaximumLocator("maximumFinder",dflag)
127 self.caloTasks.append(ecalMaximumFind)
128 # Cluster calibration
129 ecalClusterCalib=ROOT.ecalClusterCalibration("ecalClusterCalibration", 0)
130 #4x4 cm cells
131 ecalCl3PhS=ROOT.TFormula("ecalCl3PhS", "[0]+x*([1]+x*([2]+x*[3]))")
132 ecalCl3PhS.SetParameters(6.77797e-04, 5.75385e+00, 3.42690e-03, -1.16383e-04)
133 ecalClusterCalib.SetStraightCalibration(3, ecalCl3PhS)
134 ecalCl3Ph=ROOT.TFormula("ecalCl3Ph", "[0]+x*([1]+x*([2]+x*[3]))+[4]*x*y+[5]*x*y*y")
135 ecalCl3Ph.SetParameters(0.000750975, 5.7552, 0.00282783, -8.0025e-05, -0.000823651, 0.000111561)
136 ecalClusterCalib.SetCalibration(3, ecalCl3Ph)
137#6x6 cm cells
138 ecalCl2PhS=ROOT.TFormula("ecalCl2PhS", "[0]+x*([1]+x*([2]+x*[3]))")
139 ecalCl2PhS.SetParameters(8.14724e-04, 5.67428e+00, 3.39030e-03, -1.28388e-04)
140 ecalClusterCalib.SetStraightCalibration(2, ecalCl2PhS)
141 ecalCl2Ph=ROOT.TFormula("ecalCl2Ph", "[0]+x*([1]+x*([2]+x*[3]))+[4]*x*y+[5]*x*y*y")
142 ecalCl2Ph.SetParameters(0.000948095, 5.67471, 0.00339177, -0.000122629, -0.000169109, 8.33448e-06)
143 ecalClusterCalib.SetCalibration(2, ecalCl2Ph)
144 self.caloTasks.append(ecalClusterCalib)
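# (illustrative note, not part of the original file) The calibration formulas above are cubic
# polynomials written in Horner form: "[0]+x*([1]+x*([2]+x*[3]))" evaluates to
# p0 + p1*x + p2*x**2 + p3*x**3 for the first TFormula variable x; the "Ph" variants add the
# mixed terms [4]*x*y + [5]*x*y*y in the second TFormula variable y.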
145# Cluster finder
146 ecalClusterFind=ROOT.ecalClusterFinder("clusterFinder",dflag)
147 self.caloTasks.append(ecalClusterFind)
148# Calorimeter reconstruction
149 ecalReco=ROOT.ecalReco('ecalReco',0)
150 self.caloTasks.append(ecalReco)
151# Match reco to MC
152 ecalMatch=ROOT.ecalMatch('ecalMatch',0)
153 self.caloTasks.append(ecalMatch)
154 if global_variables.EcalDebugDraw:
155 # ecal drawer: Draws calorimeter structure, incoming particles, clusters, maximums
156 ecalDrawer=ROOT.ecalDrawer("clusterFinder",10)
157 self.caloTasks.append(ecalDrawer)
158 # add pid reco
159 import shipPid
160 self.caloTasks.append(shipPid.Task(self))
161# prepare vertexing
162 self.Vertexing = shipVertex.Task(global_variables.h, self.sTree)
163# setup random number generator
164 self.random = ROOT.TRandom()
165 ROOT.gRandom.SetSeed(13)
166 self.PDG = ROOT.TDatabasePDG.Instance()
167# access ShipTree
168 self.sTree.GetEvent(0)
169 if len(self.caloTasks)>0:
170 print("** initialize Calo reconstruction **")
171 self.ecalStructure = ecalFiller.InitPython(self.sTree.EcalPointLite)
172 ecalDigi.InitPython(self.ecalStructure)
173 ecalPrepare.InitPython(self.ecalStructure)
174 self.ecalMaximums = ecalMaximumFind.InitPython(self.ecalStructure)
175 self.ecalCalib = ecalClusterCalib.InitPython()
176 self.ecalClusters = ecalClusterFind.InitPython(self.ecalStructure, self.ecalMaximums, self.ecalCalib)
177 self.EcalClusters = self.sTree.Branch("EcalClusters",self.ecalClusters,32000,-1)
178 self.ecalReconstructed = ecalReco.InitPython(self.sTree.EcalClusters, self.ecalStructure, self.ecalCalib)
179 self.EcalReconstructed = self.sTree.Branch("EcalReconstructed",self.ecalReconstructed,32000,-1)
180 ecalMatch.InitPython(self.ecalStructure, self.ecalReconstructed, self.sTree.MCTrack)
181 if global_variables.EcalDebugDraw:
182 ecalDrawer.InitPython(self.sTree.MCTrack, self.sTree.EcalPoint, self.ecalStructure, self.ecalClusters)
183 else:
184 ecalClusters = ROOT.TClonesArray("ecalCluster")
185 ecalReconstructed = ROOT.TClonesArray("ecalReconstructed")
186 self.EcalClusters = self.sTree.Branch("EcalClusters",ecalClusters,32000,-1)
187 self.EcalReconstructed = self.sTree.Branch("EcalReconstructed",ecalReconstructed,32000,-1)
188#
189# init geometry and mag. field
190 gMan = ROOT.gGeoManager
191 self.geoMat = ROOT.genfit.TGeoMaterialInterface()
192#
193 self.bfield = ROOT.genfit.FairShipFields()
194 self.bfield.setField(global_variables.fieldMaker.getGlobalField())
195 self.fM = ROOT.genfit.FieldManager.getInstance()
196 self.fM.init(self.bfield)
197 ROOT.genfit.MaterialEffects.getInstance().init(self.geoMat)
198
199 # init fitter, to be done before importing shipPatRec
200 #fitter = ROOT.genfit.KalmanFitter()
201 #fitter = ROOT.genfit.KalmanFitterRefTrack()
202 self.fitter = ROOT.genfit.DAF()
203 self.fitter.setMaxIterations(50)
204 if global_variables.debug:
205 self.fitter.setDebugLvl(1) # produces lot of printout
206 #set to True if "real" pattern recognition is required also
207
208# for 'real' PatRec
209 shipPatRec.initialize(fgeo)
210
211 def reconstruct(self):
212 ntracks = self.findTracks()
213 nGoodTracks = self.findGoodTracks()
214 self.linkVetoOnTracks()
215 for x in self.caloTasks:
216 if hasattr(x,'execute'): x.execute()
217 elif x.GetName() == 'ecalFiller': x.Exec('start',self.sTree.EcalPointLite)
218 elif x.GetName() == 'ecalMatch': x.Exec('start',self.ecalReconstructed, self.sTree.MCTrack)
219 else : x.Exec('start')
220 if len(self.caloTasks)>0:
221 self.EcalClusters.Fill()
222 self.EcalReconstructed.Fill()
223 if global_variables.vertexing:
224# now go for 2-track combinations
225 self.Vertexing.execute()
226
227 def digitize(self):
228 self.sTree.t0 = self.random.Rndm()*1*u.microsecond
229 self.header.SetEventTime( self.sTree.t0 )
230 self.header.SetRunId( self.sTree.MCEventHeader.GetRunID() )
231 self.header.SetMCEntryNumber( self.sTree.MCEventHeader.GetEventID() ) # counts from 1
232 self.eventHeader.Fill()
233 self.digiSBT.Delete()
234 self.digiSBT2MC.clear()
235 self.digitizeSBT()
236 self.digiSBTBranch.Fill()
237 self.mcLinkSBT.Fill()
238 self.digiStraw.Delete()
239 self.digitizeStrawTubes()
240 self.digiStrawBranch.Fill()
241 self.digiTimeDet.Delete()
242 self.digitizeTimeDet()
243 self.digiTimeDetBranch.Fill()
244 # self.digiUpstreamTagger.Delete()
245 # self.digitizeUpstreamTagger() TR 19/6/2020 work in progress
246 # self.digiUpstreamTaggerBranch.Fill()
247 self.digiMuon.Delete()
248 self.digitizeMuon()
249 self.digiMuonBranch.Fill()
250 if self.sTree.GetBranch("splitcalPoint"):
251 self.digiSplitcal.Delete()
252 self.recoSplitcal.Delete()
253 self.digitizeSplitcal()
254 self.digiSplitcalBranch.Fill()
255 self.recoSplitcalBranch.Fill()
256
257 def digitizeSplitcal(self):
258 listOfDetID = {} # keep only one hit per cell/strip; if more points fall in the same cell/strip, just sum up the energy
259 index = 0
260 for aMCPoint in self.sTree.splitcalPoint:
261 aHit = ROOT.splitcalHit(aMCPoint,self.sTree.t0)
262 detID = aHit.GetDetectorID()
263 if detID not in listOfDetID:
264 if self.digiSplitcal.GetSize() == index:
265 self.digiSplitcal.Expand(index+1000)
266 listOfDetID[detID] = index
267 self.digiSplitcal[index]=aHit
268 index+=1
269 else:
270 indexOfExistingHit = listOfDetID[detID]
271 self.digiSplitcal[indexOfExistingHit].UpdateEnergy(aHit.GetEnergy())
272 self.digiSplitcal.Compress() #remove empty slots from array
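# (illustrative sketch, not part of the original file) The loop above keeps one hit per detector ID and
# adds up the energy of further points falling in the same cell/strip. The same idea on made-up
# (detID, energy) pairs:
# points = [(1001, 0.50), (1002, 0.20), (1001, 0.30)]
# summed = {}
# for det, e in points:
#   summed[det] = summed.get(det, 0.0) + e
# # summed == {1001: 0.8, 1002: 0.2} -> one entry per cell/strip, energies summed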
273
274
277
278 # hit selection
279 # step 0: select hits above noise threshold to use in cluster reconstruction
280 noise_energy_threshold = 0.002 #GeV
281 #noise_energy_threshold = 0.0015 #GeV
282 list_hits_above_threshold = []
283 # print '--- digitizeSplitcal - self.digiSplitcal.GetSize() = ', self.digiSplitcal.GetSize()
284 for hit in self.digiSplitcal:
285 if hit.GetEnergy() > noise_energy_threshold:
286 hit.SetIsUsed(0)
287 # hit.SetEnergyWeight(1)
288 list_hits_above_threshold.append(hit)
289
290 self.list_hits_above_threshold = list_hits_above_threshold
291
292 # print '--- digitizeSplitcal - n hits above threshold = ', len(list_hits_above_threshold)
293
294 # clustering
295 # step 1: group of neighbouring cells: loose criteria -> splitting clusters is easier than merging clusters
296
297 self.step = 1
298 self.input_hits = list_hits_above_threshold
299 list_clusters_of_hits = self.Clustering()
300
301 # step 2: to check if clusters can be split, do clustering separately in the XZ and YZ planes
302
303 self.step = 2
304 # print "--- digitizeSplitcal ==== STEP 2 ==== "
305 list_final_clusters = {}
306 index_final_cluster = 0
307
308 for i in list_clusters_of_hits:
309
310 list_hits_x = []
311 list_hits_y = []
312 for hit in list_clusters_of_hits[i]:
313 hit.SetIsUsed(0)
314 if hit.IsX(): list_hits_x.append(hit)
315 if hit.IsY(): list_hits_y.append(hit) # FIXME: check if this could work also with high precision layers
316
317
318
319 #re-run reclustering only in xz plane possibly with different criteria
320 self.input_hits = list_hits_x
321 list_subclusters_of_x_hits = self.Clustering()
322 cluster_energy_x = self.GetClusterEnergy(list_hits_x)
323
324 # print "--- digitizeSplitcal - len(list_subclusters_of_x_hits) = ", len(list_subclusters_of_x_hits)
325
326 self.list_subclusters_of_hits = list_subclusters_of_x_hits
327 list_of_subclusters_x = self.GetSubclustersExcludingFragments()
328
329 # compute energy weight
330 weights_from_x_splitting = {}
331 for index_subcluster in list_of_subclusters_x:
332 subcluster_energy_x = self.GetClusterEnergy(list_of_subclusters_x[index_subcluster])
333 weight = subcluster_energy_x/cluster_energy_x
334 # print "======> weight = ", weight
335 weights_from_x_splitting[index_subcluster] = weight
336
337
338
339 #re-run reclustering only in yz plane possibly with different criteria
340 self.input_hits = list_hits_y
341 list_subclusters_of_y_hits = self.Clustering()
342 cluster_energy_y = self.GetClusterEnergy(list_hits_y)
343
344 # print "--- digitizeSplitcal - len(list_subclusters_of_y_hits) = ", len(list_subclusters_of_y_hits)
345
346 self.list_subclusters_of_hits = list_subclusters_of_y_hits
347 list_of_subclusters_y = self.GetSubclustersExcludingFragments()
348
349 # compute energy weight
350 weights_from_y_splitting = {}
351 for index_subcluster in list_of_subclusters_y:
352 subcluster_energy_y = self.GetClusterEnergy(list_of_subclusters_y[index_subcluster])
353 weight = subcluster_energy_y/cluster_energy_y
354 # print "======> weight = ", weight
355 weights_from_y_splitting[index_subcluster] = weight
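# (illustrative sketch, not part of the original file) How the energy weights above behave, with made-up
# numbers: a cluster whose x hits carry 10 GeV in total and split into two x subclusters of 7 and 3 GeV:
# cluster_energy_x = 10.0
# subcluster_energies_x = {0: 7.0, 1: 3.0}
# weights_from_x_splitting = {k: e/cluster_energy_x for k, e in subcluster_energies_x.items()}
# # weights_from_x_splitting == {0: 0.7, 1: 0.3}; the y hits paired with each x subcluster below
# # receive these weights via AddEnergyWeight, so shared energy is divided rather than double counted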
356
357
358
359
360 # final list of clusters
361 # In principle one could go directly to the loop without checking whether the x/y subcluster lists have size 1.
362 # But in the second step the reclustering can throw away a few lonely hits, making the weight just below 1.
363 # Until a way to recover those hits is found, this is a quick fix.
364 if len(list_of_subclusters_x) == 1 and len(list_of_subclusters_y) == 1:
365 list_final_clusters[index_final_cluster] = list_clusters_of_hits[i]
366 for hit in list_final_clusters[index_final_cluster]:
367 hit.AddClusterIndex(index_final_cluster)
368 hit.AddEnergyWeight(1.)
369 index_final_cluster += 1
370 else:
371
372 # this works, but one could try to reduce the number of shared hits
373
374 for ix in list_of_subclusters_x:
375 for iy in list_of_subclusters_y:
376
377 for hit in list_of_subclusters_y[iy]:
378 hit.AddClusterIndex(index_final_cluster)
379 hit.AddEnergyWeight(weights_from_x_splitting[ix])
380
381 for hit in list_of_subclusters_x[ix]:
382 hit.AddClusterIndex(index_final_cluster)
383 hit.AddEnergyWeight(weights_from_y_splitting[iy])
384
385 list_final_clusters[index_final_cluster] = list_of_subclusters_y[iy] + list_of_subclusters_x[ix]
386 index_final_cluster += 1
387
388 # # try to reduce number of shared hits (if it does not work go back to solution above)
389 # # ok, it has potential but it needs more thinking
390
391 # for ix in list_of_subclusters_x:
392 # for iy in list_of_subclusters_y:
393
394 # list_final_clusters[index_final_cluster] = []
395
396 # for hitx in list_of_subclusters_x[ix]:
397 # self.input_hits = list_of_subclusters_y[iy]
398 # neighbours = self.getNeighbours(hitx)
399 # if len(neighbours) > 0:
400 # hitx.AddClusterIndex(index_final_cluster)
401 # hitx.AddEnergyWeight(weights_from_y_splitting[iy])
402 # list_final_clusters[index_final_cluster].append(hitx)
403 # for hity in neighbours:
404 # hity.AddClusterIndex(index_final_cluster)
405 # hity.AddEnergyWeight(weights_from_x_splitting[ix])
406 # list_final_clusters[index_final_cluster].append(hity)
407
408 # index_final_cluster += 1
409
410
411
414
415 for i in list_final_clusters:
416 # print '------------------------'
417 # print '------ digitizeSplitcal - cluster n = ', i
418 # print '------ digitizeSplitcal - cluster size = ', len(list_final_clusters[i])
419
420 for j,h in enumerate(list_final_clusters[i]):
421 if j==0: aCluster = ROOT.splitcalCluster(h)
422 else: aCluster.AddHit(h)
423
424 aCluster.SetIndex(int(i))
425 aCluster.ComputeEtaPhiE()
426 # aCluster.Print()
427
428 if self.recoSplitcal.GetSize() == i:
429 self.recoSplitcal.Expand(i+1000)
430 self.recoSplitcal[i]=aCluster
431
432 self.recoSplitcal.Compress() #remove empty slots from array
433
434
435 # #################
436 # # visualisation #
437 # #################
438
439 # c_yz = ROOT.TCanvas("c_yz","c_yz", 200, 10, 800, 800)
440 # graphs_yz = []
441
442 # for i in list_final_clusters:
443
444 # gr_yz = ROOT.TGraphErrors()
445 # gr_yz.SetLineColor( i+1 )
446 # gr_yz.SetLineWidth( 2 )
447 # gr_yz.SetMarkerColor( i+1 )
448 # gr_yz.SetMarkerStyle( 21 )
449 # gr_yz.SetTitle( 'clusters in y-z plane' )
450 # gr_yz.GetXaxis().SetTitle( 'Y [cm]' )
451 # gr_yz.GetYaxis().SetTitle( 'Z [cm]' )
452
453 # for j,hit in enumerate (list_final_clusters[i]):
454
455 # gr_yz.SetPoint(j,hit.GetY(),hit.GetZ())
456 # gr_yz.SetPointError(j,hit.GetYError(),hit.GetZError())
457
458 # gr_yz.GetXaxis().SetLimits( -620, 620 )
459 # gr_yz.GetYaxis().SetRangeUser( 3600, 3800 )
460 # graphs_yz.append(gr_yz)
461
462 # c_yz.cd()
463 # c_yz.Update()
464 # if i==0:
465 # graphs_yz[-1].Draw( 'AP' )
466 # else:
467 # graphs_yz[-1].Draw( 'P' )
468
469 # c_yz.Print("final_clusters_yz.eps")
470
471 # ############################
472
473
474
475 def GetSubclustersExcludingFragments(self):
476
477 list_subclusters_excluding_fragments = {}
478
479 fragment_indices = []
480 subclusters_indices = []
481 for k in self.list_subclusters_of_hits:
482 subcluster_size = len(self.list_subclusters_of_hits[k])
483 if subcluster_size < 5: #FIXME: it can be tuned on a physics case (maybe use fraction of hits or energy)
484 fragment_indices.append(k)
485 else:
486 subclusters_indices.append(k)
487 # if len(subclusters_indices) > 1:
488 # print "--- digitizeSplitcal - *** CLUSTER NEED TO BE SPLIT - set energy weight"
489 # else:
490 # print "--- digitizeSplitcal - CLUSTER DOES NOT NEED TO BE SPLIT "
491
492 # merge fragments into the closest subcluster. If there is no subcluster and everything is fragmented, merge all the fragments together
493 minDistance = -1
494 minIndex = -1
495
496 if len(subclusters_indices) == 0 and len(fragment_indices) != 0: # only fragments
497 subclusters_indices.append(0) # merge all fragments into the first fragment
498
499 for index_fragment in fragment_indices:
500 #print "--- index_fragment = ", index_fragment
501 first_hit_fragment = self.list_subclusters_of_hits[index_fragment][0]
502 for index_subcluster in subclusters_indices:
503 #print "--- index_subcluster = ", index_subcluster
504 first_hit_subcluster = self.list_subclusters_of_hits[index_subcluster][0]
505 if first_hit_fragment.IsX():
506 distance = fabs(first_hit_fragment.GetX()-first_hit_subcluster.GetX())
507 else:
508 distance = fabs(first_hit_fragment.GetY()-first_hit_subcluster.GetY())
509 if (minDistance < 0 or distance < minDistance):
510 minDistance = distance
511 minIndex = index_subcluster
512
513 # for h in self.list_subclusters_of_hits[index_fragment]:
514 # h.SetClusterIndex(minIndex)
515
516 #print "--- minIndex = ", minIndex
517 if minIndex != index_fragment: # in case there were only fragments - this prevents summing fragment 0 twice
518 #print "--- BEFORE - len(self.list_subclusters_of_hits[minIndex]) = ", len(self.list_subclusters_of_hits[minIndex])
519 self.list_subclusters_of_hits[minIndex] += self.list_subclusters_of_hits[index_fragment]
520 #print "--- AFTER - len(self.list_subclusters_of_hits[minIndex]) = ", len(self.list_subclusters_of_hits[minIndex])
521
522
523 for counter, index_subcluster in enumerate(subclusters_indices):
524 list_subclusters_excluding_fragments[counter] = self.list_subclusters_of_hits[index_subcluster]
525
526 return list_subclusters_excluding_fragments
527
528
529
530 def GetClusterEnergy(self, list_hits):
531 energy = 0
532 for hit in list_hits:
533 energy += hit.GetEnergy()
534 return energy
535
536
537 def Clustering(self):
538
539 list_hits_in_cluster = {}
540 cluster_index = -1
541
542 for i,hit in enumerate(self.input_hits):
543 if hit.IsUsed()==1:
544 continue
545
546 neighbours = self.getNeighbours(hit)
547 # hit.Print()
548 # #print "--- digitizeSplitcal - index of unused hit = ", i
549 # print '--- digitizeSplitcal - hit has n neighbours = ', len(neighbours)
550
551 if len(neighbours) < 1:
552 # # hit.SetClusterIndex(-1) # lonely fragment
553 # print '--- digitizeSplitcal - lonely fragment '
554 continue
555
556 cluster_index = cluster_index + 1
557 hit.SetIsUsed(1)
558 # hit.SetClusterIndex(cluster_index)
559 list_hits_in_cluster[cluster_index] = []
560 list_hits_in_cluster[cluster_index].append(hit)
561 #print '--- digitizeSplitcal - cluster_index = ', cluster_index
562
563 for neighbouringHit in neighbours:
564 # print '--- digitizeSplitcal - in neighbouringHit - len(neighbours) = ', len(neighbours)
565
566 if neighbouringHit.IsUsed()==1:
567 continue
568
569 # neighbouringHit.SetClusterIndex(cluster_index)
570 neighbouringHit.SetIsUsed(1)
571 list_hits_in_cluster[cluster_index].append(neighbouringHit)
572
573 # ## test ###
574 # # for step 2, add hits of different type to subcluster but to not look for their neighbours
575 # not_same_type = hit.IsX() != neighbouringHit.IsX()
576 # if self.step==2 and not_same_type:
577 # continue
578 # ###########
579
580 expand_neighbours = self.getNeighbours(neighbouringHit)
581 # print '--- digitizeSplitcal - len(expand_neighbours) = ', len(expand_neighbours)
582
583 if len(expand_neighbours) >= 1:
584 for additionalHit in expand_neighbours:
585 if additionalHit not in neighbours:
586 neighbours.append(additionalHit)
587
588 return list_hits_in_cluster
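# (illustrative sketch, not part of the original file) Clustering() grows each cluster by appending the
# neighbours of neighbours to the list it is iterating over, i.e. a breadth-first expansion. The same
# pattern on toy integers, where two values are "neighbours" when they differ by 1:
# hits = [1, 2, 3, 7, 8, 20]
# def toy_neighbours(h, allhits): return [x for x in allhits if x != h and abs(x - h) == 1]
# used, clusters = set(), []
# for h in hits:
#   if h in used or not toy_neighbours(h, hits): continue   # skip used hits and lonely fragments
#   used.add(h)
#   cluster, queue = [h], toy_neighbours(h, hits)
#   for n in queue:                                          # queue keeps growing while being iterated
#     if n in used: continue
#     used.add(n)
#     cluster.append(n)
#     for m in toy_neighbours(n, hits):
#       if m not in queue: queue.append(m)
#   clusters.append(cluster)
# # clusters == [[1, 2, 3], [7, 8]]; 20 has no neighbour and is skipped, like a lonely fragment above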
589
590
591
592 def getNeighbours(self,hit):
593
594 list_neighbours = []
595 err_x_1 = hit.GetXError()
596 err_y_1 = hit.GetYError()
597 err_z_1 = hit.GetZError()
598
599 layer_1 = hit.GetLayerNumber()
600
601 # allow one or more 'missing' hits in x/y: little difference between 1 (no gap) and 2 (one 'missing' hit)
602 max_gap = 2.
603 if hit.IsX(): err_x_1 = err_x_1*max_gap
604 if hit.IsY(): err_y_1 = err_y_1*max_gap
605
606 for hit2 in self.input_hits:
607 if hit2 is not hit:
608 Dx = fabs(hit2.GetX()-hit.GetX())
609 Dy = fabs(hit2.GetY()-hit.GetY())
610 Dz = fabs(hit2.GetZ()-hit.GetZ())
611 err_x_2 = hit2.GetXError()
612 err_y_2 = hit2.GetYError()
613 err_z_2 = hit2.GetZError()
614
615 # layer_2 = hit2.GetLayerNumber()
616 # Dlayer = fabs(layer_2-layer_1)
617
618 # allow one or more 'missing' hit in x/y
619 if hit2.IsX(): err_x_2 = err_x_2*max_gap
620 if hit2.IsY(): err_y_2 = err_y_2*max_gap
621
622 if self.step == 1: # or self.step == 2:
623 # use Dz instead of Dlayer due to the 1 m split between the 2 parts of the calo
624 #if ((Dx<=(err_x_1+err_x_2) or Dy<=(err_y_1+err_y_2)) and Dz<=2*(err_z_1+err_z_2)):
625 #if ((Dx<=(err_x_1+err_x_2) and Dy<=(err_y_1+err_y_2)) and Dz<=2*(err_z_1+err_z_2)):
626 if hit.IsX():
627 if (Dx<=(err_x_1+err_x_2) and Dz<=2*(err_z_1+err_z_2) and ((Dy<=(err_y_1+err_y_2) and Dz>0.) or (Dy==0)) ):
628 list_neighbours.append(hit2)
629 if hit.IsY():
630 if (Dy<=(err_y_1+err_y_2) and Dz<=2*(err_z_1+err_z_2) and ((Dx<=(err_x_1+err_x_2) and Dz>0.) or (Dx==0)) ):
631 list_neighbours.append(hit2)
632
633 elif self.step == 2:
634 #for step 2, relax or remove the Dz condition entirely
635 #(some clusters were erroneously split along z, while here one wants to split only in x/y)
636 # first try: relax z condition
637 if hit.IsX():
638 if (Dx<=(err_x_1+err_x_2) and Dz<=6*(err_z_1+err_z_2) and ((Dy<=(err_y_1+err_y_2) and Dz>0.) or (Dy==0)) ):
639 #if (Dx<=(err_x_1+err_x_2) and Dz<=6*(err_z_1+err_z_2) and Dy<=(err_y_1+err_y_2) and Dz>0.):
640 list_neighbours.append(hit2)
641 if hit.IsY():
642 if (Dy<=(err_y_1+err_y_2) and Dz<=6*(err_z_1+err_z_2) and ((Dx<=(err_x_1+err_x_2) and Dz>0.) or (Dx==0)) ):
643 #if (Dy<=(err_y_1+err_y_2) and Dz<=6*(err_z_1+err_z_2) and Dx<=(err_x_1+err_x_2) and Dz>0. ):
644 list_neighbours.append(hit2)
645 else:
646 print("-- getNeighbours: ERROR: step not defined ")
647
648 return list_neighbours
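# (illustrative note, not part of the original file) The max_gap factor above widens the acceptance so
# that one missing strip does not break a cluster. With made-up errors err_x_1 = err_x_2 = 2 cm and
# max_gap = 2., two x-type hits count as neighbours in x up to
# Dx <= max_gap*err_x_1 + max_gap*err_x_2 = 8 cm instead of 4 cm.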
649
650
651
652
653 def digitizeTimeDet(self):
654 index = 0
655 hitsPerDetId = {}
656 for aMCPoint in self.sTree.TimeDetPoint:
657 aHit = ROOT.TimeDetHit(aMCPoint,self.sTree.t0)
658 if self.digiTimeDet.GetSize() == index: self.digiTimeDet.Expand(index+1000)
659 self.digiTimeDet[index]=aHit
660 detID = aHit.GetDetectorID()
661 if aHit.isValid():
662 if detID in hitsPerDetId:
663 t = self.digiTimeDet[hitsPerDetId[detID]].GetMeasurements() # previously stored hit
664 ct = aHit.GetMeasurements() # current hit
665# this is not really correct, only a first attempt
666# the case where only one of the two measurements is earlier is not taken into account
667# SetTDC(Float_t val1, Float_t val2)
668 if t[0]>ct[0] or t[1]>ct[1]:
669 # second hit with smaller tdc
670 self.digiTimeDet[hitsPerDetId[detID]].setInvalid()
671 hitsPerDetId[detID] = index
672 index+=1
673
674 def digitizeUpstreamTagger(self):
675 index = 0
676 hitsPerDetId = {}
677 for aMCPoint in self.sTree.UpstreamTaggerPoint:
678 aHit = ROOT.UpstreamTaggerHit(aMCPoint, global_variables.modules["UpstreamTagger"], self.sTree.t0)
679 if self.digiUpstreamTagger.GetSize() == index: self.digiUpstreamTagger.Expand(index+1000)
680 self.digiUpstreamTagger[index]=aHit
681 detID = aHit.GetDetectorID()
682 if aHit.isValid():
683 if detID in hitsPerDetId:
684 t = self.digiUpstreamTagger[hitsPerDetId[detID]].GetMeasurements() # previously stored hit
685 ct = aHit.GetMeasurements() # current hit
686# this is not really correct, only a first attempt
687# the case where only one of the two measurements is earlier is not taken into account
688# SetTDC(Float_t val1, Float_t val2)
689 if t[0]>ct[0] or t[1]>ct[1]:
690 # second hit with smaller tdc
691 self.digiUpstreamTagger[hitsPerDetId[detID]].setInvalid()
692 hitsPerDetId[detID] = index
693 index+=1
694
695
696 def digitizeMuon(self):
697 index = 0
698 hitsPerDetId = {}
699 for aMCPoint in self.sTree.muonPoint:
700 aHit = ROOT.muonHit(aMCPoint,self.sTree.t0)
701 if self.digiMuon.GetSize() == index: self.digiMuon.Expand(index+1000)
702 self.digiMuon[index]=aHit
703 detID = aHit.GetDetectorID()
704 if aHit.isValid():
705 if detID in hitsPerDetId:
706 if self.digiMuon[hitsPerDetId[detID]].GetDigi() > aHit.GetDigi():
707 # second hit with smaller tdc
708 self.digiMuon[hitsPerDetId[detID]].setValidity(0)
709 hitsPerDetId[detID] = index
710 index+=1
711
712 def digitizeSBT(self):
713 ElossPerDetId = {}
714 tOfFlight = {}
715 listOfVetoPoints = {}
716 key=-1
717 for aMCPoint in self.sTree.vetoPoint:
718 key+=1
719 detID=aMCPoint.GetDetectorID()
720 Eloss=aMCPoint.GetEnergyLoss()
721 if detID not in ElossPerDetId:
722 ElossPerDetId[detID]=0
723 listOfVetoPoints[detID]=[]
724 tOfFlight[detID]=[]
725 ElossPerDetId[detID] += Eloss
726 listOfVetoPoints[detID].append(key)
727 tOfFlight[detID].append(aMCPoint.GetTime())
728 index=0
729 for seg in ElossPerDetId:
730 aHit = ROOT.vetoHit(seg,ElossPerDetId[seg])
731 aHit.SetTDC(min( tOfFlight[seg] ) + self.sTree.t0 )
732 if self.digiSBT.GetSize() == index:
733 self.digiSBT.Expand(index+1000)
734 if ElossPerDetId[seg]<0.045: aHit.setInvalid() # threshold for liquid scintillator, source Berlin group
735 self.digiSBT[index] = aHit
736 v = ROOT.std.vector('int')()
737 for x in listOfVetoPoints[seg]:
738 v.push_back(x)
739 self.digiSBT2MC.push_back(v)
740 index=index+1
741 def digitizeStrawTubes(self):
742 # digitize FairSHiP MC hits
743 index = 0
744 hitsPerDetId = {}
745 for aMCPoint in self.sTree.strawtubesPoint:
746 aHit = ROOT.strawtubesHit(aMCPoint,self.sTree.t0)
747 if self.digiStraw.GetSize() == index: self.digiStraw.Expand(index+1000)
748 self.digiStraw[index]=aHit
749 if aHit.isValid():
750 detID = aHit.GetDetectorID()
751 if detID in hitsPerDetId:
752 if self.digiStraw[hitsPerDetId[detID]].GetTDC() > aHit.GetTDC():
753 # second hit with smaller tdc
754 self.digiStraw[hitsPerDetId[detID]].setInvalid()
755 hitsPerDetId[detID] = index
756 index+=1
757
758 def withT0Estimate(self):
759 # loop over all straw tdcs and make average, correct for ToF
760 n = 0
761 t0 = 0.
762 key = -1
763 SmearedHits = []
764 v_drift = global_variables.modules["Strawtubes"].StrawVdrift()
765 global_variables.modules["Strawtubes"].StrawEndPoints(10002001, start, stop)
766 z1 = stop.z()
767 for aDigi in self.digiStraw:
768 key+=1
769 if not aDigi.isValid(): continue
770 detID = aDigi.GetDetectorID()
771# don't use hits from straw veto
772 station = int(detID//10000000)
773 if station > 4 : continue
774 global_variables.modules["Strawtubes"].StrawEndPoints(detID, start, stop)
775 delt1 = (start[2]-z1)/u.speedOfLight
776 t0+=aDigi.GetDigi()-delt1
777 SmearedHits.append( {'digiHit':key,'xtop':stop.x(),'ytop':stop.y(),'z':stop.z(),'xbot':start.x(),'ybot':start.y(),'dist':aDigi.GetDigi(), 'detID':detID} )
778 n+=1
779 if n>0: t0 = t0/n - 73.2*u.ns
780 for s in SmearedHits:
781 delt1 = (s['z']-z1)/u.speedOfLight
782 s['dist'] = (s['dist'] -delt1 -t0)*v_drift
783 return SmearedHits
784
785 def smearHits(self,no_amb=None):
786 # smear strawtube points
787 SmearedHits = []
788 key = -1
789 v_drift = global_variables.modules["Strawtubes"].StrawVdrift()
790 global_variables.modules["Strawtubes"].StrawEndPoints(10002001, start, stop)
791 z1 = stop.z()
792 for aDigi in self.digiStraw:
793 key+=1
794 if not aDigi.isValid(): continue
795 detID = aDigi.GetDetectorID()
796# don't use hits from straw veto
797 station = int(detID//10000000)
798 if station > 4 : continue
799 global_variables.modules["Strawtubes"].StrawEndPoints(detID, start, stop)
800 #distance to wire
801 delt1 = (start[2]-z1)/u.speedOfLight
802 p=self.sTree.strawtubesPoint[key]
803 # use true t0 construction:
804 # fdigi = t0 + p->GetTime() + t_drift + ( stop[0]-p->GetX() )/ speedOfLight;
805 smear = (aDigi.GetDigi() - self.sTree.t0 - p.GetTime() - ( stop[0]-p.GetX() )/ u.speedOfLight) * v_drift
806 if no_amb: smear = p.dist2Wire()
807 SmearedHits.append( {'digiHit':key,'xtop':stop.x(),'ytop':stop.y(),'z':stop.z(),'xbot':start.x(),'ybot':start.y(),'dist':smear, 'detID':detID} )
808 # Note: top.z()==bot.z() unless misaligned, so only add key 'z' to smearedHit
809 if abs(stop.y()) == abs(start.y()):
810 global_variables.h['disty'].Fill(smear)
811 elif abs(stop.y()) > abs(start.y()):
812 global_variables.h['distu'].Fill(smear)
813 elif abs(stop.y()) < abs(start.y()):
814 global_variables.h['distv'].Fill(smear)
815
816 return SmearedHits
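# (illustrative sketch, not part of the original file) The smearing above inverts the digitization model
# quoted in the comment, fdigi = t0 + event time + drift time + signal propagation time, and converts the
# remaining drift time into a distance. With made-up numbers:
# t0, t_event, t_signal, v_drift = 100.0, 5.0, 1.0, 0.002   # ns, ns, ns, cm/ns
# fdigi = t0 + t_event + 250.0 + t_signal                   # digi built from 250 ns of drift time
# dist = (fdigi - t0 - t_event - t_signal) * v_drift        # recovers 250 ns * v_drift = 0.5 cm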
817
818 def findTracks(self):
819 hitPosLists = {}
820 stationCrossed = {}
821 fittedtrackids=[]
822 listOfIndices = {}
823 self.fGenFitArray.Clear()
824 self.fTrackletsArray.Delete()
825 self.fitTrack2MC.clear()
826
827#
828 if global_variables.withT0:
829 self.SmearedHits = self.withT0Estimate()
830 # old procedure, not including estimation of t0
831 else:
832 self.SmearedHits = self.smearHits(global_variables.withNoStrawSmearing)
833
834 nTrack = -1
835 trackCandidates = []
836
837 if global_variables.realPR:
838 # Do real PatRec
839 track_hits = shipPatRec.execute(self.SmearedHits, global_variables.ShipGeo, global_variables.realPR)
840 # Create hitPosLists for track fit
841 for i_track in track_hits.keys():
842 atrack = track_hits[i_track]
843 atrack_y12 = atrack['y12']
844 atrack_stereo12 = atrack['stereo12']
845 atrack_y34 = atrack['y34']
846 atrack_stereo34 = atrack['stereo34']
847 atrack_smeared_hits = list(atrack_y12) + list(atrack_stereo12) + list(atrack_y34) + list(atrack_stereo34)
848 for sm in atrack_smeared_hits:
849 detID = sm['detID']
850 station = int(detID//10000000)
851 trID = i_track
852 # Collect hits for track fit
853 if trID not in hitPosLists:
854 hitPosLists[trID] = ROOT.std.vector('TVectorD')()
855 listOfIndices[trID] = []
856 stationCrossed[trID] = {}
857 m = array('d',[sm['xtop'],sm['ytop'],sm['z'],sm['xbot'],sm['ybot'],sm['z'],sm['dist']])
858 hitPosLists[trID].push_back(ROOT.TVectorD(7,m))
859 listOfIndices[trID].append(sm['digiHit'])
860 if station not in stationCrossed[trID]:
861 stationCrossed[trID][station] = 0
862 stationCrossed[trID][station] += 1
863 else: # do fake pattern recognition
864 for sm in self.SmearedHits:
865 detID = self.digiStraw[sm['digiHit']].GetDetectorID()
866 station = int(detID//10000000)
867 trID = self.sTree.strawtubesPoint[sm['digiHit']].GetTrackID()
868 if trID not in hitPosLists:
869 hitPosLists[trID] = ROOT.std.vector('TVectorD')()
870 listOfIndices[trID] = []
871 stationCrossed[trID] = {}
872 m = array('d',[sm['xtop'],sm['ytop'],sm['z'],sm['xbot'],sm['ybot'],sm['z'],sm['dist']])
873 hitPosLists[trID].push_back(ROOT.TVectorD(7,m))
874 listOfIndices[trID].append(sm['digiHit'])
875 if station not in stationCrossed[trID]: stationCrossed[trID][station]=0
876 stationCrossed[trID][station]+=1
877#
878 # for atrack in listOfIndices:
879 # # make tracklets out of trackCandidates, just for testing, should be output of proper pattern recognition
880 # nTracks = self.fTrackletsArray.GetEntries()
881 # aTracklet = self.fTrackletsArray.ConstructedAt(nTracks)
882 # listOfHits = aTracklet.getList()
883 # aTracklet.setType(3)
884 # for index in listOfIndices[atrack]:
885 # listOfHits.push_back(index)
886#
887 for atrack in hitPosLists:
888 if atrack < 0: continue # these are hits not assigned to an MC track because of the low-energy cut
889 pdg = self.sTree.MCTrack[atrack].GetPdgCode()
890 if not self.PDG.GetParticle(pdg): continue # unknown particle
891 # pdg = 13
892 meas = hitPosLists[atrack]
893 nM = meas.size()
894 if nM < 25 : continue # not enough hits to make a good trackfit
895 if len(stationCrossed[atrack]) < 3 : continue # not enough stations crossed to make a good trackfit
896 if global_variables.debug:
897 mctrack = self.sTree.MCTrack[atrack]
898 # charge = self.PDG.GetParticle(pdg).Charge()/(3.)
899 posM = ROOT.TVector3(0, 0, 0)
900 momM = ROOT.TVector3(0,0,3.*u.GeV)
901# approximate covariance
902 covM = ROOT.TMatrixDSym(6)
903 resolution = self.sigma_spatial
904 if global_variables.withT0:
905 resolution *= 1.4 # worse resolution due to t0 estimate
906 for i in range(3): covM[i][i] = resolution*resolution
907 covM[0][0]=resolution*resolution*100.
908 for i in range(3,6): covM[i][i] = ROOT.TMath.Power(resolution / nM / ROOT.TMath.Sqrt(3), 2)
909# trackrep
910 rep = ROOT.genfit.RKTrackRep(pdg)
911# smeared start state
912 stateSmeared = ROOT.genfit.MeasuredStateOnPlane(rep)
913 rep.setPosMomCov(stateSmeared, posM, momM, covM)
914# create track
915 seedState = ROOT.TVectorD(6)
916 seedCov = ROOT.TMatrixDSym(6)
917 rep.get6DStateCov(stateSmeared, seedState, seedCov)
918 theTrack = ROOT.genfit.Track(rep, seedState, seedCov)
919 hitCov = ROOT.TMatrixDSym(7)
920 hitCov[6][6] = resolution*resolution
921 for m in meas:
922 tp = ROOT.genfit.TrackPoint(theTrack) # note how the point is told which track it belongs to
923 measurement = ROOT.genfit.WireMeasurement(m,hitCov,1,6,tp) # the measurement is told which trackpoint it belongs to
924 # print measurement.getMaxDistance()
925 measurement.setMaxDistance(global_variables.ShipGeo.strawtubes.InnerStrawDiameter / 2.)
926 # measurement.setLeftRightResolution(-1)
927 tp.addRawMeasurement(measurement) # package measurement in the TrackPoint
928 theTrack.insertPoint(tp) # add point to Track
929 # print "debug meas",atrack,nM,stationCrossed[atrack],self.sTree.MCTrack[atrack],pdg
930 trackCandidates.append([theTrack,atrack])
931
932 for entry in trackCandidates:
933#check
934 atrack = entry[1]
935 theTrack = entry[0]
936 if not theTrack.checkConsistency():
937 print('Problem with track before fit, not consistent',atrack,theTrack)
938 continue
939# do the fit
940 try: self.fitter.processTrack(theTrack) # processTrackWithRep(theTrack,rep,True)
941 except:
942 if global_variables.debug:
943 print("genfit failed to fit track")
944 error = "genfit failed to fit track"
945 ut.reportError(error)
946 continue
947#check
948 if not theTrack.checkConsistency():
949 if global_variables.debug:
950 print('Problem with track after fit, not consistent', atrack, theTrack)
951 error = "Problem with track after fit, not consistent"
952 ut.reportError(error)
953 continue
954 try:
955 fittedState = theTrack.getFittedState()
956 fittedMom = fittedState.getMomMag()
957 except:
958 error = "Problem with fittedstate"
959 ut.reportError(error)
960 continue
961 fitStatus = theTrack.getFitStatus()
962 nmeas = fitStatus.getNdf()
963 chi2 = fitStatus.getChi2()/nmeas
964 global_variables.h['chi2'].Fill(chi2)
965# make track persistent
966 nTrack = self.fGenFitArray.GetEntries()
967 if not global_variables.debug:
968 theTrack.prune("CFL") # http://sourceforge.net/p/genfit/code/HEAD/tree/trunk/core/include/Track.h#l280
969 self.fGenFitArray[nTrack] = theTrack
970 # self.fitTrack2MC.push_back(atrack)
971 if global_variables.debug:
972 print('save track',theTrack,chi2,nmeas,fitStatus.isFitConverged())
973 # Save MC link
974 track_ids = []
975 for index in listOfIndices[atrack]:
976 ahit = self.sTree.strawtubesPoint[index]
977 track_ids += [ahit.GetTrackID()]
978 frac, tmax = self.fracMCsame(track_ids)
979 self.fitTrack2MC.push_back(tmax)
980 # Save the hit indices of the fitted tracks
981 nTracks = self.fTrackletsArray.GetEntries()
982 aTracklet = self.fTrackletsArray.ConstructedAt(nTracks)
983 listOfHits = aTracklet.getList()
984 aTracklet.setType(1)
985 for index in listOfIndices[atrack]:
986 listOfHits.push_back(index)
987 self.Tracklets.Fill()
988 self.fitTracks.Fill()
989 self.mcLink.Fill()
990# debug
991 if global_variables.debug:
992 print('save tracklets:')
993 for x in self.sTree.Tracklets:
994 print(x.getType(),x.getList().size())
995 return nTrack+1
996
997 def findGoodTracks(self):
998 self.goodTracksVect.clear()
999 nGoodTracks = 0
1000 for i,track in enumerate(self.fGenFitArray):
1001 fitStatus = track.getFitStatus()
1002 if not fitStatus.isFitConverged(): continue
1003 nmeas = fitStatus.getNdf()
1004 chi2 = fitStatus.getChi2()/nmeas
1005 if chi2<50 and not chi2<0:
1006 self.goodTracksVect.push_back(i)
1007 nGoodTracks+=1
1008 self.goodTracksBranch.Fill()
1009 return nGoodTracks
1010
1011 def findVetoHitOnTrack(self,track):
1012 distMin = 99999.
1013 vetoHitOnTrack = ROOT.vetoHitOnTrack()
1014 xx = track.getFittedState()
1015 rep = ROOT.genfit.RKTrackRep(xx.getPDG())
1016 state = ROOT.genfit.StateOnPlane(rep)
1017 rep.setPosMom(state,xx.getPos(),xx.getMom())
1018 for i,vetoHit in enumerate(self.digiSBT):
1019 vetoHitPos = vetoHit.GetXYZ()
1020 try:
1021 rep.extrapolateToPoint(state,vetoHitPos,False)
1022 except:
1023 error = "shipDigiReco::findVetoHitOnTrack extrapolation did not work"
1024 ut.reportError(error)
1025 if global_variables.debug:
1026 print(error)
1027 continue
1028 dist = (rep.getPos(state) - vetoHitPos).Mag()
1029 if dist < distMin:
1030 distMin = dist
1031 vetoHitOnTrack.SetDist(distMin)
1032 vetoHitOnTrack.SetHitID(i)
1033 return vetoHitOnTrack
1034
1035 def linkVetoOnTracks(self):
1036 self.vetoHitOnTrackArray.Delete()
1037 index = 0
1038 for goodTrak in self.goodTracksVect:
1039 track = self.fGenFitArray[goodTrak]
1040 if self.vetoHitOnTrackArray.GetSize() == index: self.vetoHitOnTrackArray.Expand(index+1000)
1041 self.vetoHitOnTrackArray[index] = self.findVetoHitOnTrack(track)
1042 index+=1
1043 self.vetoHitOnTrackBranch.Fill()
1044
1045 def fracMCsame(self, trackids):
1046 track={}
1047 nh=len(trackids)
1048 for tid in trackids:
1049 if tid in track:
1050 track[tid] += 1
1051 else:
1052 track[tid] = 1
1053 if track != {}:
1054 tmax = max(track, key=track.get)
1055 else:
1056 track = {-999: 0}
1057 tmax = -999
1058 frac=0.0
1059 if nh > 0:
1060 frac = float(track[tmax]) / float(nh)
1061 return frac, tmax
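# (illustrative sketch, not part of the original file) What fracMCsame() returns for a made-up list of
# MC track IDs attached to the hits of one fitted track:
# frac, tmax = self.fracMCsame([13, 13, 13, 42])
# # tmax == 13 (the most frequent MC track ID), frac == 0.75 (fraction of hits belonging to it)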
1062
1063 def finish(self):
1064 del self.fitter
1065 print('finished writing tree')
1066 self.sTree.Write()
1067 ut.errorSummary()
1068 ut.writeHists(global_variables.h,"recohists.root")
1069 if global_variables.realPR: