SND@LHC Software
SndlhcMuonReco.py
1 import ROOT
2 import numpy as np
3 import scipy.ndimage
4 from array import array
5 import xml.etree.ElementTree as ET
6 import matplotlib.pyplot as plt
7 import shipunit as unit
8
9 def hit_finder(slope, intercept, box_centers, box_ds, tol = 0.) :
10 """ Finds hits intersected by Hough line """
11
12 # First check if track at center of box is within box limits
13 d = np.abs(box_centers[0,:,1] - (box_centers[0,:,0]*slope + intercept))
14 center_in_box = d < (box_ds[0,:,1]+tol)/2.
15
16 # If the line misses the box at the box center, check whether the slope is large enough for the line to clip a box corner
17 clips_corner = np.abs(slope) > np.abs((d - (box_ds[0,:,1]+tol)/2.)/(box_ds[0,:,0]+tol)/2.)
18
19 # If either of these is true, line goes through hit:
20 hit_mask = np.logical_or(center_in_box, clips_corner)
21
22 # Return indices
23 return np.where(hit_mask)[0]
24
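# --- Editor's illustrative sketch (not part of the original file). ---
# Shows how hit_finder() is called: box_centers and box_ds have shape (1, N, 2),
# with column 0 = z and column 1 = the transverse coordinate, matching the
# np.dstack(...) calls in MuonReco.Exec(). All numbers below are toy values.
def _example_hit_finder():
    z = np.array([300., 320., 340.])   # hit z positions [cm]
    y = np.array([10.2, 11.3, 25.0])   # hit transverse positions [cm]
    dz = np.full(3, 1.0)               # box extent along z [cm]
    dy = np.full(3, 1.0)               # box extent along the transverse axis [cm]
    # The line y = 0.05*z - 4.8 crosses the first two boxes and misses the third,
    # so this returns array([0, 1]).
    return hit_finder(0.05, -4.8, np.dstack([z, y]), np.dstack([dz, dy]), tol=0.5)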
25 class hough() :
26 """ Hough transform implementation """
27
28 def __init__(self, n_yH, yH_range, n_xH, xH_range, z_offset, Hformat, space_scale, det_Zlen, squaretheta = False, smooth = True) :
29
30 self.n_yH = n_yH
31 self.n_xH = n_xH
32
33 self.yH_range = yH_range
34 self.xH_range = xH_range
35
36 self.z_offset = z_offset
37 self.HoughSpace_format = Hformat
38 self.space_scale = space_scale
39
40 self.det_Zlen = det_Zlen
41
42 self.smooth = smooth
43
44 self.yH_bins = np.linspace(self.yH_range[0], self.yH_range[1], n_yH)
45 if not squaretheta :
46 self.xH_bins = np.linspace(self.xH_range[0], self.xH_range[1], n_xH)
47 else :
48 self.xH_bins = np.linspace(np.sign(self.xH_range[0])*(self.xH_range[0]**0.5), np.sign(self.xH_range[1])*(self.xH_range[1]**0.5), n_xH)
49 self.xH_bins = np.sign(self.xH_bins)*np.square(self.xH_bins)
50
51 self.cos_thetas = np.cos(self.xH_bins)
52 self.sin_thetas = np.sin(self.xH_bins)
53
54 self.xH_i = np.array(list(range(n_xH)))
55
56 # A back-up Hough space designed to have more or fewer bins than the default one above.
57 # It is useful when fitting some low-energy muon tracks, which are curved due to multiple scattering.
58 self.n_yH_scaled = int(n_yH*space_scale)
59 self.n_xH_scaled = int(n_xH*space_scale)
60 self.yH_bins_scaled = np.linspace(self.yH_range[0], self.yH_range[1], self.n_yH_scaled)
61 if not squaretheta :
62 self.xH_bins_scaled= np.linspace(self.xH_range[0], self.xH_range[1], self.n_xH_scaled)
63 else :
64 self.xH_bins_scaled = np.linspace(np.sign(self.xH_range[0])*(self.xH_range[0]**0.5), np.sign(self.xH_range[1])*(self.xH_range[1]**0.5), self.n_xH_scaled)
65 self.xH_bins_scaled = np.sign(self.xH_bins_scaled)*np.square(self.xH_bins_scaled)
66
66
67 self.cos_thetas_scaled = np.cos(self.xH_bins_scaled)
68 self.sin_thetas_scaled = np.sin(self.xH_bins_scaled)
69
70 self.xH_i_scaled = np.array(list(range(self.n_xH_scaled)))
71
72 def fit(self, hit_collection, is_scaled, draw, weights = None) :
73
74 if not is_scaled:
75 n_xH = self.n_xH
76 n_yH = self.n_yH
77 cos_thetas = self.cos_thetas
78 sin_thetas = self.sin_thetas
79 xH_bins = self.xH_bins
80 yH_bins = self.yH_bins
81 xH_i = self.xH_i
82 res = self.res
83 else:
84 n_xH = self.n_xH_scaled
85 n_yH = self.n_yH_scaled
86 cos_thetas = self.cos_thetas_scaled
87 sin_thetas = self.sin_thetas_scaled
88 xH_bins = self.xH_bins_scaled
89 yH_bins = self.yH_bins_scaled
90 xH_i = self.xH_i_scaled
91 res = self.res*self.space_scale
92
93 self.accumulator = np.zeros((n_yH, n_xH))
94 for i_hit, hit in enumerate(hit_collection) :
95 shifted_hitZ = hit[0] - self.z_offset
96 if self.HoughSpace_format == 'normal':
97 hit_yH = shifted_hitZ*cos_thetas + hit[1]*sin_thetas
98 elif self.HoughSpace_format == 'linearSlopeIntercept':
99 hit_yH = hit[1] - shifted_hitZ*xH_bins
100 elif self.HoughSpace_format== 'linearIntercepts':
101 hit_yH = (self.det_Zlen*hit[1] - shifted_hitZ*xH_bins)/(self.det_Zlen - shifted_hitZ)
102 out_of_range = np.logical_and(hit_yH > self.yH_range[0], hit_yH < self.yH_range[1]) # despite its name, this mask selects hits whose yH lies inside the accumulator range
103 hit_yH_i = np.floor((hit_yH[out_of_range] - self.yH_range[0])/(self.yH_range[1] - self.yH_range[0])*n_yH).astype(np.int_)
104
105 if weights is not None :
106 self.accumulator[hit_yH_i, xH_i[out_of_range]] += weights[i_hit]
107 else :
108 self.accumulator[hit_yH_i, xH_i[out_of_range]] += 1
109
110 # Smooth accumulator
111 if self.smooth_full :
112 self.accumulator = scipy.ndimage.gaussian_filter(self.accumulator, self.sigma, truncate=self.truncate)
113
114 # Drawing the accumulator can be useful for debugging; it only happens if 'draw' is set.
115 if draw :
116 plt.figure()
117 plt.imshow(self.accumulator, origin = "lower", extent = [self.xH_range[0], self.xH_range[-1], self.yH_range[0], self.yH_range[-1]], aspect = "auto")
118 if self.HoughSpace_format == 'normal':
119 plt.xlabel(r"$\theta$ [rad]")
120 plt.ylabel("r [cm]")
121 elif self.HoughSpace_format == 'linearSlopeIntercept':
122 plt.xlabel("slope")
123 plt.ylabel("intercept @ 1st plane [cm]")
124 elif self.HoughSpace_format == 'linearIntercepts':
125 plt.xlabel("intercept @ last plane [cm]")
126 plt.ylabel("intercept @ 1st plane [cm]")
127 plt.tight_layout()
128 plt.show()
129
130 if self.smooth_full:
131 # In case of multiple occurrences of the maximum value, argmax returns
132 # the indices corresponding to the first occurrence (along the 1st axis).
133 i_max = np.unravel_index(self.accumulator.argmax(), self.accumulator.shape)
134 else:
135 # In case more than one bin holds the maximal number of entries, check whether the n-th quantile of
136 # the found peaks along the 'slope' axis (the xH axis) encloses an <n-th quantile> fraction of all maxima 'slope' bins
137 # within a specified range. That range should be consistent with
138 # the detector angular resolution.
139 maxima = np.argwhere(self.accumulator == np.amax(self.accumulator))
140 if len(maxima) == 1:
141 i_max = maxima[0]
142 elif len(maxima)==0 or (len(maxima) > 1 and self.HoughSpace_format == 'linearIntercepts'):
143 # if there is no reasonable way to select between maxima, force a track-build failure
144 # to be decided: what to do in the 'linearIntercepts' case with multiple maxima
145 return(-999, -999)
146 elif len(maxima) > 1 and abs(min([x[1] for x in maxima]) - max([x[1] for x in maxima])) < res:
147 i_max = maxima[0]
148 else:
149 # Collect the xH (slope-axis) indices of all maxima.
150 maxima_slopesAxis_list = np.asarray([x[1] for x in maxima])
152 quantile = np.quantile(maxima_slopesAxis_list, self.n_quantile)
153 # Count maxima whose xH index lies within 'res' bins of the quantile.
154 Nwithin = np.count_nonzero(np.abs(maxima_slopesAxis_list - quantile) < res)
158 if Nwithin/len(maxima) > self.n_quantile:
159 i_x = min([x[1] for x in maxima], key=lambda b: abs(b-quantile))
160 for im in maxima:
161 if im[1] == i_x : i_max = im
162 else:
163 # if there is no reasonable way to select between maxima, force a track-build failure
164 return(-999, -999)
165
166 found_yH = yH_bins[int(i_max[0])]
167 found_xH = xH_bins[int(i_max[1])]
168
169 if self.HoughSpace_format == 'normal':
170 slope = -1./np.tan(found_xH)
171 interceptShift = found_yH/np.sin(found_xH)
172 intercept = (np.tan(found_xH)*interceptShift + self.z_offset)/np.tan(found_xH)
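# Editor's note (derivation sketch): in the 'normal' format the accumulator stores
# r = z'*cos(theta) + y*sin(theta) with z' = z - z_offset, i.e.
# y = -z'/tan(theta) + r/sin(theta). Hence slope = -1/tan(theta) and, back in
# unshifted z, intercept = r/sin(theta) + z_offset/tan(theta), which is what the
# line above computes.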
173 elif self.HoughSpace_format == 'linearSlopeIntercept':
174 slope = found_xH
175 intercept = found_yH - slope*self.z_offset
176 elif self.HoughSpace_format == 'linearIntercepts':
177 slope = (found_xH - found_yH)/self.det_Zlen
178 intercept = found_yH - slope*self.z_offset
179
180 return (slope, intercept)
181
182 def fit_randomize(self, hit_collection, hit_d, n_random, is_scaled, draw, weights = None) :
183 success = True
184 if not len(hit_collection) :
185 return (-1, -1, [[],[]], [], False)
186
187 # Randomize hits
188 if (n_random > 0) :
189 random_hit_collection = []
190 for i_random in range(n_random) :
191 random_hits = np.random.uniform(size = hit_collection.shape) - 0.5
192 random_hits *= hit_d
193 random_hits += hit_collection
194 random_hit_collection.append(random_hits)
195
196 random_hit_collection = np.concatenate(random_hit_collection)
197 if weights is not None :
198 weights = np.tile(weights, n_random)
199
200 fit = self.fit(random_hit_collection, is_scaled, draw, weights)
201 else :
202 fit = self.fit(hit_collection, is_scaled, draw, weights)
203
204 return fit
205
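# --- Editor's illustrative sketch (not part of the original file). ---
# Minimal standalone use of the hough class in the 'linearSlopeIntercept'
# parameterisation (xH = slope, yH = intercept at the shifted first plane).
# The attributes set by hand below (smooth_full, sigma, truncate, n_quantile,
# res) mirror what MuonReco.Init() reads from the parameter xml file; all
# values here are toy assumptions.
def _example_hough_fit():
    h = hough(n_yH=1000, yH_range=[-80., 80.], n_xH=1000, xH_range=[-1., 1.],
              z_offset=0., Hformat='linearSlopeIntercept', space_scale=1.,
              det_Zlen=100.)
    h.smooth_full = True
    h.sigma = 3
    h.truncate = 4
    h.n_quantile = 0.9
    h.res = 5
    # Ten toy hits lying on y = 0.1*z + 2 with ~1 cm boxes, each thrown 5 times
    # with a uniform smearing inside the box (see fit_randomize above).
    z = np.linspace(0., 100., 10)
    hits = np.dstack([z, 0.1*z + 2.])[0]   # shape (N, 2): columns (z, y)
    hit_d = np.full_like(hits, 1.0)        # box sizes, same shape as 'hits'
    # Returns (slope, intercept); here approximately (0.1, 2.0).
    return h.fit_randomize(hits, hit_d, n_random=5, is_scaled=False, draw=False)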
206 def numPlanesHit(systems, detector_ids) :
207 scifi_stations = []
208 mufi_ds_planes = []
209 mufi_us_planes = []
210
211 scifi_stations.append( detector_ids[systems == 0]//1000000 )
212 mufi_ds_planes.append( (detector_ids[systems == 3]%10000)//1000 )
213 mufi_us_planes.append( (detector_ids[systems == 2]%10000)//1000 )
214
215 return len(np.unique(scifi_stations)) + len(np.unique(mufi_ds_planes)) + len(np.unique(mufi_us_planes))
216
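# --- Editor's illustrative sketch (not part of the original file). ---
# numPlanesHit() counts distinct SciFi stations (system 0: station = ID//1000000)
# plus distinct DS and US MuFilter planes (systems 3 and 2: plane = (ID%10000)//1000).
# The detector IDs below are toy values chosen only to exercise that decoding.
def _example_numPlanesHit():
    systems = np.array([0, 0, 0, 3, 3], dtype=np.int32)
    detector_ids = np.array([1023019, 1023020, 2011007,  # SciFi stations 1, 1, 2
                             30012, 31005],              # DS planes 0 and 1
                            dtype=np.int32)
    return numPlanesHit(systems, detector_ids)  # 2 stations + 2 DS planes = 4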
217 class MuonReco(ROOT.FairTask) :
218 " Muon reconstruction "
219
220 def Init(self) :
221
222 self.logger = ROOT.FairLogger.GetLogger()
223 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
224 print("Initializing muon reconstruction task!")
225
226 self.lsOfGlobals = ROOT.gROOT.GetListOfGlobals()
227 self.scifiDet = self.lsOfGlobals.FindObject('Scifi')
228 self.mufiDet = self.lsOfGlobals.FindObject('MuFilter')
229 self.ioman = ROOT.FairRootManager.Instance()
230
231 # Pass input data through to output.
232 # self.Passthrough()
233
234 # MC or data - needed for hit timing unit
235 if self.ioman.GetInTree().GetName() == 'rawConv': self.isMC = False
236 else: self.isMC = True
237
238 # Fetch the digi hit collections from the sink's output tree (online case), if they exist
239 sink = self.ioman.GetSink()
240 eventTree = None
241 if sink: eventTree = sink.GetOutTree()
242 if eventTree:
243 self.MuFilterHits = eventTree.Digi_MuFilterHits
244 self.ScifiHits = eventTree.Digi_ScifiHits
245 self.EventHeader = eventTree.EventHeader
246 else:
247 # Try the FairRoot way
248 self.MuFilterHits = self.ioman.GetObject("Digi_MuFilterHits")
249 self.ScifiHits = self.ioman.GetObject("Digi_ScifiHits")
250 self.EventHeader = self.ioman.GetObject("EventHeader")
251
252 # If that doesn't work, try using standard ROOT
253 if self.MuFilterHits == None :
254 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
255 print("Digi_MuFilterHits not in branch list")
256 self.MuFilterHits = self.ioman.GetInTree().Digi_MuFilterHits
257 if self.ScifiHits == None :
258 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
259 print("Digi_ScifiHits not in branch list")
260 self.ScifiHits = self.ioman.GetInTree().Digi_ScifiHits
261 if self.EventHeader == None :
262 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
263 print("EventHeader not in branch list")
264 self.EventHeader = self.ioman.GetInTree().EventHeader
265
266 if self.MuFilterHits == None :
267 raise RuntimeError("Digi_MuFilterHits not found in input file.")
268 if self.ScifiHits == None :
269 raise RuntimeError("Digi_ScifiHits not found in input file.")
270 if self.EventHeader == None :
271 raise RuntimeError("EventHeader not found in input file.")
272
273 # Initialize event counters in case scaling of events is required
274 self.scale = 1
275 self.events_run = 0
276
277 # Initialize hough transform - reading parameter xml file
278 tree = ET.parse(self.par_file)
279 root = tree.getroot()
280
281 # Output track in genfit::Track or sndRecoTrack format
282 # Check if genfit::Track format is already forced
283 if hasattr(self, "genfitTrack"): pass
284 else: self.genfitTrack = int(root[0].text)
285
286 self.draw = int(root[1].text)
287
288 track_case_exists = False
289 for case in root.findall('tracking_case'):
290 if case.get('name') == self.tracking_case:
291 track_case_exists = True
292 # Use SciFi hits or clusters
293 self.Scifi_meas = int(case.find('use_Scifi_clust').text)
294 # Maximum absolute value of reconstructed angle (+/- 1 rad is the maximum angle to form a triplet in the SciFi)
295 max_angle = float(case.find('max_angle').text)
296
297 # Hough space representation
298 Hspace_format_exists = False
299 for rep in case.findall('Hough_space_format'):
300 if rep.get('name') == self.Hough_space_format:
301 Hspace_format_exists = True
302 # Number of bins per Hough accumulator axes and range
303 ''' xH and yH are the abscissa and ordinate of the Hough parameter space
304 xz and yz represent horizontal and vertical projections
305 in the SNDLHC physics coord. system '''
306 n_accumulator_yH = int(rep.find('N_yH_bins').text)
307 yH_min_xz = float(rep.find('yH_min_xz').text)
308 yH_max_xz = float(rep.find('yH_max_xz').text)
309 yH_min_yz = float(rep.find('yH_min_yz').text)
310 yH_max_yz = float(rep.find('yH_max_yz').text)
311 n_accumulator_xH = int(rep.find('N_xH_bins').text)
312 xH_min_xz = float(rep.find('xH_min_xz').text)
313 xH_max_xz = float(rep.find('xH_max_xz').text)
314 xH_min_yz = float(rep.find('xH_min_yz').text)
315 xH_max_yz = float(rep.find('xH_max_yz').text)
316
317 else: continue
318 if not Hspace_format_exists:
319 raise RuntimeError("Unknown Hough space format, check naming in parameter xml file.")
320
321 # A scale factor for a back-up Hough space having more or fewer bins than the default one.
322 # It is useful when fitting some low-energy muon tracks, which are curved due to multiple scattering.
323 self.HT_space_scale = 1 if case.find('HT_space_scale')==None else float(case.find('HT_space_scale').text)
324
325 # Number of random throws per hit
326 self.n_random = int(case.find('n_random').text)
327 # MuFilter weight. Muon filter hits are thrown more times than scifi
328 self.muon_weight = int(case.find('mufi_weight').text)
329 # Minimum number of planes hit in each of the downstream muon filter (if muon filter hits used) or scifi (if muon filter hits not used) views to try to reconstruct a muon
330 self.min_planes_hit = int(case.find('min_planes_hit').text)
331
332 # Maximum number of muons to find. To avoid spending too much time on events with lots of downstream activity.
333 self.max_reco_muons = int(case.find('max_reco_muons').text)
334
336 # Maximum distance from the Hough line within which hits are assigned to the muon, for Kalman tracking
336 self.tolerance = float(case.find('tolerance').text)
337
338 # Which hits to use for track fitting.
339 self.hits_to_fit = case.find('hits_to_fit').text.strip()
340 # Which hits to use for triplet condition.
341 self.hits_for_triplet = case.find('hits_for_hough').text.strip() if case.find('hits_to_validate')==None else case.find('hits_to_validate').text.strip()
342
343 # Detector plane masking. If the flag is active, a plane will be masked if its N_hits > Nhits_per_plane.
344 # In any case, plane masking is only applied if solely SciFi hits are used in the HT, as it is
345 # a measure against having many maxima in the HT space.
346 self.mask_plane = int(case.find('mask_plane').text)
347 self.Nhits_per_plane = int(case.find('Nhits_per_plane').text)
348
349 # Enable Gaussian smoothing over the full accumulator space.
350 self.smooth_full = int(case.find('smooth_full').text)
351 # Gaussian smoothing parameters. The kernel size is determined as 2*int(truncate*sigma+0.5)+1
352 self.sigma = int(case.find('sigma').text)
353 self.truncate = int(case.find('truncate').text)
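# Editor's note (illustrative values, not from the parameter file): e.g.
# sigma = 3 and truncate = 2 give a smoothing kernel of 2*int(2*3 + 0.5) + 1 = 13
# accumulator bins per axis.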
354 # Helpers to pick up one of many HT space maxima
355 self.n_quantile = float(case.find('n_quantile').text)
356 self.res = int(case.find('res').text)
357
358 else: continue
359 if not track_case_exists:
360 raise RuntimeError("Unknown tracking case, check naming in parameter xml file.")
361
362 # Get sensor dimensions from geometry
363 self.MuFilter_ds_dx = self.mufiDet.GetConfParF("MuFilter/DownstreamBarY") # Assume y dimensions in vertical bars are the same as x dimensions in horizontal bars.
364 self.MuFilter_ds_dy = self.mufiDet.GetConfParF("MuFilter/DownstreamBarY") # Assume y dimensions in vertical bars are the same as x dimensions in horizontal bars.
365 self.MuFilter_ds_dz = self.mufiDet.GetConfParF("MuFilter/DownstreamBarZ")
366
367 self.MuFilter_us_dx = self.mufiDet.GetConfParF("MuFilter/UpstreamBarX")
368 self.MuFilter_us_dy = self.mufiDet.GetConfParF("MuFilter/UpstreamBarY")
369 self.MuFilter_us_dz = self.mufiDet.GetConfParF("MuFilter/UpstreamBarZ")
370
371 self.Scifi_dx = self.scifiDet.GetConfParF("Scifi/channel_width")
372 self.Scifi_dy = self.scifiDet.GetConfParF("Scifi/channel_width")
373 self.Scifi_dz = self.scifiDet.GetConfParF("Scifi/epoxymat_z") # From Scifi.cxx: this is the variable used to define the z dimension of SiPM channels, so it seems like the right dimension to use.
374
375 # Get number of readout channels
376 self.MuFilter_us_nSiPMs = self.mufiDet.GetConfParI("MuFilter/UpstreamnSiPMs")*self.mufiDet.GetConfParI("MuFilter/UpstreamnSides")
377 self.MuFilter_ds_nSiPMs_hor = self.mufiDet.GetConfParI("MuFilter/DownstreamnSiPMs")*self.mufiDet.GetConfParI("MuFilter/DownstreamnSides")
378 self.MuFilter_ds_nSiPMs_vert = self.mufiDet.GetConfParI("MuFilter/DownstreamnSiPMs")
379
380 self.Scifi_nPlanes = self.scifiDet.GetConfParI("Scifi/nscifi")
381 self.DS_nPlanes = self.mufiDet.GetConfParI("MuFilter/NDownstreamPlanes")
385
386 # get the distance between 1st and last detector planes to be used in the track fit.
387 # a z_offset is used to shift detector hits so as to have a smaller Hough parameter space
388 # Using geometers' measurements! For safety, add a 5 cm buffer to the detector lengths and a 2.5 cm one to z_offset.
389 # This accounts for possible detector position shifts/mismatches between the geometrical measurements and the sndsw physics coordinate system.
390 if self.hits_for_triplet.find('sf') >= 0 and self.hits_for_triplet.find('ds') >= 0:
391 det_Zlen = (self.mufiDet.GetConfParF("MuFilter/Muon9Dy") - self.scifiDet.GetConfParF("Scifi/Ypos0"))*unit.cm + 5.0*unit.cm
392 z_offset = self.scifiDet.GetConfParF("Scifi/Ypos0")*unit.cm - 2.5*unit.cm
393 elif self.hits_for_triplet == 'sf':
394 det_Zlen = (self.scifiDet.GetConfParF("Scifi/Ypos4") - self.scifiDet.GetConfParF("Scifi/Ypos0"))*unit.cm + 5.0*unit.cm
395 z_offset = self.scifiDet.GetConfParF("Scifi/Ypos0")*unit.cm - 2.5*unit.cm
396 elif self.hits_for_triplet == 'ds':
397 det_Zlen = (self.mufiDet.GetConfParF("MuFilter/Muon9Dy") - self.mufiDet.GetConfParF("MuFilter/Muon6Dy"))*unit.cm + 5.0*unit.cm
398 z_offset = self.mufiDet.GetConfParF("MuFilter/Muon6Dy")*unit.cm - 2.5*unit.cm
399 # this use case is not tested with a z offset yet
400 if self.tracking_case.find('nu_') >= 0: z_offset = 0*unit.cm
401 # other use cases go here if ever added
402
403 # Initialize Hough transforms for both views:
404 if self.Hough_space_format == 'normal':
405 # rho-theta representation - must exclude theta range for tracks passing < 3 det. planes
406 self.h_ZX = hough(n_accumulator_yH, [yH_min_xz, yH_max_xz], n_accumulator_xH, [-max_angle+np.pi/2., max_angle+np.pi/2.], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
407 self.h_ZY = hough(n_accumulator_yH, [yH_min_yz, yH_max_yz], n_accumulator_xH, [-max_angle+np.pi/2., max_angle+np.pi/2.], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
408 else:
409 self.h_ZX = hough(n_accumulator_yH, [yH_min_xz, yH_max_xz], n_accumulator_xH, [xH_min_xz, xH_max_xz], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
410 self.h_ZY = hough(n_accumulator_yH, [yH_min_yz, yH_max_yz], n_accumulator_xH, [xH_min_yz, xH_max_yz], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
411
412 self.h_ZX.smooth_full = self.smooth_full
413 self.h_ZY.smooth_full = self.smooth_full
414 self.h_ZX.sigma = self.sigma
415 self.h_ZX.truncate = self.truncate
416 self.h_ZY.sigma = self.sigma
417 self.h_ZY.truncate = self.truncate
418
419 self.h_ZX.n_quantile = self.n_quantile
420 self.h_ZX.res = self.res
421 self.h_ZY.n_quantile = self.n_quantile
422 self.h_ZY.res = self.res
423
424 if self.hits_to_fit == "sf" : self.track_type = 11
425 elif self.hits_to_fit == "ds": self.track_type = 13
426 else : self.track_type = 15
427
428 # To keep temporary detector information
429 self.a = ROOT.TVector3()
430 self.b = ROOT.TVector3()
431
432 # check if track container exists
433 if self.ioman.GetObject('Reco_MuonTracks') != None:
434 self.kalman_tracks = self.ioman.GetObject('Reco_MuonTracks')
435 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
436 print('Branch activated by another task!')
437 else:
438 # Now initialize output in genfit::track or sndRecoTrack format
439 if self.genfitTrack:
440 self.kalman_tracks = ROOT.TObjArray(10)
441 if hasattr(self, "standalone") and self.standalone:
442 self.ioman.Register("Reco_MuonTracks", self.ioman.GetFolderName(), self.kalman_tracks, ROOT.kTRUE)
443 else:
444 self.kalman_tracks = ROOT.TClonesArray("sndRecoTrack", 10)
445 if hasattr(self, "standalone") and self.standalone:
446 self.ioman.Register("Reco_MuonTracks", "", self.kalman_tracks, ROOT.kTRUE)
447
448 # initialize detector class with EventHeader(runN), if SNDLHCEventHeader detected
449 # only needed if using HT tracking manager, i.e. standalone
450 if self.EventHeader.IsA().GetName()=='SNDLHCEventHeader' and hasattr(self, "standalone") and self.standalone and not self.isMC :
451 self.scifiDet.InitEvent(self.EventHeader)
452 self.mufiDet.InitEvent(self.EventHeader)
453
454 # internal storage of clusters
455 if self.Scifi_meas: self.clusScifi = ROOT.TObjArray(100)
456
457 # Kalman filter stuff
458
459 geoMat = ROOT.genfit.TGeoMaterialInterface()
460 bfield = ROOT.genfit.ConstField(0, 0, 0)
461 fM = ROOT.genfit.FieldManager.getInstance()
462 fM.init(bfield)
463 ROOT.genfit.MaterialEffects.getInstance().init(geoMat)
464 ROOT.genfit.MaterialEffects.getInstance().setNoEffects()
465
466 self.kalman_fitter = ROOT.genfit.KalmanFitter()
467 self.kalman_fitter.setMaxIterations(50)
471
472 # Init() MUST return int
473 return 0
474
475 def SetScaleFactor(self, scale):
476 self.scale = scale
477
478 def SetParFile(self, file_name):
479 self.par_file = file_name
480
481 def SetTrackingCase(self, case):
482 self.tracking_case = case
483
484 def SetHoughSpaceFormat(self, Hspace_format):
485 self.Hough_space_format = Hspace_format
486
487 def ForceGenfitTrackFormat(self):
488 self.genfitTrack = 1
489
490 # flag showing that the task is run separately from other tracking tasks
491 def SetStandalone(self):
492 self.standalone = 1
493
494 def Passthrough(self) :
495 T = self.ioman.GetInTree()
496
497 for x in T.GetListOfBranches():
498 obj_name = x.GetName()
499 if self.ioman.GetObject(obj_name) == None :
500 continue
501 self.ioman.Register(obj_name, self.ioman.GetFolderName(), self.ioman.GetObject(obj_name), ROOT.kTRUE)
502
503 def Exec(self, opt) :
504 self.kalman_tracks.Clear('C')
505
506 # Set scaling in case the task is run separately from other tracking tasks
507 if self.scale>1 and self.standalone:
508 if ROOT.gRandom.Rndm() > 1.0/self.scale: return
509
510 self.events_run += 1
511 hit_collection = {"pos" : [[], [], []],
512 "d" : [[], [], []],
513 "vert" : [],
514 "index" : [],
515 "system" : [],
516 "detectorID" : [],
517 "B" : [[], [], []],
518 "time": [],
519 "mask": []}
520
521 if ("us" in self.hits_to_fit) or ("ds" in self.hits_to_fit) or ("ve" in self.hits_to_fit) :
522 # Loop through muon filter hits
523 for i_hit, muFilterHit in enumerate(self.MuFilterHits) :
524 # Skip veto hits unless they are explicitly requested for fitting
525 if muFilterHit.GetSystem() == 1 :
526 if "ve" not in self.hits_to_fit :
527 continue
528 elif muFilterHit.GetSystem() == 2 :
529 if "us" not in self.hits_to_fit :
530 continue
531 elif muFilterHit.GetSystem() == 3 :
532 if "ds" not in self.hits_to_fit :
533 continue
534 else :
535 if self.logger.IsLogNeeded(ROOT.fair.Severity.warn):
536 print("WARNING! Unknown MuFilter system!!")
537
538 self.mufiDet.GetPosition(muFilterHit.GetDetectorID(), self.a, self.b)
539
540 hit_collection["pos"][0].append(self.a.X())
541 hit_collection["pos"][1].append(self.a.Y())
542 hit_collection["pos"][2].append(self.a.Z())
543
544 hit_collection["B"][0].append(self.b.X())
545 hit_collection["B"][1].append(self.b.Y())
546 hit_collection["B"][2].append(self.b.Z())
547
548 hit_collection["vert"].append(muFilterHit.isVertical())
549 hit_collection["system"].append(muFilterHit.GetSystem())
550
551 hit_collection["d"][0].append(self.MuFilter_ds_dx)
552 hit_collection["d"][2].append(self.MuFilter_ds_dz)
553
554 hit_collection["index"].append(i_hit)
555
556 hit_collection["detectorID"].append(muFilterHit.GetDetectorID())
557 hit_collection["mask"].append(False)
558
559 times = []
560 # Downstream
561 if muFilterHit.GetSystem() == 3 :
562 hit_collection["d"][1].append(self.MuFilter_ds_dx)
563 for ch in range(self.MuFilter_ds_nSiPMs_hor):
564 if muFilterHit.isVertical() and ch==self.MuFilter_ds_nSiPMs_vert: break
565 if self.isMC:
566 times.append(muFilterHit.GetAllTimes()[ch]) #already in ns
567 else: times.append(muFilterHit.GetAllTimes()[ch]*6.25) #tdc2ns
568 # Upstream
569 else :
570 hit_collection["d"][1].append(self.MuFilter_us_dy)
571 for ch in range(self.MuFilter_us_nSiPMs):
572 if self.isMC:
573 times.append(muFilterHit.GetAllTimes()[ch]) #already in ns
574 else: times.append(muFilterHit.GetAllTimes()[ch]*6.25) #tdc2ns
575 hit_collection["time"].append(times)
576
577 if "sf" in self.hits_to_fit :
578 if self.Scifi_meas:
579 # Make scifi clusters
580 self.clusScifi.Clear()
581 self.scifiCluster()
582
583 # Loop through scifi clusters
584 for i_clust, scifiCl in enumerate(self.clusScifi) :
585 scifiCl.GetPosition(self.a,self.b)
586
587 hit_collection["pos"][0].append(self.a.X())
588 hit_collection["pos"][1].append(self.a.Y())
589 hit_collection["pos"][2].append(self.a.Z())
590
591 hit_collection["B"][0].append(self.b.X())
592 hit_collection["B"][1].append(self.b.Y())
593 hit_collection["B"][2].append(self.b.Z())
594
595 # take the cluster size as the active area size
596 hit_collection["d"][0].append(scifiCl.GetN()*self.Scifi_dx)
597 hit_collection["d"][1].append(scifiCl.GetN()*self.Scifi_dy)
598 hit_collection["d"][2].append(self.Scifi_dz)
599
600 if int(scifiCl.GetFirst()/100000)%10==1: # the 100000s digit of the SciFi detector ID encodes the orientation (1 = vertical)
601 hit_collection["vert"].append(True)
602 else: hit_collection["vert"].append(False)
603 hit_collection["index"].append(i_clust)
604
605 hit_collection["system"].append(0)
606 hit_collection["detectorID"].append(scifiCl.GetFirst())
607 hit_collection["mask"].append(False)
608
609 times = []
610 if self.isMC : times.append(scifiCl.GetTime()/6.25) # for MC, hit times are already in ns, so the MC SciFi cluster time has to be divided by tdc2ns
611 else: times.append(scifiCl.GetTime()) # already in ns
612 hit_collection["time"].append(times)
613
614 else:
615 if self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf':
616 # Loop through scifi hits and count hits per projection and plane
617 N_plane_ZY = {1:0, 2:0, 3:0, 4:0, 5:0}
618 N_plane_ZX = {1:0, 2:0, 3:0, 4:0, 5:0}
619 for scifiHit in self.ScifiHits:
620 if not scifiHit.isValid(): continue
621 if scifiHit.isVertical():
622 N_plane_ZX[scifiHit.GetStation()] += 1
623 else:
624 N_plane_ZY[scifiHit.GetStation()] += 1
625 if self.mask_plane:
626 mask_plane_ZY = []
627 mask_plane_ZX = []
628 # sort planes by occupancy, busiest first
629 N_plane_ZY = dict(sorted(N_plane_ZY.items(), key=lambda item: item[1], reverse = True))
630 N_plane_ZX = dict(sorted(N_plane_ZX.items(), key=lambda item: item[1], reverse = True))
631 # count planes with hits
632 n_zx = self.Scifi_nPlanes - list(N_plane_ZX.values()).count(0)
633 n_zy = self.Scifi_nPlanes - list(N_plane_ZY.values()).count(0)
634 # check with min number of hit planes
635 if n_zx < self.min_planes_hit or n_zy < self.min_planes_hit: return
636 # mask the busiest planes, leaving at least min_planes_hit planes with hits
637 for ii in range(n_zx-self.min_planes_hit):
638 if list(N_plane_ZX.values())[ii] > self.Nhits_per_plane:
639 mask_plane_ZX.append(list(N_plane_ZX.keys())[ii])
640 for ii in range(n_zy-self.min_planes_hit):
641 if list(N_plane_ZY.values())[ii] > self.Nhits_per_plane:
642 mask_plane_ZY.append(list(N_plane_ZY.keys())[ii])
643
644 # Loop through scifi hits
645 for i_hit, scifiHit in enumerate(self.ScifiHits) :
646 if not scifiHit.isValid(): continue
647 self.scifiDet.GetSiPMPosition(scifiHit.GetDetectorID(), self.a, self.b)
648 hit_collection["pos"][0].append(self.a.X())
649 hit_collection["pos"][1].append(self.a.Y())
650 hit_collection["pos"][2].append(self.a.Z())
651
652 hit_collection["B"][0].append(self.b.X())
653 hit_collection["B"][1].append(self.b.Y())
654 hit_collection["B"][2].append(self.b.Z())
655
656 hit_collection["d"][0].append(self.Scifi_dx)
657 hit_collection["d"][1].append(self.Scifi_dy)
658 hit_collection["d"][2].append(self.Scifi_dz)
659
660 hit_collection["vert"].append(scifiHit.isVertical())
661 hit_collection["index"].append(i_hit)
662
663 hit_collection["system"].append(0)
664
665 hit_collection["detectorID"].append(scifiHit.GetDetectorID())
666
667 if self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf' and self.mask_plane:
668 if (scifiHit.isVertical()==0 and scifiHit.GetStation() in mask_plane_ZY) or (scifiHit.isVertical() and scifiHit.GetStation() in mask_plane_ZX):
669 hit_collection["mask"].append(True)
670 else: hit_collection["mask"].append(False)
671 else:
672 hit_collection["mask"].append(False)
673
674 times = []
675
676 if self.isMC : times.append(scifiHit.GetTime()) # already in ns
677 else: times.append(scifiHit.GetTime()*6.25) #tdc2ns
678 hit_collection["time"].append(times)
679
680 # If no hits, return
681 if len(hit_collection['pos'][0])==0: return
682
683 # Make the hit collection numpy arrays.
684 for key, item in hit_collection.items() :
685 if key == 'vert' :
686 this_dtype = np.bool_
687 elif key == 'mask' :
688 this_dtype = np.bool_
689 elif key == "index" or key == "system" or key == "detectorID" :
690 this_dtype = np.int32
691 elif key != 'time' :
692 this_dtype = np.float32
693 if key== 'time':
694 length = max(map(len, item))
695 hit_collection[key] = np.stack(np.array([xi+[None]*(length-len(xi)) for xi in item]), axis = 1)
696 else: hit_collection[key] = np.array(item, dtype = this_dtype)
697
698 # Systems entering the triplet condition - useful for later
699 triplet_condition_system = []
700 if "sf" in self.hits_for_triplet :
701 triplet_condition_system.append(0)
702 if "ve" in self.hits_for_triplet :
703 triplet_condition_system.append(1)
704 if "us" in self.hits_for_triplet :
705 triplet_condition_system.append(2)
706 if "ds" in self.hits_for_triplet :
707 triplet_condition_system.append(3)
708
709 # Reconstruct muons until there are not enough hit planes left to satisfy the triplet condition (or max_reco_muons is reached)
710 for i_muon in range(self.max_reco_muons) :
711
712 triplet_hits_horizontal = np.logical_and( ~hit_collection["vert"],
713 np.isin(hit_collection["system"], triplet_condition_system) )
714 triplet_hits_vertical = np.logical_and( hit_collection["vert"],
715 np.isin(hit_collection["system"], triplet_condition_system) )
716
717 n_planes_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal],
718 hit_collection["detectorID"][triplet_hits_horizontal])
719 if n_planes_ZY < self.min_planes_hit :
720 break
721
722 n_planes_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical],
723 hit_collection["detectorID"][triplet_hits_vertical])
724 if n_planes_ZX < self.min_planes_hit :
725 break
726
727 # Get hits in hough transform format
728 muon_hits_horizontal = np.logical_and( np.logical_and( ~hit_collection["vert"], ~hit_collection["mask"]),
729 np.isin(hit_collection["system"], [1, 2, 3]))
730 muon_hits_vertical = np.logical_and( np.logical_and( hit_collection["vert"], ~hit_collection["mask"]),
731 np.isin(hit_collection["system"], [1, 2, 3]))
732 scifi_hits_horizontal = np.logical_and( np.logical_and( ~hit_collection["vert"], ~hit_collection["mask"]),
733 np.isin(hit_collection["system"], [0]))
734 scifi_hits_vertical = np.logical_and( np.logical_and( hit_collection["vert"], ~hit_collection["mask"]),
735 np.isin(hit_collection["system"], [0]))
736
737
738 ZY = np.dstack([np.concatenate([np.tile(hit_collection["pos"][2][muon_hits_horizontal], self.muon_weight),
739 hit_collection["pos"][2][scifi_hits_horizontal]]),
740 np.concatenate([np.tile(hit_collection["pos"][1][muon_hits_horizontal], self.muon_weight),
741 hit_collection["pos"][1][scifi_hits_horizontal]])])[0]
742
743 d_ZY = np.dstack([np.concatenate([np.tile(hit_collection["d"][2][muon_hits_horizontal], self.muon_weight),
744 hit_collection["d"][2][scifi_hits_horizontal]]),
745 np.concatenate([np.tile(hit_collection["d"][1][muon_hits_horizontal], self.muon_weight),
746 hit_collection["d"][1][scifi_hits_horizontal]])])[0]
747
748 ZX = np.dstack([np.concatenate([np.tile(hit_collection["pos"][2][muon_hits_vertical], self.muon_weight),
749 hit_collection["pos"][2][scifi_hits_vertical]]),
750 np.concatenate([np.tile(hit_collection["pos"][0][muon_hits_vertical], self.muon_weight),
751 hit_collection["pos"][0][scifi_hits_vertical]])])[0]
752
753 d_ZX = np.dstack([np.concatenate([np.tile(hit_collection["d"][2][muon_hits_vertical], self.muon_weight),
754 hit_collection["d"][2][scifi_hits_vertical]]),
755 np.concatenate([np.tile(hit_collection["d"][0][muon_hits_vertical], self.muon_weight),
756 hit_collection["d"][0][scifi_hits_vertical]])])[0]
757
758 is_scaled = False
759 ZY_hough = self.h_ZY.fit_randomize(ZY, d_ZY, self.n_random, is_scaled, self.draw)
760 ZX_hough = self.h_ZX.fit_randomize(ZX, d_ZX, self.n_random, is_scaled, self.draw)
761
762 tol = self.tolerance
763 # Special treatment for events with low hit occupancy - increase tolerance
764 # For Scifi-only tracks
765 if len(hit_collection["detectorID"]) <= self.max_n_Scifi_hits and self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf' :
766 # as there are masked Scifi planes, make sure to use hit counts before the masking
767 if max(N_plane_ZX.values()) <= self.max_n_hits_plane and max(N_plane_ZY.values()) <= self.max_n_hits_plane :
768 tol = 5*self.tolerance
769 # For DS-only tracks
770 if self.hits_for_triplet == 'ds' and self.hits_to_fit == 'ds' :
771 # Loop through hits and count hits per projection and plane
772 N_plane_ZY = {0:0, 1:0, 2:0, 3:0}
773 N_plane_ZX = {0:0, 1:0, 2:0, 3:0}
774 for item in range(len(hit_collection["detectorID"])):
775 if hit_collection["vert"][item]: N_plane_ZX[(hit_collection["detectorID"][item]%10000)//1000] += 1
776 else: N_plane_ZY[(hit_collection["detectorID"][item]%10000)//1000] += 1
777 if max(N_plane_ZX.values()) <= self.max_n_hits_plane and max(N_plane_ZY.values()) <= self.max_n_hits_plane :
778 tol = 3*self.tolerance
779
780 # Check if the track intersects hits in a minimum number of planes in each view.
781 track_hits_for_triplet_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
782 np.dstack([hit_collection["pos"][2][triplet_hits_horizontal],
783 hit_collection["pos"][1][triplet_hits_horizontal]]),
784 np.dstack([hit_collection["d"][2][triplet_hits_horizontal],
785 hit_collection["d"][1][triplet_hits_horizontal]]), tol)
786
787 track_hits_for_triplet_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
788 np.dstack([hit_collection["pos"][2][triplet_hits_vertical],
789 hit_collection["pos"][0][triplet_hits_vertical]]),
790 np.dstack([hit_collection["d"][2][triplet_hits_vertical],
791 hit_collection["d"][0][triplet_hits_vertical]]), tol)
792
793 n_planes_hit_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal][track_hits_for_triplet_ZY],
794 hit_collection["detectorID"][triplet_hits_horizontal][track_hits_for_triplet_ZY])
795
796 n_planes_hit_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical][track_hits_for_triplet_ZX],
797 hit_collection["detectorID"][triplet_hits_vertical][track_hits_for_triplet_ZX])
798
799 # For failed SciFi track fits, in events with little hit activity, try using fewer Hough-space bins
800 if (self.hits_to_fit == 'sf' and len(hit_collection["detectorID"]) <= self.max_n_Scifi_hits and \
801 (n_planes_hit_ZY < self.min_planes_hit or n_planes_hit_ZX < self.min_planes_hit)):
802
803 is_scaled = True
804 ZY_hough = self.h_ZY.fit_randomize(ZY, d_ZY, self.n_random, is_scaled, self.draw)
805 ZX_hough = self.h_ZX.fit_randomize(ZX, d_ZX, self.n_random, is_scaled, self.draw)
806
807 # Check if the track intersects hits in a minimum number of planes in each view.
808 track_hits_for_triplet_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
809 np.dstack([hit_collection["pos"][2][triplet_hits_horizontal],
810 hit_collection["pos"][1][triplet_hits_horizontal]]),
811 np.dstack([hit_collection["d"][2][triplet_hits_horizontal],
812 hit_collection["d"][1][triplet_hits_horizontal]]), tol)
813
814 n_planes_hit_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal][track_hits_for_triplet_ZY],
815 hit_collection["detectorID"][triplet_hits_horizontal][track_hits_for_triplet_ZY])
816 if n_planes_hit_ZY < self.min_planes_hit:
817 break
818
819 track_hits_for_triplet_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
820 np.dstack([hit_collection["pos"][2][triplet_hits_vertical],
821 hit_collection["pos"][0][triplet_hits_vertical]]),
822 np.dstack([hit_collection["d"][2][triplet_hits_vertical],
823 hit_collection["d"][0][triplet_hits_vertical]]), tol)
824 n_planes_hit_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical][track_hits_for_triplet_ZX],
825 hit_collection["detectorID"][triplet_hits_vertical][track_hits_for_triplet_ZX])
826
827 if n_planes_hit_ZY < self.min_planes_hit or n_planes_hit_ZX < self.min_planes_hit:
828 break
829
830# print("Found {0} downstream ZX planes associated to muon track".format(n_planes_ds_ZX))
831# print("Found {0} downstream ZY planes associated to muon track".format(n_planes_ds_ZY))
832
833 # This time with all the hits, not just triplet condition.
834 track_hits_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
835 np.dstack([hit_collection["pos"][2][~hit_collection["vert"]],
836 hit_collection["pos"][1][~hit_collection["vert"]]]),
837 np.dstack([hit_collection["d"][2][~hit_collection["vert"]],
838 hit_collection["d"][1][~hit_collection["vert"]]]), tol)
839
840 track_hits_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
841 np.dstack([hit_collection["pos"][2][hit_collection["vert"]],
842 hit_collection["pos"][0][hit_collection["vert"]]]),
843 np.dstack([hit_collection["d"][2][hit_collection["vert"]],
844 hit_collection["d"][0][hit_collection["vert"]]]), tol)
845 # Onto Kalman fitter (based on SndlhcTracking.py)
846 posM = ROOT.TVector3(0, 0, 0.)
847 momM = ROOT.TVector3(0,0,100.) # default track with high momentum
848
849 # approximate covariance
850 covM = ROOT.TMatrixDSym(6)
851 if self.hits_to_fit.find('sf') >= 0:
853 if self.hits_to_fit == 'ds':
855 for i in range(3): covM[i][i] = res*res
856 for i in range(3,6): covM[i][i] = ROOT.TMath.Power(res / (4.*2.) / ROOT.TMath.Sqrt(3), 2)
857 rep = ROOT.genfit.RKTrackRep(13)
858
859 # start state
860 state = ROOT.genfit.MeasuredStateOnPlane(rep)
861 rep.setPosMomCov(state, posM, momM, covM)
862
863 # create track
864 seedState = ROOT.TVectorD(6)
865 seedCov = ROOT.TMatrixDSym(6)
866 rep.get6DStateCov(state, seedState, seedCov)
867
868 theTrack = ROOT.genfit.Track(rep, seedState, seedCov)
869
870 # Sort measurements in Z
871 hit_z = np.concatenate([hit_collection["pos"][2][hit_collection["vert"]][track_hits_ZX],
872 hit_collection["pos"][2][~hit_collection["vert"]][track_hits_ZY]])
873
874 hit_A0 = np.concatenate([hit_collection["pos"][0][hit_collection["vert"]][track_hits_ZX],
875 hit_collection["pos"][0][~hit_collection["vert"]][track_hits_ZY]])
876
877 hit_A1 = np.concatenate([hit_collection["pos"][1][hit_collection["vert"]][track_hits_ZX],
878 hit_collection["pos"][1][~hit_collection["vert"]][track_hits_ZY]])
879
880 hit_B0 = np.concatenate([hit_collection["B"][0][hit_collection["vert"]][track_hits_ZX],
881 hit_collection["B"][0][~hit_collection["vert"]][track_hits_ZY]])
882
883 hit_B1 = np.concatenate([hit_collection["B"][1][hit_collection["vert"]][track_hits_ZX],
884 hit_collection["B"][1][~hit_collection["vert"]][track_hits_ZY]])
885
886 hit_B2 = np.concatenate([hit_collection["B"][2][hit_collection["vert"]][track_hits_ZX],
887 hit_collection["B"][2][~hit_collection["vert"]][track_hits_ZY]])
888
889 hit_detid = np.concatenate([hit_collection["detectorID"][hit_collection["vert"]][track_hits_ZX],
890 hit_collection["detectorID"][~hit_collection["vert"]][track_hits_ZY]])
891
892 kalman_spatial_sigma = np.concatenate([hit_collection["d"][0][hit_collection["vert"]][track_hits_ZX] / 12**0.5,
893 hit_collection["d"][1][~hit_collection["vert"]][track_hits_ZY] / 12**0.5])
894
895 # Maximum distance. Use ((d_xy/2)**2 + (d_z/2)**2)**0.5
896 kalman_max_dis = np.concatenate([((hit_collection["d"][0][hit_collection["vert"]][track_hits_ZX]/2.)**2 +
897 (hit_collection["d"][2][hit_collection["vert"]][track_hits_ZX]/2.)**2)**0.5,
898 ((hit_collection["d"][1][~hit_collection["vert"]][track_hits_ZY]/2.)**2 +
899 (hit_collection["d"][2][~hit_collection["vert"]][track_hits_ZY]/2.)**2)**0.5])
900
901 hitID = 0 # Does it matter? We don't have a global hit ID.
902
903 hit_time = {}
904 for ch in range(hit_collection["time"].shape[0]):
905 hit_time[ch] = np.concatenate([hit_collection["time"][ch][hit_collection["vert"]][track_hits_ZX],
906 hit_collection["time"][ch][~hit_collection["vert"]][track_hits_ZY]])
907
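# Editor's note (following the genfit wire-measurement convention): each hit is
# passed to the Kalman fit as a WireMeasurement whose 7-component raw vector is
# (A0, A1, z, B0, B1, B2, 0.), i.e. the two end points A and B of the fired
# bar/fibre plus a zero drift distance, while hitCov[6][6] carries the squared
# spatial resolution. The fit therefore constrains the track to pass close to
# the segment A-B.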
908 for i_z_sorted in hit_z.argsort() :
909 tp = ROOT.genfit.TrackPoint()
910 hitCov = ROOT.TMatrixDSym(7)
911 hitCov[6][6] = kalman_spatial_sigma[i_z_sorted]**2
912
913 measurement = ROOT.genfit.WireMeasurement(ROOT.TVectorD(7, array('d', [hit_A0[i_z_sorted],
914 hit_A1[i_z_sorted],
915 hit_z[i_z_sorted],
916 hit_B0[i_z_sorted],
917 hit_B1[i_z_sorted],
918 hit_B2[i_z_sorted],
919 0.])),
920 hitCov,
921 1, # detid?
922 6, # hitid?
923 tp)
924
925 measurement.setMaxDistance(kalman_max_dis[i_z_sorted])
926 measurement.setDetId(int(hit_detid[i_z_sorted]))
927 measurement.setHitId(int(hitID))
928 hitID += 1
929 tp.addRawMeasurement(measurement)
930 theTrack.insertPoint(tp)
931
932 if not theTrack.checkConsistency():
933 theTrack.Delete()
934 raise RuntimeError("Kalman fitter track consistency check failed.")
935
936 # do the fit
937 self.kalman_fitter.processTrack(theTrack) # processTrackWithRep(theTrack,rep,True)
938
939 fitStatus = theTrack.getFitStatus()
940 if not fitStatus.isFitConverged() and 0>1: # note: the 'and 0>1' makes this check a no-op
941 theTrack.Delete()
942 raise RuntimeError("Kalman fit did not converge.")
943
944 # Now save the track if fit converged!
945 theTrack.SetUniqueID(self.track_type)
946 if fitStatus.isFitConverged():
947 if self.genfitTrack: self.kalman_tracks.Add(theTrack)
948 else :
949 # Load items into snd track class object
950 this_track = ROOT.sndRecoTrack(theTrack)
951 pointTimes = ROOT.std.vector(ROOT.std.vector('float'))()
952 for n, i_z_sorted in enumerate(hit_z.argsort()):
953 t_per_hit = []
954 for ch in range(len(hit_time)):
955 if hit_time[ch][i_z_sorted] != None:
956 t_per_hit.append(hit_time[ch][i_z_sorted])
957 pointTimes.push_back(t_per_hit)
958 this_track.setRawMeasTimes(pointTimes)
959 this_track.setTrackType(self.track_type)
960 # Save the track in sndRecoTrack format
961 self.kalman_tracks[i_muon] = this_track
962 # Delete the Kalman track object
963 theTrack.Delete()
964
965 # Remove track hits and try to find an additional track
966 # Find array index to be removed
967 index_to_remove_ZX = np.where(np.in1d(hit_collection["detectorID"], hit_collection["detectorID"][hit_collection["vert"]][track_hits_ZX]))[0]
968 index_to_remove_ZY = np.where(np.in1d(hit_collection["detectorID"], hit_collection["detectorID"][~hit_collection["vert"]][track_hits_ZY]))[0]
969
970 index_to_remove = np.concatenate([index_to_remove_ZX, index_to_remove_ZY])
971
972 # Remove dictionary entries
973 for key in hit_collection.keys() :
974 if len(hit_collection[key].shape) == 1 :
975 hit_collection[key] = np.delete(hit_collection[key], index_to_remove)
976 elif len(hit_collection[key].shape) == 2 :
977 hit_collection[key] = np.delete(hit_collection[key], index_to_remove, axis = 1)
978 else :
979 raise Exception("Wrong number of dimensions found when deleting hits in iterative muon identification algorithm.")
980
981 def FinishTask(self) :
982 print("Processed", self.events_run)
983 if not self.genfitTrack : self.kalman_tracks.Delete()
984 else : pass
985
986 # This is a copy of the SndlhcTracking function, with small adjustments to event object names to make it work here.
987 # FIXME: find a way to use this function directly from SndlhcTracking!
988 def scifiCluster(self):
989 clusters = []
990 hitDict = {}
991 for k in range(self.ScifiHits.GetEntries()):
992 d = self.ScifiHits[k]
993 if not d.isValid(): continue
994 hitDict[d.GetDetectorID()] = k
995 hitList = list(hitDict.keys())
996 if len(hitList)>0:
997 hitList.sort()
998 tmp = [ hitList[0] ]
999 cprev = hitList[0]
1000 ncl = 0
1001 last = len(hitList)-1
1002 hitvector = ROOT.std.vector("sndScifiHit*")()
1003 for i in range(len(hitList)):
1004 if i==0 and len(hitList)>1: continue
1005 c=hitList[i]
1006 neighbour = False
1007 if (c-cprev)==1: # does not account for neighbours across sipms
1008 neighbour = True
1009 tmp.append(c)
1010 if not neighbour or c==hitList[last]:
1011 first = tmp[0]
1012 N = len(tmp)
1013 hitvector.clear()
1014 for aHit in tmp: hitvector.push_back( self.ScifiHits[hitDict[aHit]])
1015 aCluster = ROOT.sndCluster(first,N,hitvector,self.scifiDet,False)
1016 clusters.append(aCluster)
1017 if c!=hitList[last]:
1018 ncl+=1
1019 tmp = [c]
1020 elif not neighbour : # save last channel
1021 hitvector.clear()
1022 hitvector.push_back(self.ScifiHits[hitDict[c]])
1023 aCluster = ROOT.sndCluster(c,1,hitvector,self.scifiDet,False)
1024 clusters.append(aCluster)
1025 cprev = c
1026 self.clusScifi.Delete()
1027 for c in clusters:
1028 self.clusScifi.Add(c)
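# --- Editor's illustrative sketch (not part of the original file). ---
# scifiCluster() groups valid SciFi hits with consecutive detector IDs into
# sndCluster objects. With toy IDs {1000000, 1000001, 1000002, 1000010} it would
# build one 3-channel cluster starting at 1000000 and one single-channel cluster
# at 1000010; neighbours across SiPMs are not merged, as noted in the comment
# in the loop above.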