SND@LHC Software
SndlhcMuonReco.py
1import ROOT
2import numpy as np
3import scipy.ndimage
4from array import array
5import xml.etree.ElementTree as ET
6import matplotlib.pyplot as plt
7import shipunit as unit
8
9def hit_finder(slope, intercept, box_centers, box_ds, tol = 0.) :
10 """ Finds hits intersected by Hough line """
11
12 # First check if track at center of box is within box limits
13 d = np.abs(box_centers[0,:,1] - (box_centers[0,:,0]*slope + intercept))
14 center_in_box = d < (box_ds[0,:,1]+tol)/2.
15
16 # Now check whether, if the line misses the box at the box center, its slope is large enough for the line to clip the box at a corner
17 clips_corner = np.abs(slope) > np.abs((d - (box_ds[0,:,1]+tol)/2.)/(box_ds[0,:,0]+tol)/2.)
18
19 # If either of these is true, line goes through hit:
20 hit_mask = np.logical_or(center_in_box, clips_corner)
21
22 # Return indices
23 return np.where(hit_mask)[0]
24
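# Illustrative sketch (not part of the original file): a standalone check of hit_finder()
# on three toy hit boxes. box_centers and box_ds hold (z, y) pairs with shape (1, n_hits, 2),
# matching the np.dstack(...) layout used in MuonReco.Exec() further below; all numbers are arbitrary.
def _hit_finder_example() :
    box_centers = np.dstack([[300., 320., 340.], [10., 12., 30.]])  # (z, y) box centers, shape (1, 3, 2)
    box_ds = np.dstack([[1., 1., 1.], [1., 1., 1.]])                # (dz, dy) box sizes
    # The line y = 0.1*z - 20 passes through the first two boxes and misses the third,
    # so the returned indices are [0, 1]
    return hit_finder(slope = 0.1, intercept = -20., box_centers = box_centers, box_ds = box_ds, tol = 0.)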
25class hough() :
26 """ Hough transform implementation """
27
28 def __init__(self, n_yH, yH_range, n_xH, xH_range, z_offset, Hformat, space_scale, det_Zlen, squaretheta = False, smooth = True) :
29
30 self.n_yH = n_yH
31 self.n_xH = n_xH
32
33 self.yH_range = yH_range
34 self.xH_range = xH_range
35
36 self.z_offset = z_offset
37 self.HoughSpace_format = Hformat
38 self.space_scale = space_scale
39
40 self.det_Zlen = det_Zlen
41
42 self.smooth = smooth
43
44 self.yH_bins = np.linspace(self.yH_range[0], self.yH_range[1], n_yH)
45 if not squaretheta :
46 self.xH_bins = np.linspace(self.xH_range[0], self.xH_range[1], n_xH)
47 else :
48 self.xH_bins = np.linspace(np.sign(self.xH_range[0])*(self.xH_range[0]**0.5), np.sign(self.xH_range[1])*(self.xH_range[1]**0.5), n_xH)
49 self.xH_bins = np.sign(self.xH_bins)*np.square(self.xH_bins)
50
51 self.cos_thetas = np.cos(self.xH_bins)
52 self.sin_thetas = np.sin(self.xH_bins)
53
54 self.xH_i = np.array(list(range(n_xH)))
55
56 # A back-up Hough space designed to have more or fewer bins than the default one above.
57 # It is useful when fitting some low-energy muon tracks, which are curved due to multiple scattering.
58 self.n_yH_scaled = int(n_yH*space_scale)
59 self.n_xH_scaled = int(n_xH*space_scale)
60 self.yH_bins_scaled = np.linspace(self.yH_range[0], self.yH_range[1], self.n_yH_scaled)
61 if not squaretheta :
62 self.xH_bins_scaled= np.linspace(self.xH_range[0], self.xH_range[1], self.n_xH_scaled)
63 else :
64 self.xH_bins_scaled = np.linspace(np.sign(self.xH_range[0])*(self.xH_range[0]**0.5), np.sign(self.xH_range[1])*(self.xH_range[1]**0.5), self.n_xH_scaled)
65 self.xH_bins_scaled = np.sign(self.xH_bins_scaled)*np.square(self.xH_bins_scaled)
66
67 self.cos_thetas_scaled = np.cos(self.xH_bins_scaled)
68 self.sin_thetas_scaled = np.sin(self.xH_bins_scaled)
69
70 self.xH_i_scaled = np.array(list(range(self.n_xH_scaled)))
71
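# Added note (not part of the original file): fit() below supports three Hough-space
# parametrisations for a hit at (z, y), with z measured from z_offset:
#   'normal'               : yH = z*cos(xH) + y*sin(xH)                  (rho-theta)
#   'linearSlopeIntercept' : yH = y - z*xH                               (xH = slope, yH = intercept at the first plane)
#   'linearIntercepts'     : yH = (det_Zlen*y - z*xH)/(det_Zlen - z)     (xH/yH = intercepts at the last/first plane)
# Each hit votes in every xH bin of the accumulator; the accumulator maximum gives the track parameters.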
72 def fit(self, hit_collection, is_scaled, draw, weights = None) :
73
74 if not is_scaled:
75 n_xH = self.n_xH
76 n_yH = self.n_yH
77 cos_thetas = self.cos_thetas
78 sin_thetas = self.sin_thetas
79 xH_bins = self.xH_bins
80 yH_bins = self.yH_bins
81 xH_i = self.xH_i
82 res = self.res
83 else:
84 n_xH = self.n_xH_scaled
85 n_yH = self.n_yH_scaled
86 cos_thetas = self.cos_thetas_scaled
87 sin_thetas = self.sin_thetas_scaled
88 xH_bins = self.xH_bins_scaled
89 yH_bins = self.yH_bins_scaled
90 xH_i = self.xH_i_scaled
91 res = self.res*self.space_scale
92
93 self.accumulator = np.zeros((n_yH, n_xH))
94 for i_hit, hit in enumerate(hit_collection) :
95 shifted_hitZ = hit[0] - self.z_offset
96 if self.HoughSpace_format == 'normal':
97 hit_yH = shifted_hitZ*cos_thetas + hit[1]*sin_thetas
98 elif self.HoughSpace_format == 'linearSlopeIntercept':
99 hit_yH = hit[1] - shifted_hitZ*xH_bins
100 elif self.HoughSpace_format== 'linearIntercepts':
101 hit_yH = (self.det_Zlen*hit[1] - shifted_hitZ*xH_bins)/(self.det_Zlen - shifted_hitZ)
102 out_of_range = np.logical_and(hit_yH > self.yH_range[0], hit_yH < self.yH_range[1])
103 hit_yH_i = np.floor((hit_yH[out_of_range] - self.yH_range[0])/(self.yH_range[1] - self.yH_range[0])*n_yH).astype(np.int_)
104
105 if weights is not None :
106 self.accumulator[hit_yH_i, xH_i[out_of_range]] += weights[i_hit]
107 else :
108 self.accumulator[hit_yH_i, xH_i[out_of_range]] += 1
109
110 # Smooth accumulator
111 if self.smooth_full :
112 self.accumulator = scipy.ndimage.gaussian_filter(self.accumulator, self.sigma, truncate=self.truncate)
113
114 # This might be useful for debugging, but leave out for now.
115 if draw :
116 plt.figure()
117 plt.imshow(self.accumulator, origin = "lower", extent = [self.xH_range[0], self.xH_range[-1], self.yH_range[0], self.yH_range[-1]], aspect = "auto")
118 if self.HoughSpace_format == 'normal':
119 plt.xlabel(r"$\theta$ [rad]")
120 plt.ylabel("r [cm]")
121 elif self.HoughSpace_format == 'linearSlopeIntercept':
122 plt.xlabel("slope")
123 plt.ylabel("intercept @ 1st plane [cm]")
124 elif self.HoughSpace_format == 'linearIntercepts':
125 plt.xlabel("intercept @ last plane [cm]")
126 plt.ylabel("intercept @ 1st plane [cm]")
127 plt.tight_layout()
128 plt.show()
129
130 if self.smooth_full:
131 # In case of multiple occurrences of the maximum value, argmax returns
132 # the indices corresponding to the first occurrence (along the 1st axis).
133 i_max = np.unravel_index(self.accumulator.argmax(), self.accumulator.shape)
134 else:
135 # In case there is more than one bin with the maximal number of entries, check whether the n-th quantile of the
136 # found peaks along the 'slope' axis (xH axis) encloses an <n-th quantile> portion of all maxima 'slope' bins
137 # within a specified range. It is advisable that this range is consistent with
138 # the detector angular resolution.
139 maxima = np.argwhere(self.accumulator == np.amax(self.accumulator))
140 if len(maxima) == 1:
141 i_max = maxima[0]
142 elif len(maxima)==0 or (len(maxima) > 1 and self.HoughSpace_format == 'linearIntercepts'):
143 # if there is no reasonable way to select between maxima, force a track-build failure
144 # what to do in the 'linearIntercepts' case with multiple maxima is still to be decided
145 return(-999, -999)
146 elif len(maxima) > 1 and abs(min([x[1] for x in maxima]) - max([x[1] for x in maxima])) < res:
147 i_max = maxima[0]
148 else:
149 # FIXME: the next two lines can be a single line command for sure
150 maxima_slopesAxis_list = list([x[1] for x in maxima])
151 maxima_slopesAxis_list = np.asarray(maxima_slopesAxis_list)
152 quantile = np.quantile(maxima_slopesAxis_list, self.n_quantile)
153 Nwithin = 0
154 # FIXME: a more elegant 'hidden' loop is maybe possible here too
155 for item in maxima:
156 if abs(item[1]-quantile)< res:
157 Nwithin += 1
158 if Nwithin/len(maxima) > self.n_quantile:
159 i_x = min([x[1] for x in maxima], key=lambda b: abs(b-quantile))
160 for im in maxima:
161 if im[1] == i_x : i_max = im
162 else:
163 # if there is no reasonable way to select between maxima, force a track-build failure
164 return(-999, -999)
165
166 found_yH = yH_bins[int(i_max[0])]
167 found_xH = xH_bins[int(i_max[1])]
168
169 if self.HoughSpace_format == 'normal':
170 slope = -1./np.tan(found_xH)
171 interceptShift = found_yH/np.sin(found_xH)
172 intercept = (np.tan(found_xH)*interceptShift + self.z_offset)/np.tan(found_xH)
173 elif self.HoughSpace_format == 'linearSlopeIntercept':
174 slope = found_xH
175 intercept = found_yH - slope*self.z_offset
176 elif self.HoughSpace_format == 'linearIntercepts':
177 slope = (found_xH - found_yH)/self.det_Zlen
178 intercept = found_yH - slope*self.z_offset
179
180 return (slope, intercept)
181
182 def fit_randomize(self, hit_collection, hit_d, n_random, is_scaled, draw, weights = None) :
183 success = True
184 if not len(hit_collection) :
185 return (-1, -1, [[],[]], [], False)
186
187 # Randomize hits
188 if (n_random > 0) :
189 random_hit_collection = []
190 for i_random in range(n_random) :
191 random_hits = np.random.uniform(size = hit_collection.shape) - 0.5
192 random_hits *= hit_d
193 random_hits += hit_collection
194 random_hit_collection.append(random_hits)
195
196 random_hit_collection = np.concatenate(random_hit_collection)
197 if weights is not None :
198 weights = np.tile(weights, n_random)
199
200 fit = self.fit(random_hit_collection, is_scaled, draw, weights)
201 else :
202 fit = self.fit(hit_collection, is_scaled, draw, weights)
203
204 return fit
205
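# Illustrative sketch (not part of the original file): standalone use of the hough class in the
# 'linearSlopeIntercept' format on three collinear toy hits. The attribute assignments mirror what
# MuonReco.Init() does below; all numbers are arbitrary toy values.
def _hough_example() :
    h = hough(n_yH = 100, yH_range = [-50., 50.], n_xH = 100, xH_range = [-1., 1.],
              z_offset = 0., Hformat = 'linearSlopeIntercept', space_scale = 1., det_Zlen = 100.)
    # parameters normally read from the xml file and set on the hough objects by MuonReco.Init()
    h.smooth_full = False
    h.sigma = 1
    h.truncate = 3
    h.res = 1
    h.n_quantile = 0.9
    # three collinear (z, y) hits generated with y = 0.2*z + 5
    hits = np.array([[0., 5.], [50., 15.], [100., 25.]])
    # recovers the generated slope (0.2) and intercept (5) up to the bin width
    return h.fit(hits, is_scaled = False, draw = False)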
206def numPlanesHit(systems, detector_ids) :
207 scifi_stations = []
208 mufi_ds_planes = []
209 mufi_us_planes = []
210
211 scifi_stations.append( detector_ids[systems == 0]//1000000 )
212 mufi_ds_planes.append( (detector_ids[systems == 3]%10000)//1000 )
213 mufi_us_planes.append( (detector_ids[systems == 2]%10000)//1000 )
214
215 return len(np.unique(scifi_stations)) + len(np.unique(mufi_ds_planes)) + len(np.unique(mufi_us_planes))
216
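# Illustrative sketch (not part of the original file): numPlanesHit() counts unique planes using the
# integer encoding of the detector IDs (SciFi station in the millions digit, MuFilter plane in the
# thousands digit). The IDs below are toy values that follow this encoding.
def _numPlanesHit_example() :
    systems      = np.array([0, 0, 3, 3, 2])     # 0 = SciFi, 2 = US, 3 = DS
    detector_ids = np.array([1012001, 2110003,   # SciFi stations 1 and 2
                             31003, 32010,       # DS planes 1 and 2
                             21005])             # US plane 1
    return numPlanesHit(systems, detector_ids)   # 2 + 2 + 1 = 5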
217class MuonReco(ROOT.FairTask) :
218 " Muon reconstruction "
219
220 def Init(self) :
221
222 self.logger = ROOT.FairLogger.GetLogger()
223 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
224 print("Initializing muon reconstruction task!")
225
226 self.lsOfGlobals = ROOT.gROOT.GetListOfGlobals()
227 self.scifiDet = self.lsOfGlobals.FindObject('Scifi')
228 self.mufiDet = self.lsOfGlobals.FindObject('MuFilter')
229 self.ioman = ROOT.FairRootManager.Instance()
230
231 # Pass input data through to output.
232 # self.Passthrough()
233
234 # MC or data - needed for hit timing unit
235 if self.ioman.GetInTree().GetName() == 'rawConv': self.isMC = False
236 else: self.isMC = True
237
238 # Fetch digi hit collections from the online sink, if they exist
239 sink = self.ioman.GetSink()
240 eventTree = None
241 if sink: eventTree = sink.GetOutTree()
242 if eventTree:
243 self.MuFilterHits = eventTree.Digi_MuFilterHits
244 self.ScifiHits = eventTree.Digi_ScifiHits
245 self.EventHeader = eventTree.EventHeader
246 else:
247 # Use standard ROOT to access branches and not the FairRoot way
248 # of getting objects from the i/o manager. The latter was initially
249 # broken in v19 and fixed in a patch release.
250 self.MuFilterHits = self.ioman.GetInTree().Digi_MuFilterHits
251 self.ScifiHits = self.ioman.GetInTree().Digi_ScifiHits
252 self.EventHeader = self.ioman.GetInTree().EventHeader
253
254 if self.MuFilterHits == None :
255 raise RuntimeError("Digi_MuFilterHits not found in input file.")
256 if self.ScifiHits == None :
257 raise RuntimeError("Digi_ScifiHits not found in input file.")
258 if self.EventHeader == None :
259 raise RuntimeError("EventHeader not found in input file.")
260
261 # Initialize event counters in case scaling of events is required
262 self.scale = 1
263 self.events_run = 0
264
265 # Initialize hough transform - reading parameter xml file
266 tree = ET.parse(self.par_file)
267 root = tree.getroot()
268
269 # Output track in genfit::Track or sndRecoTrack format
270 # Check if genfit::Track format is already forced
271 if hasattr(self, "genfitTrack"): pass
272 else: self.genfitTrack = int(root[0].text)
273
274 self.draw = int(root[1].text)
275
276 track_case_exists = False
277 for case in root.findall('tracking_case'):
278 if case.get('name') == self.tracking_case:
279 track_case_exists = True
280 # Use SciFi hits or clusters
281 self.Scifi_meas = int(case.find('use_Scifi_clust').text)
282 # Maximum absolute value of reconstructed angle (+/- 1 rad is the maximum angle to form a triplet in the SciFi)
283 max_angle = float(case.find('max_angle').text)
284
285 # Hough space representation
286 Hspace_format_exists = False
287 for rep in case.findall('Hough_space_format'):
288 if rep.get('name') == self.Hough_space_format:
289 Hspace_format_exists = True
290 # Number of bins per Hough accumulator axes and range
291 ''' xH and yH are the abscissa and ordinate of the Hough parameter space
292 xz and yz represent horizontal and vertical projections
293 in the SNDLHC physics coord. system '''
294 n_accumulator_yH = int(rep.find('N_yH_bins').text)
295 yH_min_xz = float(rep.find('yH_min_xz').text)
296 yH_max_xz = float(rep.find('yH_max_xz').text)
297 yH_min_yz = float(rep.find('yH_min_yz').text)
298 yH_max_yz = float(rep.find('yH_max_yz').text)
299 n_accumulator_xH = int(rep.find('N_xH_bins').text)
300 xH_min_xz = float(rep.find('xH_min_xz').text)
301 xH_max_xz = float(rep.find('xH_max_xz').text)
302 xH_min_yz = float(rep.find('xH_min_yz').text)
303 xH_max_yz = float(rep.find('xH_max_yz').text)
304
305 else: continue
306 if not Hspace_format_exists:
307 raise RuntimeError("Unknown Hough space format, check naming in parameter xml file.")
308
309 # A scale factor for a back-up Hough space having more or fewer bins than the default one.
310 # It is useful when fitting some low-energy muon tracks, which are curved due to multiple scattering.
311 self.HT_space_scale = 1 if case.find('HT_space_scale')==None else float(case.find('HT_space_scale').text)
312
313 # Number of random throws per hit
314 self.n_random = int(case.find('n_random').text)
315 # MuFilter weight. Muon filter hits are thrown more times than scifi
316 self.muon_weight = int(case.find('mufi_weight').text)
317 # Minimum number of planes hit in each of the downstream muon filter (if muon filter hits used) or scifi (if muon filter hits not used) views to try to reconstruct a muon
318 self.min_planes_hit = int(case.find('min_planes_hit').text)
319
320 # Maximum number of muons to find. To avoid spending too much time on events with lots of downstream activity.
321 self.max_reco_muons = int(case.find('max_reco_muons').text)
322
323 # How far away from Hough line hits will be assigned to the muon, for Kalman tracking
324 self.tolerance = float(case.find('tolerance').text)
325
326 # Which hits to use for track fitting.
327 self.hits_to_fit = case.find('hits_to_fit').text.strip()
328 # Which hits to use for triplet condition.
329 self.hits_for_triplet = case.find('hits_for_hough').text.strip() if case.find('hits_to_validate')==None else case.find('hits_to_validate').text.strip()
330
331 # Detector plane masking. If the flag is active, a plane will be masked if its N_hits > Nhits_per_plane.
332 # In any case, plane masking is only applied if solely Scifi hits are used in the HT, as it is
333 # a measure against having many maxima in the HT space.
334 self.mask_plane = int(case.find('mask_plane').text)
335 self.Nhits_per_plane = int(case.find('Nhits_per_plane').text)
336
337 # Enable Gaussian smoothing over the full accumulator space.
338 self.smooth_full = int(case.find('smooth_full').text)
339 # Gaussian smoothing parameters. The kernel size is determined as 2*int(truncate*sigma+0.5)+1
340 self.sigma = int(case.find('sigma').text)
341 self.truncate = int(case.find('truncate').text)
342 # Helpers to pick up one of many HT space maxima
343 self.n_quantile = float(case.find('n_quantile').text)
344 self.res = int(case.find('res').text)
345
346 else: continue
347 if not track_case_exists:
348 raise RuntimeError("Unknown tracking case, check naming in parameter xml file.")
349
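# Added note (not part of the original file): a minimal sketch of the parameter-file layout implied by
# the reads above. The tag names inside <tracking_case> match the case.find()/rep.find() calls; the root
# tag and the names of the first two children (accessed positionally as root[0] and root[1]) as well as
# all values are placeholders. Optional tags (HT_space_scale, hits_to_validate) are omitted.
#
#   <tracking_parameters>
#     <track_format>0</track_format>   <!-- root[0]: genfit::Track (1) or sndRecoTrack (0) output -->
#     <draw>0</draw>                   <!-- root[1]: draw the accumulator -->
#     <tracking_case name="...">
#       <use_Scifi_clust>...</use_Scifi_clust> <max_angle>...</max_angle>
#       <Hough_space_format name="...">
#         <N_yH_bins>...</N_yH_bins> <yH_min_xz>...</yH_min_xz> <yH_max_xz>...</yH_max_xz>
#         <yH_min_yz>...</yH_min_yz> <yH_max_yz>...</yH_max_yz>
#         <N_xH_bins>...</N_xH_bins> <xH_min_xz>...</xH_min_xz> <xH_max_xz>...</xH_max_xz>
#         <xH_min_yz>...</xH_min_yz> <xH_max_yz>...</xH_max_yz>
#       </Hough_space_format>
#       <n_random>...</n_random> <mufi_weight>...</mufi_weight> <min_planes_hit>...</min_planes_hit>
#       <max_reco_muons>...</max_reco_muons> <tolerance>...</tolerance>
#       <hits_to_fit>...</hits_to_fit> <hits_for_hough>...</hits_for_hough>
#       <mask_plane>...</mask_plane> <Nhits_per_plane>...</Nhits_per_plane>
#       <smooth_full>...</smooth_full> <sigma>...</sigma> <truncate>...</truncate>
#       <n_quantile>...</n_quantile> <res>...</res>
#     </tracking_case>
#   </tracking_parameters>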
350 # Get sensor dimensions from geometry
351 self.MuFilter_ds_dx = self.mufiDet.GetConfParF("MuFilter/DownstreamBarY") # Assume y dimensions in vertical bars are the same as x dimensions in horizontal bars.
352 self.MuFilter_ds_dy = self.mufiDet.GetConfParF("MuFilter/DownstreamBarY") # Assume y dimensions in vertical bars are the same as x dimensions in horizontal bars.
353 self.MuFilter_ds_dz = self.mufiDet.GetConfParF("MuFilter/DownstreamBarZ")
354
355 self.MuFilter_us_dx = self.mufiDet.GetConfParF("MuFilter/UpstreamBarX")
356 self.MuFilter_us_dy = self.mufiDet.GetConfParF("MuFilter/UpstreamBarY")
357 self.MuFilter_us_dz = self.mufiDet.GetConfParF("MuFilter/UpstreamBarZ")
358
359 self.Scifi_dx = self.scifiDet.GetConfParF("Scifi/channel_width")
360 self.Scifi_dy = self.scifiDet.GetConfParF("Scifi/channel_width")
361 self.Scifi_dz = self.scifiDet.GetConfParF("Scifi/epoxymat_z") # From Scifi.cxx: this is the variable used to define the z dimension of SiPM channels, so it seems like the right dimension to use.
362
363 # Get number of readout channels
364 self.MuFilter_us_nSiPMs = self.mufiDet.GetConfParI("MuFilter/UpstreamnSiPMs")*self.mufiDet.GetConfParI("MuFilter/UpstreamnSides")
365 self.MuFilter_ds_nSiPMs_hor = self.mufiDet.GetConfParI("MuFilter/DownstreamnSiPMs")*self.mufiDet.GetConfParI("MuFilter/DownstreamnSides")
366 self.MuFilter_ds_nSiPMs_vert = self.mufiDet.GetConfParI("MuFilter/DownstreamnSiPMs")
367
368 self.Scifi_nPlanes = self.scifiDet.GetConfParI("Scifi/nscifi")
369 self.DS_nPlanes = self.mufiDet.GetConfParI("MuFilter/NDownstreamPlanes")
373
374 # Get the distance between the 1st and last detector planes to be used in the track fit.
375 # A z_offset is used to shift the detector hits so as to have a smaller Hough parameter space.
376 # Using geometers' measurements! For safety, add a 5-cm buffer to the detector lengths and a 2.5-cm one to the z_offset.
377 # This is done to account for possible detector position shifts/mismatches between the geometrical measurements and the sndsw physics coordinate system.
378 if self.hits_for_triplet.find('sf') >= 0 and self.hits_for_triplet.find('ds') >= 0:
379 det_Zlen = (self.mufiDet.GetConfParF("MuFilter/Muon9Dy") - self.scifiDet.GetConfParF("Scifi/Ypos0"))*unit.cm + 5.0*unit.cm
380 z_offset = self.scifiDet.GetConfParF("Scifi/Ypos0")*unit.cm - 2.5*unit.cm
381 elif self.hits_for_triplet == 'sf':
382 det_Zlen = (self.scifiDet.GetConfParF("Scifi/Ypos4") - self.scifiDet.GetConfParF("Scifi/Ypos0"))*unit.cm + 5.0*unit.cm
383 z_offset = self.scifiDet.GetConfParF("Scifi/Ypos0")*unit.cm - 2.5*unit.cm
384 elif self.hits_for_triplet == 'ds':
385 det_Zlen = (self.mufiDet.GetConfParF("MuFilter/Muon9Dy") - self.mufiDet.GetConfParF("MuFilter/Muon6Dy"))*unit.cm + 5.0*unit.cm
386 z_offset = self.mufiDet.GetConfParF("MuFilter/Muon6Dy")*unit.cm - 2.5*unit.cm
387 # this use case is not tested with a z offset yet
388 if self.tracking_case.find('nu_') >= 0: z_offset = 0*unit.cm
389 # other use cases go here if ever added
390
391 # Initialize Hough transforms for both views:
392 if self.Hough_space_format == 'normal':
393 # rho-theta representation - must exclude theta range for tracks passing < 3 det. planes
394 self.h_ZX = hough(n_accumulator_yH, [yH_min_xz, yH_max_xz], n_accumulator_xH, [-max_angle+np.pi/2., max_angle+np.pi/2.], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
395 self.h_ZY = hough(n_accumulator_yH, [yH_min_yz, yH_max_yz], n_accumulator_xH, [-max_angle+np.pi/2., max_angle+np.pi/2.], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
396 else:
397 self.h_ZX = hough(n_accumulator_yH, [yH_min_xz, yH_max_xz], n_accumulator_xH, [xH_min_xz, xH_max_xz], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
398 self.h_ZY = hough(n_accumulator_yH, [yH_min_yz, yH_max_yz], n_accumulator_xH, [xH_min_yz, xH_max_yz], z_offset, self.Hough_space_format, self.HT_space_scale, det_Zlen)
399
400 self.h_ZX.smooth_full = self.smooth_full
401 self.h_ZY.smooth_full = self.smooth_full
402 self.h_ZX.sigma = self.sigma
403 self.h_ZX.truncate = self.truncate
404 self.h_ZY.sigma = self.sigma
405 self.h_ZY.truncate = self.truncate
406
407 self.h_ZX.n_quantile = self.n_quantile
408 self.h_ZX.res = self.res
409 self.h_ZY.n_quantile = self.n_quantile
410 self.h_ZY.res = self.res
411
412 if self.hits_to_fit == "sf" : self.track_type = 11
413 elif self.hits_to_fit == "ds": self.track_type = 13
414 else : self.track_type = 15
415
416 # To keep temporary detector information
417 self.a = ROOT.TVector3()
418 self.b = ROOT.TVector3()
419
420 # check if track container exists
421 if self.ioman.GetObject('Reco_MuonTracks') != None:
422 self.kalman_tracks = self.ioman.GetObject('Reco_MuonTracks')
423 if self.logger.IsLogNeeded(ROOT.fair.Severity.info):
424 print('Branch activated by another task!')
425 else:
426 # Now initialize output in genfit::track or sndRecoTrack format
427 if self.genfitTrack:
428 self.kalman_tracks = ROOT.TObjArray(10)
429 if hasattr(self, "standalone") and self.standalone:
430 self.ioman.Register("Reco_MuonTracks", self.ioman.GetFolderName(), self.kalman_tracks, ROOT.kTRUE)
431 else:
432 self.kalman_tracks = ROOT.TClonesArray("sndRecoTrack", 10)
433 if hasattr(self, "standalone") and self.standalone:
434 self.ioman.Register("Reco_MuonTracks", "", self.kalman_tracks, ROOT.kTRUE)
435
436 # initialize the detector classes with the EventHeader (run number), if an SNDLHCEventHeader is detected;
437 # only needed when using the HT tracking manager, i.e. standalone
438 if self.EventHeader.IsA().GetName()=='SNDLHCEventHeader' and hasattr(self, "standalone") and self.standalone and not self.isMC :
439 self.scifiDet.InitEvent(self.EventHeader)
440 self.mufiDet.InitEvent(self.EventHeader)
441
442 # internal storage of clusters
443 if self.Scifi_meas: self.clusScifi = ROOT.TObjArray(100)
444
445 # Kalman filter stuff
446
447 geoMat = ROOT.genfit.TGeoMaterialInterface()
448 bfield = ROOT.genfit.ConstField(0, 0, 0)
449 fM = ROOT.genfit.FieldManager.getInstance()
450 fM.init(bfield)
451 ROOT.genfit.MaterialEffects.getInstance().init(geoMat)
452 ROOT.genfit.MaterialEffects.getInstance().setNoEffects()
453
454 self.kalman_fitter = ROOT.genfit.KalmanFitter()
455 self.kalman_fitter.setMaxIterations(50)
459
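# Added note (not part of the original file): with ConstField(0, 0, 0) and MaterialEffects.setNoEffects(),
# the genfit Kalman fit configured above effectively uses a straight-line track model (no bending and no
# multiple-scattering noise), so the momentum magnitude is not constrained by the fit.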
460 # Init() MUST return int
461 return 0
462
463 def SetScaleFactor(self, scale):
464 self.scale = scale
465
466 def SetParFile(self, file_name):
467 self.par_file = file_name
468
469 def SetTrackingCase(self, case):
470 self.tracking_case = case
471
472 def SetHoughSpaceFormat(self, Hspace_format):
473 self.Hough_space_format = Hspace_format
474
475 def ForceGenfitTrackFormat(self):
476 self.genfitTrack = 1
477
478 # flag showing that the task is run separately from other tracking tasks
479 def SetStandalone(self):
480 self.standalone = 1
481
482 def Passthrough(self) :
483 T = self.ioman.GetInTree()
484
485 for x in T.GetListOfBranches():
486 obj_name = x.GetName()
487 if self.ioman.GetObject(obj_name) == None :
488 continue
489 self.ioman.Register(obj_name, self.ioman.GetFolderName(), self.ioman.GetObject(obj_name), ROOT.kTRUE)
490
491 def Exec(self, opt) :
492 self.kalman_tracks.Clear('C')
493
494 # Set scaling in case the task is run separately from other tracking tasks
495 if self.scale>1 and self.standalone:
496 if ROOT.gRandom.Rndm() > 1.0/self.scale: return
497
498 self.events_run += 1
499 hit_collection = {"pos" : [[], [], []],
500 "d" : [[], [], []],
501 "vert" : [],
502 "index" : [],
503 "system" : [],
504 "detectorID" : [],
505 "B" : [[], [], []],
506 "time": [],
507 "mask": []}
508
509 if ("us" in self.hits_to_fit) or ("ds" in self.hits_to_fit) or ("ve" in self.hits_to_fit) :
510 # Loop through muon filter hits
511 for i_hit, muFilterHit in enumerate(self.MuFilterHits) :
512 # Skip veto hits unless they are requested for fitting
513 if muFilterHit.GetSystem() == 1 :
514 if "ve" not in self.hits_to_fit :
515 continue
516 elif muFilterHit.GetSystem() == 2 :
517 if "us" not in self.hits_to_fit :
518 continue
519 elif muFilterHit.GetSystem() == 3 :
520 if "ds" not in self.hits_to_fit :
521 continue
522 else :
523 if self.logger.IsLogNeeded(ROOT.fair.Severity.warn):
524 print("WARNING! Unknown MuFilter system!!")
525
526 self.mufiDet.GetPosition(muFilterHit.GetDetectorID(), self.a, self.b)
527
528 hit_collection["pos"][0].append(self.a.X())
529 hit_collection["pos"][1].append(self.a.Y())
530 hit_collection["pos"][2].append(self.a.Z())
531
532 hit_collection["B"][0].append(self.b.X())
533 hit_collection["B"][1].append(self.b.Y())
534 hit_collection["B"][2].append(self.b.Z())
535
536 hit_collection["vert"].append(muFilterHit.isVertical())
537 hit_collection["system"].append(muFilterHit.GetSystem())
538
539 hit_collection["d"][0].append(self.MuFilter_ds_dx)
540 hit_collection["d"][2].append(self.MuFilter_ds_dz)
541
542 hit_collection["index"].append(i_hit)
543
544 hit_collection["detectorID"].append(muFilterHit.GetDetectorID())
545 hit_collection["mask"].append(False)
546
547 times = []
548 # Downstream
549 if muFilterHit.GetSystem() == 3 :
550 hit_collection["d"][1].append(self.MuFilter_ds_dx)
551 for ch in range(self.MuFilter_ds_nSiPMs_hor):
552 if muFilterHit.isVertical() and ch==self.MuFilter_ds_nSiPMs_vert: break
553 if self.isMC:
554 times.append(muFilterHit.GetAllTimes()[ch]) #already in ns
555 else: times.append(muFilterHit.GetAllTimes()[ch]*6.25) #tdc2ns
556 # Upstream
557 else :
558 hit_collection["d"][1].append(self.MuFilter_us_dy)
559 for ch in range(self.MuFilter_us_nSiPMs):
560 if self.isMC:
561 times.append(muFilterHit.GetAllTimes()[ch]) #already in ns
562 else: times.append(muFilterHit.GetAllTimes()[ch]*6.25) #tdc2ns
563 hit_collection["time"].append(times)
564
565 if "sf" in self.hits_to_fit :
566 if self.Scifi_meas:
567 # Make scifi clusters
568 self.clusScifi.Clear()
569 self.scifiCluster()
570
571 # Loop through scifi clusters
572 for i_clust, scifiCl in enumerate(self.clusScifi) :
573 scifiCl.GetPosition(self.a,self.b)
574
575 hit_collection["pos"][0].append(self.a.X())
576 hit_collection["pos"][1].append(self.a.Y())
577 hit_collection["pos"][2].append(self.a.Z())
578
579 hit_collection["B"][0].append(self.b.X())
580 hit_collection["B"][1].append(self.b.Y())
581 hit_collection["B"][2].append(self.b.Z())
582
583 # take the cluster size as the active area size
584 hit_collection["d"][0].append(scifiCl.GetN()*self.Scifi_dx)
585 hit_collection["d"][1].append(scifiCl.GetN()*self.Scifi_dy)
586 hit_collection["d"][2].append(self.Scifi_dz)
587
588 if int(scifiCl.GetFirst()/100000)%10==1:
589 hit_collection["vert"].append(True)
590 else: hit_collection["vert"].append(False)
591 hit_collection["index"].append(i_clust)
592
593 hit_collection["system"].append(0)
594 hit_collection["detectorID"].append(scifiCl.GetFirst())
595 hit_collection["mask"].append(False)
596
597 times = []
598 if self.isMC : times.append(scifiCl.GetTime()/6.25) # for MC the hit time is already in ns, so the Scifi cluster time has to be divided by the tdc2ns factor
599 else: times.append(scifiCl.GetTime()) # already in ns
600 hit_collection["time"].append(times)
601
602 else:
603 if self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf':
604 # Loop through scifi hits and count hits per projection and plane
605 N_plane_ZY = {1:0, 2:0, 3:0, 4:0, 5:0}
606 N_plane_ZX = {1:0, 2:0, 3:0, 4:0, 5:0}
607 for scifiHit in self.ScifiHits:
608 if not scifiHit.isValid(): continue
609 if scifiHit.isVertical():
610 N_plane_ZX[scifiHit.GetStation()] += 1
611 else:
612 N_plane_ZY[scifiHit.GetStation()] += 1
613 if self.mask_plane:
614 mask_plane_ZY = []
615 mask_plane_ZX = []
616 # sorting
617 N_plane_ZY = dict(sorted(N_plane_ZY.items(), key=lambda item: item[1], reverse = True))
618 N_plane_ZX = dict(sorted(N_plane_ZX.items(), key=lambda item: item[1], reverse = True))
619 # count planes with hits
620 n_zx = self.Scifi_nPlanes - list(N_plane_ZX.values()).count(0)
621 n_zy = self.Scifi_nPlanes - list(N_plane_ZY.values()).count(0)
622 # check with min number of hit planes
623 if n_zx < self.min_planes_hit or n_zy < self.min_planes_hit: return
624 # mask the busiest planes until at least min_planes_hit planes with hits are left
625 for ii in range(n_zx-self.min_planes_hit):
626 if list(N_plane_ZX.values())[ii] > self.Nhits_per_plane:
627 mask_plane_ZX.append(list(N_plane_ZX.keys())[ii])
628 for ii in range(n_zy-self.min_planes_hit):
629 if list(N_plane_ZY.values())[ii] > self.Nhits_per_plane:
630 mask_plane_ZY.append(list(N_plane_ZY.keys())[ii])
631
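# Added note (not part of the original file): a toy example of the masking logic above, assuming
# min_planes_hit = 3 and Nhits_per_plane = 10 (placeholder values):
#   N_plane_ZX = {1: 40, 2: 3, 3: 2, 4: 1, 5: 0} -> 4 planes with hits (n_zx = 4), so at most
#   n_zx - min_planes_hit = 1 plane may be masked; plane 1 has 40 > 10 hits and is added to
#   mask_plane_ZX, leaving 3 unmasked planes with hits.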
632 # Loop through scifi hits
633 for i_hit, scifiHit in enumerate(self.ScifiHits) :
634 if not scifiHit.isValid(): continue
635 self.scifiDet.GetSiPMPosition(scifiHit.GetDetectorID(), self.a, self.b)
636 hit_collection["pos"][0].append(self.a.X())
637 hit_collection["pos"][1].append(self.a.Y())
638 hit_collection["pos"][2].append(self.a.Z())
639
640 hit_collection["B"][0].append(self.b.X())
641 hit_collection["B"][1].append(self.b.Y())
642 hit_collection["B"][2].append(self.b.Z())
643
644 hit_collection["d"][0].append(self.Scifi_dx)
645 hit_collection["d"][1].append(self.Scifi_dy)
646 hit_collection["d"][2].append(self.Scifi_dz)
647
648 hit_collection["vert"].append(scifiHit.isVertical())
649 hit_collection["index"].append(i_hit)
650
651 hit_collection["system"].append(0)
652
653 hit_collection["detectorID"].append(scifiHit.GetDetectorID())
654
655 if self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf' and self.mask_plane:
656 if (scifiHit.isVertical()==0 and scifiHit.GetStation() in mask_plane_ZY) or (scifiHit.isVertical() and scifiHit.GetStation() in mask_plane_ZX):
657 hit_collection["mask"].append(True)
658 else: hit_collection["mask"].append(False)
659 else:
660 hit_collection["mask"].append(False)
661
662 times = []
663
664 if self.isMC : times.append(scifiHit.GetTime()) # already in ns
665 else: times.append(scifiHit.GetTime()*6.25) #tdc2ns
666 hit_collection["time"].append(times)
667
668 # If no hits, return
669 if len(hit_collection['pos'][0])==0: return
670
671 # Make the hit collection numpy arrays.
672 for key, item in hit_collection.items() :
673 if key == 'vert' :
674 this_dtype = np.bool_
675 elif key == 'mask' :
676 this_dtype = np.bool_
677 elif key == "index" or key == "system" or key == "detectorID" :
678 this_dtype = np.int32
679 elif key != 'time' :
680 this_dtype = np.float32
681 if key== 'time':
682 length = max(map(len, item))
683 hit_collection[key] = np.stack(np.array([xi+[None]*(length-len(xi)) for xi in item]), axis = 1)
684 else: hit_collection[key] = np.array(item, dtype = this_dtype)
685
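# Added note (not part of the original file): the per-hit 'time' lists can have different lengths
# (e.g. DS vertical bars are read out by fewer SiPM channels than US bars), so they are padded with
# None and stacked into an object array of shape (max_n_channels, n_hits), e.g.
#   [[4.1], [3.9, 4.4]]  ->  array([[4.1, 3.9], [None, 4.4]], dtype=object)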
686 # Useful for later
687 triplet_condition_system = []
688 if "sf" in self.hits_for_triplet :
689 triplet_condition_system.append(0)
690 if "ve" in self.hits_for_triplet :
691 triplet_condition_system.append(1)
692 if "us" in self.hits_for_triplet :
693 triplet_condition_system.append(2)
694 if "ds" in self.hits_for_triplet :
695 triplet_condition_system.append(3)
696
697 # Reconstruct muons until there are not enough hit planes left, or max_reco_muons is reached
698 for i_muon in range(self.max_reco_muons) :
699
700 triplet_hits_horizontal = np.logical_and( ~hit_collection["vert"],
701 np.isin(hit_collection["system"], triplet_condition_system) )
702 triplet_hits_vertical = np.logical_and( hit_collection["vert"],
703 np.isin(hit_collection["system"], triplet_condition_system) )
704
705 n_planes_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal],
706 hit_collection["detectorID"][triplet_hits_horizontal])
707 if n_planes_ZY < self.min_planes_hit :
708 break
709
710 n_planes_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical],
711 hit_collection["detectorID"][triplet_hits_vertical])
712 if n_planes_ZX < self.min_planes_hit :
713 break
714
715 # Get hits in hough transform format
716 muon_hits_horizontal = np.logical_and( np.logical_and( ~hit_collection["vert"], ~hit_collection["mask"]),
717 np.isin(hit_collection["system"], [1, 2, 3]))
718 muon_hits_vertical = np.logical_and( np.logical_and( hit_collection["vert"], ~hit_collection["mask"]),
719 np.isin(hit_collection["system"], [1, 2, 3]))
720 scifi_hits_horizontal = np.logical_and( np.logical_and( ~hit_collection["vert"], ~hit_collection["mask"]),
721 np.isin(hit_collection["system"], [0]))
722 scifi_hits_vertical = np.logical_and( np.logical_and( hit_collection["vert"], ~hit_collection["mask"]),
723 np.isin(hit_collection["system"], [0]))
724
725
726 ZY = np.dstack([np.concatenate([np.tile(hit_collection["pos"][2][muon_hits_horizontal], self.muon_weight),
727 hit_collection["pos"][2][scifi_hits_horizontal]]),
728 np.concatenate([np.tile(hit_collection["pos"][1][muon_hits_horizontal], self.muon_weight),
729 hit_collection["pos"][1][scifi_hits_horizontal]])])[0]
730
731 d_ZY = np.dstack([np.concatenate([np.tile(hit_collection["d"][2][muon_hits_horizontal], self.muon_weight),
732 hit_collection["d"][2][scifi_hits_horizontal]]),
733 np.concatenate([np.tile(hit_collection["d"][1][muon_hits_horizontal], self.muon_weight),
734 hit_collection["d"][1][scifi_hits_horizontal]])])[0]
735
736 ZX = np.dstack([np.concatenate([np.tile(hit_collection["pos"][2][muon_hits_vertical], self.muon_weight),
737 hit_collection["pos"][2][scifi_hits_vertical]]),
738 np.concatenate([np.tile(hit_collection["pos"][0][muon_hits_vertical], self.muon_weight),
739 hit_collection["pos"][0][scifi_hits_vertical]])])[0]
740
741 d_ZX = np.dstack([np.concatenate([np.tile(hit_collection["d"][2][muon_hits_vertical], self.muon_weight),
742 hit_collection["d"][2][scifi_hits_vertical]]),
743 np.concatenate([np.tile(hit_collection["d"][0][muon_hits_vertical], self.muon_weight),
744 hit_collection["d"][0][scifi_hits_vertical]])])[0]
745
746 is_scaled = False
747 ZY_hough = self.h_ZY.fit_randomize(ZY, d_ZY, self.n_random, is_scaled, self.draw)
748 ZX_hough = self.h_ZX.fit_randomize(ZX, d_ZX, self.n_random, is_scaled, self.draw)
749
750 tol = self.tolerance
751 # Special treatment for events with low hit occupancy - increase tolerance
752 # For Scifi-only tracks
753 if len(hit_collection["detectorID"]) <= self.max_n_Scifi_hits and self.hits_for_triplet == 'sf' and self.hits_to_fit == 'sf' :
754 # as there are masked Scifi planes, make sure to use hit counts before the masking
755 if max(N_plane_ZX.values()) <= self.max_n_hits_plane and max(N_plane_ZY.values()) <= self.max_n_hits_plane :
756 tol = 5*self.tolerance
757 # for DS-only tracks
758 if self.hits_for_triplet == 'ds' and self.hits_to_fit == 'ds' :
759 # Loop through hits and count hits per projection and plane
760 N_plane_ZY = {0:0, 1:0, 2:0, 3:0}
761 N_plane_ZX = {0:0, 1:0, 2:0, 3:0}
762 for item in range(len(hit_collection["detectorID"])):
763 if hit_collection["vert"][item]: N_plane_ZX[(hit_collection["detectorID"][item]%10000)//1000] += 1
764 else: N_plane_ZY[(hit_collection["detectorID"][item]%10000)//1000] += 1
765 if max(N_plane_ZX.values()) <= self.max_n_hits_plane and max(N_plane_ZY.values()) <= self.max_n_hits_plane :
766 tol = 3*self.tolerance
767
768 # Check if track intersects minimum number of hits in each plane.
769 track_hits_for_triplet_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
770 np.dstack([hit_collection["pos"][2][triplet_hits_horizontal],
771 hit_collection["pos"][1][triplet_hits_horizontal]]),
772 np.dstack([hit_collection["d"][2][triplet_hits_horizontal],
773 hit_collection["d"][1][triplet_hits_horizontal]]), tol)
774
775 track_hits_for_triplet_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
776 np.dstack([hit_collection["pos"][2][triplet_hits_vertical],
777 hit_collection["pos"][0][triplet_hits_vertical]]),
778 np.dstack([hit_collection["d"][2][triplet_hits_vertical],
779 hit_collection["d"][0][triplet_hits_vertical]]), tol)
780
781 n_planes_hit_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal][track_hits_for_triplet_ZY],
782 hit_collection["detectorID"][triplet_hits_horizontal][track_hits_for_triplet_ZY])
783
784 n_planes_hit_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical][track_hits_for_triplet_ZX],
785 hit_collection["detectorID"][triplet_hits_vertical][track_hits_for_triplet_ZX])
786
787 # For failed SciFi track fits in events with little hit activity, try using fewer Hough-space bins
788 if (self.hits_to_fit == 'sf' and len(hit_collection["detectorID"]) <= self.max_n_Scifi_hits and \
789 (n_planes_hit_ZY < self.min_planes_hit or n_planes_hit_ZX < self.min_planes_hit)):
790
791 is_scaled = True
792 ZY_hough = self.h_ZY.fit_randomize(ZY, d_ZY, self.n_random, is_scaled, self.draw)
793 ZX_hough = self.h_ZX.fit_randomize(ZX, d_ZX, self.n_random, is_scaled, self.draw)
794
795 # Check if track intersects minimum number of hits in each plane.
796 track_hits_for_triplet_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
797 np.dstack([hit_collection["pos"][2][triplet_hits_horizontal],
798 hit_collection["pos"][1][triplet_hits_horizontal]]),
799 np.dstack([hit_collection["d"][2][triplet_hits_horizontal],
800 hit_collection["d"][1][triplet_hits_horizontal]]), tol)
801
802 n_planes_hit_ZY = numPlanesHit(hit_collection["system"][triplet_hits_horizontal][track_hits_for_triplet_ZY],
803 hit_collection["detectorID"][triplet_hits_horizontal][track_hits_for_triplet_ZY])
804 if n_planes_hit_ZY < self.min_planes_hit:
805 break
806
807 track_hits_for_triplet_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
808 np.dstack([hit_collection["pos"][2][triplet_hits_vertical],
809 hit_collection["pos"][0][triplet_hits_vertical]]),
810 np.dstack([hit_collection["d"][2][triplet_hits_vertical],
811 hit_collection["d"][0][triplet_hits_vertical]]), tol)
812 n_planes_hit_ZX = numPlanesHit(hit_collection["system"][triplet_hits_vertical][track_hits_for_triplet_ZX],
813 hit_collection["detectorID"][triplet_hits_vertical][track_hits_for_triplet_ZX])
814
815 if n_planes_hit_ZY < self.min_planes_hit or n_planes_hit_ZX < self.min_planes_hit:
816 break
817
818# print("Found {0} downstream ZX planes associated to muon track".format(n_planes_ds_ZX))
819# print("Found {0} downstream ZY planes associated to muon track".format(n_planes_ds_ZY))
820
821 # This time with all the hits, not just those passing the triplet condition.
822 track_hits_ZY = hit_finder(ZY_hough[0], ZY_hough[1],
823 np.dstack([hit_collection["pos"][2][~hit_collection["vert"]],
824 hit_collection["pos"][1][~hit_collection["vert"]]]),
825 np.dstack([hit_collection["d"][2][~hit_collection["vert"]],
826 hit_collection["d"][1][~hit_collection["vert"]]]), tol)
827
828 track_hits_ZX = hit_finder(ZX_hough[0], ZX_hough[1],
829 np.dstack([hit_collection["pos"][2][hit_collection["vert"]],
830 hit_collection["pos"][0][hit_collection["vert"]]]),
831 np.dstack([hit_collection["d"][2][hit_collection["vert"]],
832 hit_collection["d"][0][hit_collection["vert"]]]), tol)
833 # Onto Kalman fitter (based on SndlhcTracking.py)
834 posM = ROOT.TVector3(0, 0, 0.)
835 momM = ROOT.TVector3(0,0,100.) # default track with high momentum
836
837 # approximate covariance
838 covM = ROOT.TMatrixDSym(6)
839 if self.hits_to_fit.find('sf') >= 0:
841 if self.hits_to_fit == 'ds':
843 for i in range(3): covM[i][i] = res*res
844 for i in range(3,6): covM[i][i] = ROOT.TMath.Power(res / (4.*2.) / ROOT.TMath.Sqrt(3), 2)
845 rep = ROOT.genfit.RKTrackRep(13)
846
847 # start state
848 state = ROOT.genfit.MeasuredStateOnPlane(rep)
849 rep.setPosMomCov(state, posM, momM, covM)
850
851 # create track
852 seedState = ROOT.TVectorD(6)
853 seedCov = ROOT.TMatrixDSym(6)
854 rep.get6DStateCov(state, seedState, seedCov)
855
856 theTrack = ROOT.genfit.Track(rep, seedState, seedCov)
857
858 # Sort measurements in Z
859 hit_z = np.concatenate([hit_collection["pos"][2][hit_collection["vert"]][track_hits_ZX],
860 hit_collection["pos"][2][~hit_collection["vert"]][track_hits_ZY]])
861
862 hit_A0 = np.concatenate([hit_collection["pos"][0][hit_collection["vert"]][track_hits_ZX],
863 hit_collection["pos"][0][~hit_collection["vert"]][track_hits_ZY]])
864
865 hit_A1 = np.concatenate([hit_collection["pos"][1][hit_collection["vert"]][track_hits_ZX],
866 hit_collection["pos"][1][~hit_collection["vert"]][track_hits_ZY]])
867
868 hit_B0 = np.concatenate([hit_collection["B"][0][hit_collection["vert"]][track_hits_ZX],
869 hit_collection["B"][0][~hit_collection["vert"]][track_hits_ZY]])
870
871 hit_B1 = np.concatenate([hit_collection["B"][1][hit_collection["vert"]][track_hits_ZX],
872 hit_collection["B"][1][~hit_collection["vert"]][track_hits_ZY]])
873
874 hit_B2 = np.concatenate([hit_collection["B"][2][hit_collection["vert"]][track_hits_ZX],
875 hit_collection["B"][2][~hit_collection["vert"]][track_hits_ZY]])
876
877 hit_detid = np.concatenate([hit_collection["detectorID"][hit_collection["vert"]][track_hits_ZX],
878 hit_collection["detectorID"][~hit_collection["vert"]][track_hits_ZY]])
879
880 kalman_spatial_sigma = np.concatenate([hit_collection["d"][0][hit_collection["vert"]][track_hits_ZX] / 12**0.5,
881 hit_collection["d"][1][~hit_collection["vert"]][track_hits_ZY] / 12**0.5])
882
883 # Maximum distance. Use ((d_xy/2)**2 + (d_z/2)**2)**0.5
884 kalman_max_dis = np.concatenate([((hit_collection["d"][0][hit_collection["vert"]][track_hits_ZX]/2.)**2 +
885 (hit_collection["d"][2][hit_collection["vert"]][track_hits_ZX]/2.)**2)**0.5,
886 ((hit_collection["d"][1][~hit_collection["vert"]][track_hits_ZY]/2.)**2 +
887 (hit_collection["d"][2][~hit_collection["vert"]][track_hits_ZY]/2.)**2)**0.5])
888
889 hitID = 0 # Does it matter? We don't have a global hit ID.
890
891 hit_time = {}
892 for ch in range(hit_collection["time"].shape[0]):
893 hit_time[ch] = np.concatenate([hit_collection["time"][ch][hit_collection["vert"]][track_hits_ZX],
894 hit_collection["time"][ch][~hit_collection["vert"]][track_hits_ZY]])
895
896 for i_z_sorted in hit_z.argsort() :
897 tp = ROOT.genfit.TrackPoint()
898 hitCov = ROOT.TMatrixDSym(7)
899 hitCov[6][6] = kalman_spatial_sigma[i_z_sorted]**2
900
901 measurement = ROOT.genfit.WireMeasurement(ROOT.TVectorD(7, array('d', [hit_A0[i_z_sorted],
902 hit_A1[i_z_sorted],
903 hit_z[i_z_sorted],
904 hit_B0[i_z_sorted],
905 hit_B1[i_z_sorted],
906 hit_B2[i_z_sorted],
907 0.])),
908 hitCov,
909 1, # detid?
910 6, # hitid?
911 tp)
912
913 measurement.setMaxDistance(kalman_max_dis[i_z_sorted])
914 measurement.setDetId(int(hit_detid[i_z_sorted]))
915 measurement.setHitId(int(hitID))
916 hitID += 1
917 tp.addRawMeasurement(measurement)
918 theTrack.insertPoint(tp)
919
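# Added note (not part of the original file): each WireMeasurement above is built from a 7-component
# vector holding the two ends of the fired bar/channel, (A_x, A_y, A_z, B_x, B_y, B_z), plus a zero
# drift distance, so the hit is treated as a line segment rather than a point. setMaxDistance() is
# given the half-diagonal of the bar cross-section computed in kalman_max_dis.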
920 if not theTrack.checkConsistency():
921 theTrack.Delete()
922 raise RuntimeError("Kalman fitter track consistency check failed.")
923
924 # do the fit
925 self.kalman_fitter.processTrack(theTrack) # processTrackWithRep(theTrack,rep,True)
926
927 fitStatus = theTrack.getFitStatus()
928 if not fitStatus.isFitConverged() and 0>1:
929 theTrack.Delete()
930 raise RuntimeError("Kalman fit did not converge.")
931
932 # Now save the track if fit converged!
933 theTrack.SetUniqueID(self.track_type)
934 if fitStatus.isFitConverged():
935 if self.genfitTrack: self.kalman_tracks.Add(theTrack)
936 else :
937 # Load items into snd track class object
938 this_track = ROOT.sndRecoTrack(theTrack)
939 pointTimes = ROOT.std.vector(ROOT.std.vector('float'))()
940 for n, i_z_sorted in enumerate(hit_z.argsort()):
941 t_per_hit = []
942 for ch in range(len(hit_time)):
943 if hit_time[ch][i_z_sorted] != None:
944 t_per_hit.append(hit_time[ch][i_z_sorted])
945 pointTimes.push_back(t_per_hit)
946 this_track.setRawMeasTimes(pointTimes)
947 this_track.setTrackType(self.track_type)
948 # Save the track in sndRecoTrack format
949 self.kalman_tracks[i_muon] = this_track
950 # Delete the Kalman track object
951 theTrack.Delete()
952
953 # Remove track hits and try to find an additional track
954 # Find array index to be removed
955 index_to_remove_ZX = np.where(np.in1d(hit_collection["detectorID"], hit_collection["detectorID"][hit_collection["vert"]][track_hits_ZX]))[0]
956 index_to_remove_ZY = np.where(np.in1d(hit_collection["detectorID"], hit_collection["detectorID"][~hit_collection["vert"]][track_hits_ZY]))[0]
957
958 index_to_remove = np.concatenate([index_to_remove_ZX, index_to_remove_ZY])
959
960 # Remove dictionary entries
961 for key in hit_collection.keys() :
962 if len(hit_collection[key].shape) == 1 :
963 hit_collection[key] = np.delete(hit_collection[key], index_to_remove)
964 elif len(hit_collection[key].shape) == 2 :
965 hit_collection[key] = np.delete(hit_collection[key], index_to_remove, axis = 1)
966 else :
967 raise Exception("Wrong number of dimensions found when deleting hits in iterative muon identification algorithm.")
968
969 def FinishTask(self) :
970 print("Processed" ,self.events_run)
971 if not self.genfitTrack : self.kalman_tracks.Delete()
972 else : pass
973
974 # this is a copy of the SndlhcTracking function, with small adjustments to the event object names to make it work here
975 # FIXME: should find a way to use this function straight from SndlhcTracking!
976 def scifiCluster(self):
977 clusters = []
978 hitDict = {}
979 for k in range(self.ScifiHits.GetEntries()):
980 d = self.ScifiHits[k]
981 if not d.isValid(): continue
982 hitDict[d.GetDetectorID()] = k
983 hitList = list(hitDict.keys())
984 if len(hitList)>0:
985 hitList.sort()
986 tmp = [ hitList[0] ]
987 cprev = hitList[0]
988 ncl = 0
989 last = len(hitList)-1
990 hitvector = ROOT.std.vector("sndScifiHit*")()
991 for i in range(len(hitList)):
992 if i==0 and len(hitList)>1: continue
993 c=hitList[i]
994 neighbour = False
995 if (c-cprev)==1: # does not account for neighbours across sipms
996 neighbour = True
997 tmp.append(c)
998 if not neighbour or c==hitList[last]:
999 first = tmp[0]
1000 N = len(tmp)
1001 hitvector.clear()
1002 for aHit in tmp: hitvector.push_back( self.ScifiHits[hitDict[aHit]])
1003 aCluster = ROOT.sndCluster(first,N,hitvector,self.scifiDet,False)
1004 clusters.append(aCluster)
1005 if c!=hitList[last]:
1006 ncl+=1
1007 tmp = [c]
1008 elif not neighbour : # save last channel
1009 hitvector.clear()
1010 hitvector.push_back(self.ScifiHits[hitDict[c]])
1011 aCluster = ROOT.sndCluster(c,1,hitvector,self.scifiDet,False)
1012 clusters.append(aCluster)
1013 cprev = c
1014 self.clusScifi.Delete()
1015 for c in clusters:
1016 self.clusScifi.Add(c)
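# Illustrative sketch (not part of the original file): minimal configuration of the MuonReco task using
# only the setters defined above. The parameter-file name and tracking-case name are placeholders, and
# the surrounding FairRunAna/IO-manager setup needed to actually run the task is omitted.
def _configure_muon_reco_example() :
    muon_reco_task = MuonReco()
    muon_reco_task.SetParFile("tracking_parameters.xml")      # hypothetical file name
    muon_reco_task.SetTrackingCase("passing_mu_DS")           # hypothetical case name
    muon_reco_task.SetHoughSpaceFormat("linearSlopeIntercept")
    muon_reco_task.SetStandalone()
    return muon_reco_task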