Analysis tools MLReco Updates #173

Merged: 19 commits, Mar 2, 2024
Changes from all commits
27 changes: 26 additions & 1 deletion analysis/classes/ParticleFragment.py
@@ -43,30 +43,39 @@ class ParticleFragment:
a primary ionization trajectory within the group of fragments that
compose a particle.
'''

_COORD_ATTRS = ['points', 'start_point', 'end_point']

def __init__(self,
fragment_id: int = -1,
group_id: int = -1,
interaction_id: int = -1,
image_id: int = -1,
volume_id: int = -1,
pid: int = -1,
nu_id: int = -1,
semantic_type: int = -1,
index: np.ndarray = np.empty(0, dtype=np.int64),
points: np.ndarray = np.empty((0,3), dtype=np.float32),
depositions: np.ndarray = np.empty(0, dtype=np.float32),
-is_primary: int = -1,
+is_primary: bool = False,
start_point: np.ndarray = -np.ones(3, dtype=np.float32),
end_point: np.ndarray = -np.ones(3, dtype=np.float32),
start_dir: np.ndarray = -np.ones(3, dtype=np.float32),
end_dir: np.ndarray = -np.ones(3, dtype=np.float32),
length: float = -1.,
matched: bool = False,
is_contained: bool = False,
units: str = 'px',
**kwargs):

# Initialize private attributes to be assigned through setters only
self._size = None
self._index = None
self._depositions = None
self._units = units
if type(units) is bytes:
self._units = units.decode()

# Initialize attributes
self.id = int(fragment_id)
@@ -76,12 +85,14 @@ def __init__(self,
self.volume_id = volume_id
self.semantic_type = int(semantic_type)
self.nu_id = int(nu_id)
self.pid = int(pid)

self.index = index
self.points = points
self.depositions = depositions

self.is_primary = is_primary
self.is_contained = is_contained

self._start_point = np.copy(start_point)
self._end_point = np.copy(end_point)
@@ -220,3 +231,17 @@ def clear_match_info(self):
@property
def is_principal_match(self):
return self._is_principal_match

def convert_to_cm(self, meta):
'''
Converts the units of all coordinate attributes to cm.
'''
assert self._units == 'px'
for attr in self._COORD_ATTRS:
setattr(self, attr, pixel_to_cm(getattr(self, attr), meta))
self._units = 'cm'

@property
def units(self):
return self._units

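For illustration, here is a minimal, self-contained sketch of the coordinate-conversion pattern added above. The `pixel_to_cm` stand-in and the meta layout are assumptions for the example; the real helper and metadata come from the repository:

import numpy as np

def pixel_to_cm(coords, meta):
    # Hypothetical stand-in: affine map from pixel indices to cm, assuming
    # meta carries the lower image bounds and the voxel size in cm
    lower, size = np.asarray(meta['lower']), np.asarray(meta['size'])
    return (lower + np.asarray(coords) * size).astype(np.float32)

class Fragment:
    # Class-level registry of every attribute that stores coordinates
    _COORD_ATTRS = ['points', 'start_point', 'end_point']

    def __init__(self, points, start_point, end_point):
        self._units = 'px'
        self.points, self.start_point, self.end_point = points, start_point, end_point

    def convert_to_cm(self, meta):
        # Same loop as in the diff: convert each registered attribute once
        assert self._units == 'px'
        for attr in self._COORD_ATTRS:
            setattr(self, attr, pixel_to_cm(getattr(self, attr), meta))
        self._units = 'cm'

meta = {'lower': [-210., -210., 0.], 'size': [0.3, 0.3, 0.3]}
frag = Fragment(np.array([[10., 20., 30.]]), np.zeros(3), np.ones(3))
frag.convert_to_cm(meta)
print(frag.points)   # [[-207. -204.    9.]]

Registering coordinate attributes in a single class-level list keeps the conversion logic in one place and lets subclasses opt in more attributes, as the next file shows.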
6 changes: 6 additions & 0 deletions analysis/classes/TruthParticleFragment.py
@@ -23,6 +23,11 @@ class TruthParticleFragment(ParticleFragment):
Similar to `depositions`, i.e. using adapted true labels,
but storing true MeV energy deposits instead of rescaled ADC units.
"""

# Attributes that specify coordinates
_COORD_ATTRS = ParticleFragment._COORD_ATTRS + \
['truth_points', 'sed_points', 'position', 'end_position', \
'parent_position', 'ancestor_position', 'first_step', 'last_step']

def __init__(self,
*args,
@@ -108,6 +113,7 @@ def register_larcv_particle(self, particle):

# Set parent attributes based on the above
# self.semantic_type = self.shape
self.pid = PDG_TO_PID[int(self.pdg_code)]
self.start_point = self.first_step.astype(np.float32)
self.end_point = self.last_step.astype(np.float32)

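The class-level list extension above is all a subclass needs for the inherited `convert_to_cm` to pick up the truth-only coordinates; a toy rendering of the idiom:

class Base:
    _COORD_ATTRS = ['points', 'start_point', 'end_point']

class Truth(Base):
    # Extend rather than replace: the parent's conversion loop reads the
    # subclass value through ordinary attribute lookup on the instance
    _COORD_ATTRS = Base._COORD_ATTRS + ['truth_points', 'first_step', 'last_step']

print(Truth._COORD_ATTRS)
# ['points', 'start_point', 'end_point', 'truth_points', 'first_step', 'last_step']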
8 changes: 3 additions & 5 deletions analysis/classes/builders.py
@@ -335,7 +335,7 @@ def _build_reco(self,

pid_scores = softmax(type_logits, axis=1)
primary_scores = softmax(primary_logits, axis=1)

for i, p in enumerate(particles):
volume_id, cts = np.unique(volume_labels[p], return_counts=True)
volume_id = int(volume_id[cts.argmax()])
@@ -489,8 +489,7 @@ def _build_truth(self,
if energy_label is not None:
truth_depositions_MeV = energy_label[mask_nonghost].squeeze()

-particle = TruthParticle(#group_id=id,
-    group_id=len(out),
+particle = TruthParticle(group_id=len(out),
interaction_id=interaction_id,
nu_id=nu_id,
pid=pid,
@@ -501,6 +500,7 @@
points=coords,
sources=input_source[mask] if len(input_source) else input_source,
depositions=depositions,
+is_primary=bool(is_primary),
depositions_MeV=np.empty(0, dtype=np.float32),
truth_index=true_voxel_indices,
truth_points=coords_noghost,
@@ -509,8 +509,6 @@
sed_index=sed_index.astype(np.int64),
sed_points=sed_points,
sed_depositions_MeV=sed_depositions_MeV,
-is_primary=bool(is_primary),
-# pid=pdg,
particle_asis=lpart)

particle.start_point = particle.first_step
3 changes: 2 additions & 1 deletion analysis/manager.py
@@ -366,7 +366,8 @@ def convert_pixels_to_cm(self, data, result):
])

data_products = set([
-'particles', 'truth_particles', 'interactions', 'truth_interactions'
+'particles', 'truth_particles', 'interactions', 'truth_interactions',
+'particle_fragments', 'truth_particle_fragments'
])

meta = data['meta'][0]
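A sketch of how the enlarged `data_products` set is typically consumed. Only the product names come from the diff; the driver loop below, and the per-entry list structure it assumes, are illustrative:

def convert_pixels_to_cm_sketch(data, result, data_products):
    # Convert every registered object collection in place, entry by entry
    meta = data['meta'][0]
    for key in data_products:
        for entry in result.get(key, []):
            for obj in entry:
                if obj.units == 'px':
                    obj.convert_to_cm(meta)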
6 changes: 3 additions & 3 deletions analysis/post_processing/evaluation/match.py
@@ -21,15 +21,15 @@ def __init__(self, matching_mode,
weight=False,
list_principal_matches=True,
fragments=False):
-self.fragments = fragments
-if self.fragments:
-    self.result_cap = ['particle_fragments', 'truth_particle_fragments']

self.matching_mode = matching_mode
self.min_overlap = min_overlap
self.overlap_mode = overlap_mode
self.weight = weight
self.list_principal_matches = list_principal_matches
+self.fragments = fragments
+if self.fragments:
+    self.result_cap = ['particle_fragments', 'truth_particle_fragments']

def process(self, data_dict, result_dict):

28 changes: 25 additions & 3 deletions analysis/post_processing/post_processor.py
@@ -44,16 +44,35 @@ def __init__(self, run_mode = None, truth_point_mode = None):
# Make a list of object keys to process
req_keys = self.result_cap + self.result_cap_opt
self.part_keys, self.inter_keys = [], []
-if run_mode != 'truth':
+if run_mode == 'truth':
    if 'truth_particles' in req_keys:
        self.part_keys.append('truth_particles')
    if 'truth_interactions' in req_keys:
        self.inter_keys.append('truth_interactions')
    if 'truth_particle_fragments' in req_keys:
        self.part_keys.append('truth_particle_fragments')
elif run_mode == 'reco':
    if 'particles' in req_keys:
        self.part_keys.append('particles')
    if 'interactions' in req_keys:
        self.inter_keys.append('interactions')
    if 'particle_fragments' in req_keys:
        self.part_keys.append('particle_fragments')
elif run_mode == 'both' or run_mode == 'all':
    if 'particles' in req_keys:
        self.part_keys.append('particles')
    if 'interactions' in req_keys:
        self.inter_keys.append('interactions')
-if run_mode != 'reco':
    if 'truth_particles' in req_keys:
        self.part_keys.append('truth_particles')
    if 'truth_interactions' in req_keys:
        self.inter_keys.append('truth_interactions')
    if 'particle_fragments' in req_keys:
        self.part_keys.append('particle_fragments')
    if 'truth_particle_fragments' in req_keys:
        self.part_keys.append('truth_particle_fragments')
else:
    raise ValueError('Unrecognized run mode')

self.all_keys = self.part_keys + self.inter_keys

@@ -87,7 +106,10 @@ def run(self, data_dict, result_dict, image_id):
data_single, result_single = {}, {}
for data_key in self.data_cap:
if data_key in data_dict.keys():
-data_single[data_key] = data_dict[data_key][image_id]
+if data_key == 'meta':
+    data_single[data_key] = data_dict[data_key]
+else:
+    data_single[data_key] = data_dict[data_key][image_id]
else:
msg = f'Unable to find {data_key} in data dictionary while '\
f'running post-processor {self.name}.'
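The run-mode dispatch above reduces to a small lookup table; an equivalent, self-contained rendering (behavior inferred from the diff, written as a plain function):

def select_keys(run_mode, req_keys):
    part = {
        'truth': ['truth_particles', 'truth_particle_fragments'],
        'reco':  ['particles', 'particle_fragments'],
    }
    inter = {'truth': ['truth_interactions'], 'reco': ['interactions']}
    for table in (part, inter):
        table['both'] = table['all'] = table['reco'] + table['truth']
    if run_mode not in part:
        raise ValueError('Unrecognized run mode')
    return ([k for k in part[run_mode] if k in req_keys],
            [k for k in inter[run_mode] if k in req_keys])

print(select_keys('both', {'particles', 'truth_particles', 'interactions'}))
# (['particles', 'truth_particles'], ['interactions'])

The second hunk keeps `meta` whole rather than indexing it by `image_id`, since the metadata describes the whole batch rather than a single image.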
2 changes: 1 addition & 1 deletion analysis/post_processing/reconstruction/__init__.py
@@ -1,6 +1,6 @@
from .points import ParticleExtremaProcessor
from .geometry import DirectionProcessor, \
-ContainmentProcessor, FiducialProcessor
+ContainmentProcessor, FiducialProcessor, SimpleContainmentProcessor
from .calorimetry import CalorimetricEnergyProcessor
from .tracking import CSDAEnergyProcessor
from .mcs import MCSEnergyProcessor
103 changes: 101 additions & 2 deletions analysis/post_processing/reconstruction/geometry.py
@@ -20,7 +20,8 @@ def __init__(self,
neighborhood_radius = -1,
optimize = True,
truth_point_mode = 'points',
-run_mode = 'both'):
+run_mode = 'both',
+fragments=False):
'''
Store the particle direction reconstruction parameters

@@ -32,6 +33,9 @@
Optimizes the number of points involved in the estimate
'''
if fragments:
    self.result_cap = ['particle_fragments']
    self.result_cap_opt = ['truth_particle_fragments']

# Initialize the parent class
super().__init__(run_mode, truth_point_mode)

# Store the direction reconstruction parameters
@@ -90,7 +94,8 @@ def __init__(self,
allow_multi_module = False,
min_particle_sizes = 0,
truth_point_mode = 'points',
-run_mode = 'both'):
+run_mode = 'both',
+fragments=False):
'''
Initialize the containment conditions.

@@ -131,6 +136,9 @@
size (in voxel count) specified by this parameter. If specified
as a dictionary, it maps a specific particle type to its own cut.
'''
if fragments:
    self.result_cap = ['particle_fragments']
    self.result_cap_opt = ['truth_particle_fragments']
# Initialize the parent class
super().__init__(run_mode, truth_point_mode)

@@ -281,3 +289,94 @@ def process(self, data_dict, result_dict):
ia.is_fiducial = self.geo.check_containment(vertex)

return {}, {}
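The `fragments` flag used throughout these processors relies on instance attributes shadowing class attributes: assigning `self.result_cap` before the parent constructor reads it swaps the particle keys for fragment keys. A toy sketch of the mechanism (names simplified):

class ProcessorSketch:
    result_cap = ['particles']                    # class-level default
    result_cap_opt = ['truth_particles']

    def __init__(self, fragments=False):
        if fragments:
            # Instance attribute shadows the class attribute from here on
            self.result_cap = ['particle_fragments']
            self.result_cap_opt = ['truth_particle_fragments']
        # Stands in for the parent __init__ building its key list
        self.req_keys = self.result_cap + self.result_cap_opt

print(ProcessorSketch().req_keys)                 # ['particles', 'truth_particles']
print(ProcessorSketch(fragments=True).req_keys)   # ['particle_fragments', 'truth_particle_fragments']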


class SimpleContainmentProcessor(PostProcessor):
'''
Check whether a particle or interaction comes within some distance
of the image boundaries. This is a simple containment check that
only uses a pre-defined image size.
'''
name = 'check_simple_containment'
data_cap = ['meta']
result_cap = ['particles', 'interactions']
result_cap_opt = ['truth_particles', 'truth_interactions']

def __init__(self,
             margin,
             image_size,
             min_particle_sizes=None,
             fragments=False,
             truth_point_mode = 'points',
             run_mode = 'both'):
'''
Initialize the containment conditions.

Parameters
----------
margin : float
    Minimum distance from an image boundary to be considered contained
image_size : float
    Size of the image in pixels
min_particle_sizes : dict, optional
    Map from particle species to the minimum size (in voxel count)
    below which a particle is skipped in the interaction check
'''
if fragments:
    self.result_cap = ['particle_fragments']
    self.result_cap_opt = ['truth_particle_fragments']
# Initialize the parent class
super().__init__(run_mode, truth_point_mode)

self.margin = margin
self.image_size = image_size
# Assumed default: an empty dict disables the per-species size cut
# used in `process` below
self.min_particle_sizes = min_particle_sizes if min_particle_sizes is not None else {}

def process(self, data_dict, result_dict):
'''
Check the containment of all particles/interactions in one entry

Parameters
----------
data_dict : dict
Input data dictionary
result_dict : dict
Chain output dictionary
'''
# Meta is unpacked as (lower bounds, upper bounds, image size), 3 values each
lower, upper, size = np.split(np.asarray(data_dict['meta'][0]).reshape(-1), 3)
# Loop over particle objects
for k in self.part_keys:
for p in result_dict[k]:
# Make sure the particle coordinates are expressed in cm
self.check_units(p)

# Get point coordinates
points = self.get_points(p)
if not len(points):
p.is_contained = True
continue

# Check particle containment
p.is_contained = (points[:, 0] > lower[0] + self.margin).all() and \
(points[:, 0] < upper[0] - self.margin).all() and \
(points[:, 1] > lower[1] + self.margin).all() and \
(points[:, 1] < upper[1] - self.margin).all() and \
(points[:, 2] > lower[2] + self.margin).all() and \
(points[:, 2] < upper[2] - self.margin).all()

# Loop over interaction objects
for k in self.inter_keys:
for ii in result_dict[k]:
# Check that all the particles in the interaction are contained
ii.is_contained = True
for p in ii.particles:
if not p.is_contained:
# Do not account for particles below a certain size
if p.pid > -1 \
    and p.size < self.min_particle_sizes.get(p.pid, 0):
    continue

ii.is_contained = False
break

return {}, {}
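A self-contained sketch of the box test performed in `process` above; the bounds here are invented, whereas the real `lower` and `upper` are unpacked from `data_dict['meta']`:

import numpy as np

def box_contained(points, lower, upper, margin):
    # Contained means every point sits at least `margin` away from
    # every face of the axis-aligned image box
    points = np.atleast_2d(points)
    return bool(np.all(points > lower + margin) and np.all(points < upper - margin))

lower, upper = np.zeros(3), np.full(3, 768.)
print(box_contained([[100., 100., 100.]], lower, upper, margin=5.))  # True
print(box_contained([[2., 100., 100.]], lower, upper, margin=5.))    # False

The vectorized `np.all` form is equivalent to the six per-axis comparisons in the diff.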
1 change: 1 addition & 0 deletions analysis/post_processing/reconstruction/kinematics.py
@@ -143,6 +143,7 @@ def process(self, data_dict, result_dict):
assigned = True
break
else:
# Re-normalize softmax probabilities
scores *= 1./(1 - scores[k])

assert assigned, 'Must specify a ' \
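The added comment documents the rescaling step: when the best-scoring species k cannot be assigned, its probability mass is removed and the remaining softmax scores are divided by (1 - scores[k]) so they form a distribution again. A worked example with toy numbers:

import numpy as np

scores = np.array([0.5, 0.3, 0.2])   # softmax scores summing to 1
k = 0                                # candidate class ruled out
scores *= 1. / (1 - scores[k])       # redistribute the removed mass
print(scores[1:], scores[1:].sum())  # [0.6 0.4], sums to 1 up to rounding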
11 changes: 9 additions & 2 deletions analysis/post_processing/reconstruction/label.py
@@ -16,7 +16,8 @@ class ChildrenProcessor(PostProcessor):
result_cap = ['truth_particles']

def __init__(self,
-mode='semantic_type'):
+mode='semantic_type',
+fragments=False):
'''
Initialize the counting parameters

@@ -29,6 +30,9 @@
'''
# Store the counting mode
self.mode = mode
if fragments:
    self.result_cap = ['truth_particle_fragments']
self.fragments = fragments

def process(self, data_dict, result_dict):
'''
@@ -44,7 +48,10 @@
# Build a directed graph on the true particles
G = nx.DiGraph()

-particles = result_dict['truth_particles']
+if self.fragments:
+    particles = result_dict['truth_particle_fragments']
+else:
+    particles = result_dict['truth_particles']
for p in particles:
G.add_node(p.id, attr=getattr(p, self.mode))

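For reference, a minimal sketch of the graph bookkeeping this processor performs; the ids, attributes, and edges below are toy values, whereas the real graph is built from truth parentage information:

import networkx as nx

G = nx.DiGraph()
for node_id, semantic_type in [(0, 1), (1, 0), (2, 0)]:
    G.add_node(node_id, attr=semantic_type)
G.add_edge(0, 1)   # particle 0 is the parent of 1 and 2
G.add_edge(0, 2)

# Children attributes per node, the kind of count the processor records
children = {n: [G.nodes[c]['attr'] for c in G.successors(n)] for n in G.nodes}
print(children)    # {0: [0, 0], 1: [], 2: []}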