Skip to content

Commit

Permalink
Two step gap clustering (#596)
Browse files Browse the repository at this point in the history
* Two-step/threshold gapsize clustering example

* reject lone hit during s2 merging

* further tuning of the parameters

* cleaning up the code

* gap size parameter added in configuration file

* tuning gaps and fix one bug

* modify s2_aft_mean value in XENON1T configuration file

* isolating only small hits to avoid potential effect on high energy signals

* 200ns is better in terms of single electron s2s

* use better peak classification during clustering stage

* modify Xe100 config file to fix test issue

* modify test plugins

* TEST ROOT bug

* reprocessing

* tuning the clustering parameters
  • Loading branch information
feigaodm authored Aug 3, 2017
1 parent bd3647e commit 7b8d0ef
Show file tree
Hide file tree
Showing 5 changed files with 82 additions and 20 deletions.
8 changes: 5 additions & 3 deletions pax/config/XENON100.ini
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,13 @@ dsp = [
# Diagnostic plotting -- does nothing unless option is set, but convenient to have here for debugging
'HitfinderDiagnosticPlots.HitfinderDiagnosticPlots',

# Reject hits in noisy channels
# 'RejectNoiseHits.RejectNoiseHits',
'SumWaveform.SumWaveform', # Must do this AFTER noisy hit rejection!

# Combine hits into rough clusters = peaks
'BuildPeaks.GapSizeClustering',

# Reject hits in noisy channels
'RejectNoiseHits.RejectNoiseHits',
'SumWaveform.SumWaveform', # Must do this AFTER noisy hit rejection!

# Refine the clustering / split peaks
'NaturalBreaksClustering.NaturalBreaksClustering',
Expand Down Expand Up @@ -102,6 +103,7 @@ right_extension = 50 * ns
# Start a new cluster / peak if a gap larger than this is encountered
# see [note tbd]
max_gap_size_in_cluster = 650 * ns
max_gap_size_in_s1like_cluster = 650 * ns



Expand Down
15 changes: 9 additions & 6 deletions pax/config/XENON1T.ini
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,13 @@ dsp = [
# Diagnostic plotting -- does nothing unless option is set, but convenient to have here for debugging
'HitfinderDiagnosticPlots.HitfinderDiagnosticPlots',

# Reject hits in noisy channels
# 'RejectNoiseHits.RejectNoiseHits',
'SumWaveform.SumWaveform', # Must do this AFTER noisy hit rejection!

# Combine hits into rough clusters = peaks
'BuildPeaks.GapSizeClustering',

# Reject hits in noisy channels
'RejectNoiseHits.RejectNoiseHits',
'SumWaveform.SumWaveform', # Must do this AFTER noisy hit rejection!

# Refine the clustering / split peaks
'LocalMinimumClustering.LocalMinimumClustering',
Expand Down Expand Up @@ -264,11 +265,12 @@ anode_wire_radius = 235/2 * um # xenon:xenon1t:junji:m
s1_light_yield_map = 's1_xyz_XENON1T_kr83m-sr1_pax-664_fdc-adcorrtpf.json'
s2_light_yield_map = 's2_xy_map_v2.1.json'
s1_detection_efficiency = 0.12 # Monte Carlo paper draft, p. 23
# This should include LCE and QE effects, but not zero-length encoding / hitfinder acceptance.
# This should include LCE and QE effects,
# but not zero-length encoding / hitfinder acceptance.
s1_patterns_file = 'XENON1T_s1_xyz_patterns_May2017.json.gz'
s2_patterns_file = 'XENON1T_s2_xy_patterns_top_v0.3.0.json.gz'
s2_fitted_patterns_file = 'XENON1T_s2_xy_fitted_patterns_top_v0.3.0.json.gz'
s2_mean_area_fraction_top = 0.680 # See XeAnalysisScripts/mapmaking
s2_mean_area_fraction_top = 0.627 # See xenon:xenon1t:sim:notes:yuehuan:sr_1_kr83m#s2_top_fraction

# Probability of double photoelectron emission
p_double_pe_emision = 0.15 # xenon:xenon1t:aalbers:double_pe_emission
Expand Down Expand Up @@ -317,7 +319,8 @@ s2_afterpulse_types = {

[BuildPeaks.GapSizeClustering]
# Start a new cluster / peak if a gap larger than this is encountered
max_gap_size_in_cluster = 2 * us
max_gap_size_in_cluster = 2.5 * us
max_gap_size_in_s1like_cluster = 240 * ns


[NaturalBreaksClustering]
Expand Down
5 changes: 4 additions & 1 deletion pax/plugins/peak_processing/BasicProperties.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,10 @@ def integrate_until_fraction(w, fractions_desired, results):
while fraction_seen + fraction_this_sample >= needed_fraction:
# Yes, so we need to add the next sample fractionally
area_needed = area_tot * (needed_fraction - fraction_seen)
results[current_fraction_index] = i + area_needed/x
if x != 0:
results[current_fraction_index] = i + area_needed/x
else:
results[current_fraction_index] = i
# Advance to the next fraction
current_fraction_index += 1
if current_fraction_index > len(fractions_desired) - 1:
Expand Down
6 changes: 3 additions & 3 deletions pax/plugins/peak_processing/ClassifyPeaks.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,10 @@ class AdHocClassification1T(plugin.TransformPlugin):

def startup(self):
self.s1_rise_time_bound = interpolate.interp1d([0, 5, 10, 100],
[100, 100, 70, 70],
[80, 75, 70, 70],
fill_value='extrapolate', kind='linear')
self.s1_rise_time_aft = interpolate.interp1d([0, 0.3, 0.4, 0.5, 0.70, 0.70, 1.0],
[70, 70, 65, 60, 35, 0, 0], kind='linear')
self.s1_rise_time_aft = interpolate.interp1d([0, 0.4, 0.5, 0.6, 0.70, 0.70, 1.0],
[70, 70, 68, 65, 60, 0, 0], kind='linear')

def transform_event(self, event):

Expand Down
68 changes: 61 additions & 7 deletions pax/plugins/signal_processing/BuildPeaks.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import numpy as np

from pax import plugin, dsputils
from pax.plugins.peak_processing.BasicProperties import integrate_until_fraction


class GapSizeClustering(plugin.ClusteringPlugin):
Expand All @@ -11,22 +12,75 @@ def startup(self):
self.dt = self.config['sample_duration']
self.n_channels = self.config['n_channels']
self.detector_by_channel = dsputils.get_detector_by_channel(self.config)

# Maximum gap inside an S1-like cluster
self.s1_gap_threshold = self.config['max_gap_size_in_s1like_cluster'] / self.dt

# Maximum gap inside other clusters
self.gap_threshold = self.config['max_gap_size_in_cluster'] / self.dt

# Rise time threshold to mark S1 candidates
self.rise_time_threshold = self.config.get('rise_time_threshold', 80)

# tight coincidence
self.tight_coincidence_window = self.config.get('tight_coincidence_window', 50) // self.dt

@staticmethod
def iterate_gap_clusters(hits, gap_threshold):
gaps = dsputils.gaps_between_hits(hits)
cluster_indices = [0] + np.where(gaps > gap_threshold)[0].tolist() + [len(hits)]
for i in range(len(cluster_indices) - 1):
l_i, r_i = cluster_indices[i], cluster_indices[i + 1]
yield l_i, r_i, hits[l_i:r_i]

def transform_event(self, event):
# Cluster hits in each detector separately
# Cluster hits in each detector separately.
# Assumes detector channel mappings are non-overlapping
for detector, channels in self.config['channels_in_detector'].items():
hits = event.all_hits[(event.all_hits['channel'] >= channels[0]) &
(event.all_hits['channel'] <= channels[-1])]
if len(hits) == 0:
continue
hits.sort(order='left_central')
gaps = dsputils.gaps_between_hits(hits)
cluster_indices = [0] + np.where(gaps > self.gap_threshold)[0].tolist() + [len(hits)]
for i in range(len(cluster_indices) - 1):
hits_in_this_peak = hits[cluster_indices[i]:cluster_indices[i + 1]]
event.peaks.append(self.build_peak(hits=hits_in_this_peak,
detector=detector))
dt = self.dt

# First cluster into small clusters. Try to find S1 candidates among them, and set these apart
s1_mask = np.zeros(len(hits), dtype=np.bool) # True if hit is part of S1 candidate
for l_i, r_i, h in self.iterate_gap_clusters(hits, self.s1_gap_threshold):
l = h['left_central'].min()
# center = np.sum(h['center'] * h['area']) / h['area'].sum() / self.dt
# rise_time = (center - l)
area_sum = np.sum(h['area'])
peak = self.build_peak(hits=h, detector=detector)
# use summed WF to calculate peak properties
w = event.get_sum_waveform(peak.detector).samples[peak.left:peak.right + 1]
max_idx = np.argmax(w)
peak.index_of_maximum = peak.left + max_idx
# calculate rise time for peak classification
area_times = np.ones(21) * float('nan')
integrate_until_fraction(w, fractions_desired=np.linspace(0, 1, 21), results=area_times)
area_times *= dt
area_midpoint = area_times[10]
area_decile_from_midpoint = area_times[::2] - area_midpoint
rise_time = -area_decile_from_midpoint[1]
# tight coincidence for peak classification
x = h['index_of_maximum']
l = peak.index_of_maximum - self.tight_coincidence_window
r = peak.index_of_maximum + self.tight_coincidence_window
tight_coincidence = len(np.unique(h['channel'][(x >= l) & (x <= r)]))
if (tight_coincidence >= 3 and rise_time < self.rise_time_threshold) or (len(h) <= 2 and area_sum < 50):
# Yes, this is an S1 candidate Or, this is a lone hit. Mark it as a peak,
# hits will be ignored in next stage.
s1_mask[l_i:r_i] = True
event.peaks.append(self.build_peak(hits=h, detector=detector))

# Remove hits that already left us as S1 candidates or lone hits
hits = hits[True ^ s1_mask]
if len(hits) == 0:
continue

# Cluster remaining hits with the larger gap threshold
for l_i, r_i, h in self.iterate_gap_clusters(hits, self.gap_threshold):
event.peaks.append(self.build_peak(hits=h, detector=detector))

return event

0 comments on commit 7b8d0ef

Please sign in to comment.