Add n saturated channels #691

Merged · 15 commits · Oct 14, 2021
Changes from 10 commits
7 changes: 5 additions & 2 deletions straxen/plugins/event_processing.py
@@ -135,7 +135,7 @@ class EventBasics(strax.Plugin):
The main S2 and alternative S2 are given by the largest two S2-Peaks
within the event. By default this is also true for S1.
"""
__version__ = '1.1.1'
__version__ = '1.2.0'

depends_on = ('events',
'peak_basics',
@@ -195,7 +195,10 @@ def _set_dtype_requirements(self):
('range_50p_area', np.float32, 'width, 50% area [ns]'),
('range_90p_area', np.float32, 'width, 90% area [ns]'),
('rise_time', np.float32, 'time between 10% and 50% area quantiles [ns]'),
('area_fraction_top', np.float32, 'fraction of area seen by the top PMT array')
('area_fraction_top', np.float32, 'fraction of area seen by the top PMT array'),
('tight_coincidence', np.int16, 'Hits within tight range of mean'),
('n_saturated_channels', np.int16, 'Total number of saturated channels'),
('tight_coincidence_channel', np.int16, 'PMT channel within tight range of mean'),
)

@staticmethod
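Usage note (outside the diff): the per-peak properties listed in `_set_dtype_requirements` are expected to surface in `event_basics` with the usual `s1_`/`s2_` (and `alt_s1_`/`alt_s2_`) prefixes. A minimal sketch, assuming a configured XENONnT context with data access; the run number is hypothetical:

```python
import straxen

st = straxen.contexts.xenonnt_online()
run_id = '012345'  # hypothetical run number, replace with a real one

# Load event_basics and look at the newly propagated per-peak fields.
events = st.get_array(run_id, 'event_basics')
print(events['s1_n_saturated_channels'][:5])
print(events['s2_tight_coincidence_channel'][:5])
```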
8 changes: 7 additions & 1 deletion straxen/plugins/peak_processing.py
@@ -28,7 +28,7 @@ class PeakBasics(strax.Plugin):
arrays.
NB: This plugin can therefore be loaded as a pandas DataFrame.
"""
__version__ = "0.0.9"
__version__ = "0.1.0"
parallel = True
depends_on = ('peaks',)
provides = 'peak_basics'
@@ -47,6 +47,8 @@ class PeakBasics(strax.Plugin):
'max_pmt'), np.int16),
(('Area of signal in the largest-contributing PMT (PE)',
'max_pmt_area'), np.float32),
(('Total number of saturated channels',
'n_saturated_channels'), np.int16),
(('Width (in ns) of the central 50% area of the peak',
'range_50p_area'), np.float32),
(('Width (in ns) of the central 90% area of the peak',
@@ -62,6 +64,8 @@
'rise_time'), np.float32),
(('Hits within tight range of mean',
'tight_coincidence'), np.int16),
(('PMT channel within tight range of mean',
'tight_coincidence_channel'), np.int16),
(('Classification of the peak(let)',
'type'), np.int8)
]
@@ -78,6 +82,8 @@ def compute(self, peaks):
r['max_pmt'] = np.argmax(p['area_per_channel'], axis=1)
r['max_pmt_area'] = np.max(p['area_per_channel'], axis=1)
r['tight_coincidence'] = p['tight_coincidence']
r['tight_coincidence_channel'] = p['tight_coincidence_channel']
r['n_saturated_channels'] = p['n_saturated_channels']

n_top = self.config['n_top_pmts']
area_top = p['area_per_channel'][:, :n_top].sum(axis=1)
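Usage note (outside the diff): since `PeakBasics` drops structured sub-arrays, the new columns can be inspected directly from a pandas DataFrame. A minimal sketch under the same assumptions as above (configured context, hypothetical run number):

```python
import straxen

st = straxen.contexts.xenonnt_online()
df = st.get_df('012345', 'peak_basics')  # '012345' is a hypothetical run number

# Columns touched by this PR, one row per peak.
print(df[['n_saturated_channels',
          'tight_coincidence',
          'tight_coincidence_channel']].describe())
```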
40 changes: 31 additions & 9 deletions straxen/plugins/peaklet_processing.py
@@ -92,7 +92,7 @@ class Peaklets(strax.Plugin):
parallel = 'process'
compressor = 'zstd'

__version__ = '0.4.1'
__version__ = '0.5.0'

def infer_dtype(self):
return dict(peaklets=strax.peak_dtype(
@@ -234,12 +234,16 @@ def compute(self, records, start, end):
peaklet_max_times = (
peaklets['time']
+ np.argmax(peaklets['data'], axis=1) * peaklets['dt'])
peaklets['tight_coincidence'] = get_tight_coin(
tight_coincidence, tight_coincidence_channel = get_tight_coin(
hit_max_times,
hitlets['channel'],
peaklet_max_times,
self.config['tight_coincidence_window_left'],
self.config['tight_coincidence_window_right'])

peaklets['tight_coincidence'] = tight_coincidence
peaklets['tight_coincidence_channel'] = tight_coincidence_channel

if self.config['diagnose_sorting'] and len(r):
assert np.diff(r['time']).min(initial=1) >= 0, "Records not sorted"
assert np.diff(hitlets['time']).min(initial=1) >= 0, "Hits/Hitlets not sorted"
@@ -618,7 +622,7 @@ class MergedS2s(strax.OverlapWindowPlugin):
depends_on = ('peaklets', 'peaklet_classification', 'lone_hits')
data_kind = 'merged_s2s'
provides = 'merged_s2s'
__version__ = '0.3.1'
__version__ = '0.4.0'

def setup(self):
self.to_pe = straxen.get_correction_from_cmt(self.run_id,
@@ -805,7 +809,7 @@ class Peaks(strax.Plugin):
data_kind = 'peaks'
provides = 'peaks'
parallel = True
save_when = strax.SaveWhen.NEVER
save_when = strax.SaveWhen.EXPLICIT

__version__ = '0.1.2'

@@ -853,16 +857,31 @@ def compute(self, peaklets_he, merged_s2s_he):


@numba.jit(nopython=True, nogil=True, cache=True)
def get_tight_coin(hit_max_times, peak_max_times, left, right):
"""Calculates the tight coincidence
def get_tight_coin(hit_max_times, hit_channel, peak_max_times, left, right,
n_channel=straxen.n_tpc_pmts):
"""Calculates the tight coincidence based on hits and PMT channels.

Defined by the number of hits within a specified time range of
the peak's maximum amplitude.
Imitates the tight_coincidence variable in pax:
github.com/XENON1T/pax/blob/master/pax/plugins/peak_processing/BasicProperties.py

:param hit_max_times: Time of each hit's maximum amplitude in ns.
:param hit_channel: PMT channel of each hit.
:param peak_max_times: Time of each peak's maximum in ns.
:param left: Left boundary of the window in which we search for the
tight coincidence, in ns.
:param right: Right boundary of the window in which we search for the
tight coincidence, in ns.
:param n_channel: Number of PMT channels of the detector.

:returns: n_coin_hit, n_coin_channel: arrays of length len(peak_max_times)
containing the tight-coincidence hit and channel counts per peak.
"""
left_hit_i = 0
n_coin = np.zeros(len(peak_max_times), dtype=np.int16)
n_coin_hit = np.zeros(len(peak_max_times), dtype=np.int16)
n_coin_channel = np.zeros(len(peak_max_times), dtype=np.int16)
channels_seen = np.zeros(n_channel, dtype=np.bool_)

# loop over peaks
for p_i, p_t in enumerate(peak_max_times):
@@ -873,13 +892,16 @@ def get_tight_coin(hit_max_times, peak_max_times, left, right):
# if the hit is in the window, it's a tight coin
d = hit_max_times[left_hit_i] - p_t
if (-left <= d) & (d <= right):
n_coin[p_i] += 1
n_coin_hit[p_i] += 1
channels_seen[hit_channel[left_hit_i]] = 1

# stop the loop when we know we're outside the range
if d > right:
n_coin_channel[p_i] = np.sum(channels_seen)
channels_seen[:] = 0
break

return n_coin
return n_coin_hit, n_coin_channel


@numba.njit(cache=True, nogil=True)
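For reference, `get_tight_coin` can also be exercised standalone on small, time-sorted arrays. A toy sketch of the new signature with illustrative numbers only; a trailing hit outside the window is included so the channel count is flushed by the `d > right` branch shown above:

```python
import numpy as np
from straxen.plugins.peaklet_processing import get_tight_coin

# Three hits in two distinct channels around a peak maximum at t = 100 ns,
# plus one late hit well outside the +/-50 ns coincidence window.
hit_max_times = np.array([60, 90, 140, 400], dtype=np.int64)
hit_channel = np.array([0, 1, 1, 2], dtype=np.int16)
peak_max_times = np.array([100], dtype=np.int64)

n_coin_hit, n_coin_channel = get_tight_coin(
    hit_max_times, hit_channel, peak_max_times, 50, 50)

print(n_coin_hit, n_coin_channel)  # expected: [3] hits from [2] distinct channels
```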
36 changes: 36 additions & 0 deletions tests/test_peaklet_processing.py
@@ -3,7 +3,9 @@
import hypothesis.strategies as strat

import strax
from strax.testutils import fake_hits
import straxen
from straxen.plugins.peaklet_processing import get_tight_coin


@settings(deadline=None)
@@ -45,3 +47,37 @@ def test_n_hits():
res = p.compute(records, 0, 999)
peaklets = res['peaklets']
assert peaklets['n_hits'] == 3, "Peaklet has the wrong number of hits!"


@given(fake_hits,
strat.lists(elements=strat.integers(0, 9), min_size=20))
@settings(deadline=None)
def test_tight_coincidence(hits, channel):
hits['area'] = 1
hits['channel'] = channel[:len(hits)]  # In case there are fewer channels than hits (unlikely)
gap_threshold = 10
peaks = strax.find_peaks(hits,
adc_to_pe=np.ones(10),
right_extension=0, left_extension=0,
gap_threshold=gap_threshold,
min_channels=1,
min_area=0)

peaks_max_time = peaks['time'] + peaks['length']//2
hits_max_time = hits['time'] + hits['length']//2

left = 5
right = 5
tight_coin, tight_coin_channel = get_tight_coin(hits_max_time,
hits['channel'],
peaks_max_time,
left,
right,
)
for ind, p_max_t in enumerate(peaks_max_time):
m_hits_in_peak = (hits_max_time >= (p_max_t - left))
m_hits_in_peak &= (hits_max_time <= (p_max_t + right))
n_hits = np.sum(m_hits_in_peak)
n_channel = len(np.unique(hits[m_hits_in_peak]['channel']))
assert n_hits == tight_coin[ind], 'Wrong number of tight hits'
assert n_channel == tight_coin_channel[ind], f'Wrong number of tight channels: got {tight_coin_channel[ind]}, but expected {n_channel}'