Skip to content
This repository has been archived by the owner on Jun 5, 2024. It is now read-only.

Commit

Permalink
Added scaling of S2 AFT according to value from config (#370)
Browse files Browse the repository at this point in the history
* Added scaling of S2 AFT according to value from config

* Making exception for PMT mask if dummy maps is used
  • Loading branch information
terliuk authored May 18, 2022
1 parent bdcd6f7 commit d3934d5
Show file tree
Hide file tree
Showing 2 changed files with 66 additions and 15 deletions.
22 changes: 12 additions & 10 deletions wfsim/core/s2.py
Original file line number Diff line number Diff line change
Expand Up @@ -528,9 +528,9 @@ def photon_channels(n_electron, z_obs, positions, _instruction, config, resource
_photon_channels = np.zeros(0, dtype=np.int64)
return _photon_channels

aft = config['s2_mean_area_fraction_top']
aft_sigma = config.get('s2_aft_sigma', 0.0118)
aft_skewness = config.get('s2_aft_skewness', -1.433)
#aft = config['s2_mean_area_fraction_top']
aft_sigma = config.get('s2_aft_sigma', 0.0)
aft_skewness = config.get('s2_aft_skewness', 0.0)

channels = np.arange(config['n_tpc_pmts']).astype(np.int64)
top_index = np.arange(config['n_top_pmts'])
Expand Down Expand Up @@ -559,13 +559,15 @@ def photon_channels(n_electron, z_obs, positions, _instruction, config, resource
for unique_i, count in zip(*np.unique(_instruction, return_counts=True)):
pat = pattern[unique_i] # [pmt]

if aft > 0: # Redistribute pattern with user specified aft
_aft = aft * (1 + skewnorm.rvs(loc=0,
scale=aft_sigma,
a=aft_skewness))
_aft = np.clip(_aft, 0, 1)
pat[top_index] = pat[top_index] / pat[top_index].sum() * _aft
pat[bottom_index] = pat[bottom_index] / pat[bottom_index].sum() * (1 - _aft)
if aft_sigma != 0: # Redistribute pattern with user specified aft smearing
_cur_aft=np.sum(pat[top_index])/np.sum(pat)
_new_aft=_cur_aft*skewnorm.rvs(loc=1.0, scale=aft_sigma, a=aft_skewness)
_new_aft=np.clip(_new_aft, 0, 1)
pat[top_index]*=(_new_aft/_cur_aft)
pat[bottom_index]*=(1 - _new_aft)/(1 - _cur_aft)
# old code from Peter
#pat[top_index] = pat[top_index] / pat[top_index].sum() * _aft
#pat[bottom_index] = pat[bottom_index] / pat[bottom_index].sum() * (1 - _aft)

if np.isnan(pat).sum() > 0: # Pattern map return zeros
_photon_channels = np.array([-1] * count)
Expand Down
59 changes: 54 additions & 5 deletions wfsim/load_resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,25 +231,43 @@ def __init__(self, config=None):

elif config.get('detector', 'XENONnT') == 'XENONnT':
pmt_mask = np.array(config['gains']) > 0 # Converted from to pe (from cmt by default)

self.s1_pattern_map = make_map(files['s1_pattern_map'], fmt='pkl')
self.s2_pattern_map = make_map(files['s2_pattern_map'], fmt='pkl')
self.s1_pattern_map = make_patternmap(files['s1_pattern_map'], fmt='pkl', pmt_mask=pmt_mask)
self.s2_pattern_map = make_patternmap(files['s2_pattern_map'], fmt='pkl', pmt_mask=pmt_mask)

# if there is a (data driven!) map, load it. If not make it from the pattern map
if files['s1_lce_correction_map']:
self.s1_lce_correction_map = make_map(files['s1_lce_correction_map'])
else:
lymap = deepcopy(self.s1_pattern_map)
            # AT: this scaling with mask is redundant to `make_patternmap`, but keep it in for now
lymap.data['map'] = np.sum(lymap.data['map'][:][:][:], axis=3, keepdims=True, where=pmt_mask)
lymap.__init__(lymap.data)
self.s1_lce_correction_map = lymap
# making S2 aft scaling (if provided)
if 's2_mean_area_fraction_top' in config.keys():
avg_s2aft_=config['s2_mean_area_fraction_top']
if avg_s2aft_>=0.0:
if isinstance(files['s2_pattern_map'], list):
log.warning(f'Scaling of S2 AFT with dummy map, this will have no effect!')
else:
s2map=deepcopy(self.s2_pattern_map)
s2map_topeff_=s2map.data['map'][...,0:config['n_top_pmts']].sum(axis=2)
s2map_toteff_=s2map.data['map'].sum(axis=2)
orig_aft_=np.mean((s2map_topeff_/s2map_toteff_)[s2map_toteff_>0.0])
# getting scales for top/bottom separately to preserve total efficiency
scale_top_=avg_s2aft_/orig_aft_
scale_bot_=(1 - avg_s2aft_)/(1 - orig_aft_)
s2map.data['map'][:,:,0:config['n_top_pmts']]*=scale_top_
s2map.data['map'][:,:,config['n_top_pmts']:config['n_tpc_pmts']]*=scale_bot_
self.s2_pattern_map.__init__(s2map.data)

# if there is a (data driven!) map, load it. If not make it from the pattern map
if files['s2_correction_map']:
self.s2_correction_map = make_map(files['s2_correction_map'])
else:
s2cmap = deepcopy(self.s2_pattern_map)
# Lower the LCE by removing contribution from dead PMTs
# AT: masking is a bit redundant due to PMT mask application in make_patternmap
s2cmap.data['map'] = np.sum(s2cmap.data['map'][:][:], axis=2, keepdims=True, where=pmt_mask)
# Scale by median value
s2cmap.data['map'] = s2cmap.data['map'] / np.median(s2cmap.data['map'][s2cmap.data['map'] > 0])
Expand Down Expand Up @@ -348,8 +366,39 @@ def make_map(map_file, fmt=None, method='WeightedNearestNeighbors'):

else:
raise TypeError("Can't handle map_file except a string or a list")


@export
def make_patternmap(map_file, fmt=None, method='WeightedNearestNeighbors', pmt_mask=None):
    """Load a PMT pattern map, optionally zeroing channels excluded by a PMT mask.

    Specialized variant of make_map() for pattern-map loading: after the map is
    loaded, the per-PMT values of masked-off channels (e.g. dead PMTs) are set
    to zero. This simplifies both the S1 and S2 use cases.

    :param map_file: either a path/URL string to the map resource, or a list
        ``('constant dummy', constant, shape)`` describing a DummyMap.
    :param fmt: resource format; inferred from the file extension when None.
    :param method: interpolation method passed to straxen.InterpolatingMap.
    :param pmt_mask: boolean array with one entry per PMT channel; channels
        where the mask is False get their map values zeroed. None disables
        masking entirely.
    :return: straxen.InterpolatingMap (or DummyMap for list input).
    :raises TypeError: if map_file is neither a string nor a list.
    """
    # Keep tests passing for dummy-map input; we can probably overwrite it completely.
    if isinstance(map_file, list):
        # A dummy map has no per-PMT structure, so the mask cannot be applied.
        log.warning('Using dummy map with pattern mask! This has no effect here!')
        assert map_file[0] == 'constant dummy', ('Alternative file input can only be '
                                                 '("constant dummy", constant: int, shape: list')
        return DummyMap(map_file[1], map_file[2])
    elif isinstance(map_file, str):
        if fmt is None:
            fmt = parse_extension(map_file)
        # NOTE: straxen deals with pointers and caches resources, meaning resources
        # are global; we therefore make our own copy here and modify it locally.
        map_data = deepcopy(straxen.get_resource(map_file, fmt=fmt))
        if 'compressed' in map_data:
            compressor, dtype, shape = map_data['compressed']
            map_data['map'] = np.frombuffer(
                strax.io.COMPRESSORS[compressor]['decompress'](map_data['map']),
                dtype=dtype).reshape(*shape)
            del map_data['compressed']
        if 'quantized' in map_data:
            map_data['map'] = map_data['quantized'] * map_data['map'].astype(np.float32)
            del map_data['quantized']
        if pmt_mask is not None:
            assert map_data['map'].shape[-1] == pmt_mask.shape[0], \
                "Error! Pattern map and PMT gains must have same dimensions!"
            # Excluded (e.g. dead) PMTs contribute zero collection efficiency.
            map_data['map'][..., ~pmt_mask] = 0.0
        return straxen.InterpolatingMap(map_data, method=method)
    else:
        raise TypeError("Can't handle map_file except a string or a list")
@export
class DummyMap:
"""Return constant results
Expand Down

0 comments on commit d3934d5

Please sign in to comment.