-
Notifications
You must be signed in to change notification settings - Fork 10
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Allow 2-parameter lambda_sampling #19
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -13,7 +13,8 @@ class LensLikelihood(TransformedCosmography, LensLikelihoodBase, AnisotropyScali | |
def __init__(self, z_lens, z_source, name='name', likelihood_type='TDKin', anisotropy_model='NONE', | ||
ani_param_array=None, ani_scaling_array_list=None, ani_scaling_array=None, | ||
num_distribution_draws=50, kappa_ext_bias=False, kappa_pdf=None, kappa_bin_edges=None, mst_ifu=False, | ||
lambda_scaling_property=0, normalized=False, kwargs_lens_properties=None, **kwargs_likelihood): | ||
lambda_scaling_property=0,lambda_scaling_property_beta=0, | ||
normalized=False, kwargs_lens_properties=None, **kwargs_likelihood): | ||
""" | ||
|
||
:param z_lens: lens redshift | ||
|
@@ -37,6 +38,8 @@ def __init__(self, z_lens, z_source, name='name', likelihood_type='TDKin', aniso | |
in sampling this lens. | ||
:param lambda_scaling_property: float (optional), scaling of | ||
lambda_mst = lambda_mst_global + alpha * lambda_scaling_property | ||
:param lambda_scaling_property_beta: float (optional), scaling of | ||
lambda_mst = lambda_mst_global + beta * lambda_scaling_property_beta | ||
:param normalized: bool, if True, returns the normalized likelihood, if False, separates the constant prefactor | ||
(in case of a Gaussian 1/(sigma sqrt(2 pi)) ) to compute the reduced chi2 statistics | ||
:param kwargs_lens_properties: keyword arguments of the lens properties | ||
|
@@ -60,6 +63,7 @@ def __init__(self, z_lens, z_source, name='name', likelihood_type='TDKin', aniso | |
else: | ||
self._draw_kappa = False | ||
self._lambda_scaling_property = lambda_scaling_property | ||
self._lambda_scaling_property_beta = lambda_scaling_property_beta | ||
|
||
def lens_log_likelihood(self, cosmo, kwargs_lens=None, kwargs_kin=None, kwargs_source=None): | ||
""" | ||
|
@@ -199,7 +203,7 @@ def check_dist(self, kwargs_lens, kwargs_kin, kwargs_source): | |
return False | ||
|
||
def draw_lens(self, lambda_mst=1, lambda_mst_sigma=0, kappa_ext=0, kappa_ext_sigma=0, gamma_ppn=1, lambda_ifu=1, | ||
lambda_ifu_sigma=0, alpha_lambda=0): | ||
lambda_ifu_sigma=0, alpha_lambda=0, beta_lambda=0): | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Review comment: add documentation for beta_lambda |
||
""" | ||
draws a realization of a specific model from the hyper-parameter distribution | ||
|
||
|
@@ -215,10 +219,12 @@ def draw_lens(self, lambda_mst=1, lambda_mst_sigma=0, kappa_ext=0, kappa_ext_sig | |
:return: draw from the distributions | ||
""" | ||
if self._mst_ifu is True: | ||
lambda_lens = lambda_ifu + alpha_lambda * self._lambda_scaling_property | ||
lambda_lens = lambda_ifu + alpha_lambda * self._lambda_scaling_property \ | ||
+ beta_lambda * self._lambda_scaling_property_beta | ||
lambda_mst_draw = np.random.normal(lambda_lens, lambda_ifu_sigma) | ||
else: | ||
lambda_lens = lambda_mst + alpha_lambda * self._lambda_scaling_property | ||
lambda_lens = lambda_mst + alpha_lambda * self._lambda_scaling_property \ | ||
+ beta_lambda * self._lambda_scaling_property_beta | ||
lambda_mst_draw = np.random.normal(lambda_lens, lambda_mst_sigma) | ||
if self._draw_kappa is True: | ||
kappa_ext_draw = self._kappa_dist.draw_one | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -7,7 +7,7 @@ class LensParam(object): | |
""" | ||
def __init__(self, lambda_mst_sampling=False, lambda_mst_distribution='NONE', kappa_ext_sampling=False, | ||
kappa_ext_distribution='NONE', lambda_ifu_sampling=False, lambda_ifu_distribution='NONE', | ||
alpha_lambda_sampling=False, kwargs_fixed=None, log_scatter=False): | ||
alpha_lambda_sampling=False, beta_lambda_sampling=False, kwargs_fixed=None, log_scatter=False): | ||
""" | ||
|
||
:param lambda_mst_sampling: bool, if True adds a global mass-sheet transform parameter in the sampling | ||
|
@@ -19,6 +19,8 @@ def __init__(self, lambda_mst_sampling=False, lambda_mst_distribution='NONE', ka | |
:param lambda_ifu_distribution: string, distribution function of the lambda_ifu parameter | ||
:param alpha_lambda_sampling: bool, if True samples a parameter alpha_lambda, which scales lambda_mst linearly | ||
according to a predefined quantity of the lens | ||
:param beta_lambda_sampling: bool, if True samples a parameter beta_lambda, which scales lambda_mst linearly | ||
according to a predefined quantity of the lens | ||
:param log_scatter: boolean, if True, samples the Gaussian scatter amplitude in log space (and thus flat prior in log) | ||
:param kwargs_fixed: keyword arguments that are held fixed through the sampling | ||
""" | ||
|
@@ -29,6 +31,7 @@ def __init__(self, lambda_mst_sampling=False, lambda_mst_distribution='NONE', ka | |
self._kappa_ext_sampling = kappa_ext_sampling | ||
self._kappa_ext_distribution = kappa_ext_distribution | ||
self._alpha_lambda_sampling = alpha_lambda_sampling | ||
self._beta_lambda_sampling = beta_lambda_sampling | ||
self._log_scatter = log_scatter | ||
if kwargs_fixed is None: | ||
kwargs_fixed = {} | ||
|
@@ -89,6 +92,12 @@ def param_list(self, latex_style=False): | |
list.append(r'$\alpha_{\lambda}$') | ||
else: | ||
list.append('alpha_lambda') | ||
if self._beta_lambda_sampling is True: | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Review comment: add test function |
||
if 'beta_lambda' not in self._kwargs_fixed: | ||
if latex_style is True: | ||
list.append(r'$\beta_{\lambda}$') | ||
else: | ||
list.append('beta_lambda') | ||
return list | ||
|
||
def args2kwargs(self, args, i=0): | ||
|
@@ -146,6 +155,12 @@ def args2kwargs(self, args, i=0): | |
else: | ||
kwargs['alpha_lambda'] = args[i] | ||
i += 1 | ||
if self._beta_lambda_sampling is True: | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Review comment: add test function |
||
if 'beta_lambda' in self._kwargs_fixed: | ||
kwargs['beta_lambda'] = self._kwargs_fixed['beta_lambda'] | ||
else: | ||
kwargs['beta_lambda'] = args[i] | ||
i += 1 | ||
return kwargs, i | ||
|
||
def kwargs2args(self, kwargs): | ||
|
@@ -182,4 +197,7 @@ def kwargs2args(self, kwargs): | |
if self._alpha_lambda_sampling is True: | ||
if 'alpha_lambda' not in self._kwargs_fixed: | ||
args.append(kwargs['alpha_lambda']) | ||
if self._beta_lambda_sampling is True: | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Review comment: add test function |
||
if 'beta_lambda' not in self._kwargs_fixed: | ||
args.append(kwargs['beta_lambda']) | ||
return args |
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
Review comment: Can you describe in a bit more detail how and where this "pre-defined quantity" gets described?