Vectorize ELBO computation over num particles #1176
Changes from all commits
28150ff
b6820a0
779e260
4995fcf
76fa97c
ec0a818
3ccd5ce
@@ -45,37 +45,36 @@ class Trace_ELBO(ELBO):
     Rajesh Ranganath, Sean Gerrish, David M. Blei
     """

-    def _get_traces(self, model, guide, *args, **kwargs):
+    def _get_trace(self, model, guide, *args, **kwargs):
         """
-        runs the guide and runs the model against the guide with
-        the result packaged as a trace generator
+        Returns a single trace from the guide, and the model that is run
+        against it.
         """
-        for i in range(self.num_particles):
-            guide_trace = poutine.trace(guide).get_trace(*args, **kwargs)
-            model_trace = poutine.trace(poutine.replay(model, trace=guide_trace)).get_trace(*args, **kwargs)
-            if is_validation_enabled():
-                check_model_guide_match(model_trace, guide_trace)
-                enumerated_sites = [name for name, site in guide_trace.nodes.items()
-                                    if site["type"] == "sample" and site["infer"].get("enumerate")]
-                if enumerated_sites:
-                    warnings.warn('\n'.join([
-                        'Trace_ELBO found sample sites configured for enumeration:'
-                        ', '.join(enumerated_sites),
-                        'If you want to enumerate sites, you need to use TraceEnum_ELBO instead.']))
-            guide_trace = prune_subsample_sites(guide_trace)
-            model_trace = prune_subsample_sites(model_trace)
-
-            model_trace.compute_log_prob()
-            guide_trace.compute_score_parts()
-            if is_validation_enabled():
-                for site in model_trace.nodes.values():
-                    if site["type"] == "sample":
-                        check_site_shape(site, self.max_iarange_nesting)
-                for site in guide_trace.nodes.values():
-                    if site["type"] == "sample":
-                        check_site_shape(site, self.max_iarange_nesting)
-
-            yield model_trace, guide_trace
+        guide_trace = poutine.trace(guide).get_trace(*args, **kwargs)
Review discussion on this line:

- is this method identical sans the for loop over `num_particles`? the diff shows the whole method has changed but from a cursory glance it looks the same
- they are identical. have you tried the old "cross your eyes at the side-by-side diff" trick?
- i do the "cmd-f a randomly sampled substring and see if it appears on both sides". 90% of the time, it works every time.
- Yup, that's right! Mostly
+        model_trace = poutine.trace(poutine.replay(model, trace=guide_trace)).get_trace(*args, **kwargs)
+        if is_validation_enabled():
+            check_model_guide_match(model_trace, guide_trace)
+            enumerated_sites = [name for name, site in guide_trace.nodes.items()
+                                if site["type"] == "sample" and site["infer"].get("enumerate")]
+            if enumerated_sites:
+                warnings.warn('\n'.join([
+                    'Trace_ELBO found sample sites configured for enumeration:'
+                    ', '.join(enumerated_sites),
+                    'If you want to enumerate sites, you need to use TraceEnum_ELBO instead.']))
+        guide_trace = prune_subsample_sites(guide_trace)
+        model_trace = prune_subsample_sites(model_trace)
+
+        model_trace.compute_log_prob()
+        guide_trace.compute_score_parts()
+        if is_validation_enabled():
+            for site in model_trace.nodes.values():
+                if site["type"] == "sample":
+                    check_site_shape(site, self.max_iarange_nesting)
+            for site in guide_trace.nodes.values():
+                if site["type"] == "sample":
+                    check_site_shape(site, self.max_iarange_nesting)
+
+        return model_trace, guide_trace

     def loss(self, model, guide, *args, **kwargs):
         """
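For intuition about what the vectorization buys, here is a minimal, self-contained sketch in plain PyTorch (not Pyro's actual implementation; the toy model and the values of `x_obs`, `loc`, and `scale` are made up for illustration). It contrasts a per-particle Python loop, which is what the old `_get_traces` generator amounted to, with a single batched draw over a leading particle dimension:

```python
import torch
from torch.distributions import Normal

# Toy setup (hypothetical values): p(z) = Normal(0, 1), p(x | z) = Normal(z, 1),
# guide q(z) = Normal(loc, scale) with a learnable location.
x_obs = torch.tensor(1.5)
loc = torch.tensor(0.3, requires_grad=True)
scale = torch.tensor(0.8)

def elbo_serial(num_particles):
    # One guide sample per loop iteration, mirroring the old generator.
    total = 0.0
    for _ in range(num_particles):
        z = Normal(loc, scale).rsample()
        log_joint = Normal(0., 1.).log_prob(z) + Normal(z, 1.).log_prob(x_obs)
        total = total + (log_joint - Normal(loc, scale).log_prob(z))
    return total / num_particles

def elbo_vectorized(num_particles):
    # All particles drawn at once along a leading dimension, then averaged:
    # one batched density evaluation instead of num_particles separate ones.
    z = Normal(loc, scale).rsample((num_particles,))
    log_joint = Normal(0., 1.).log_prob(z) + Normal(z, 1.).log_prob(x_obs)
    return (log_joint - Normal(loc, scale).log_prob(z)).mean()
```

Both estimators are unbiased for the same ELBO; the batched form evaluates the log densities once over all particles, which is what lets the gradient tests below use many more particles at roughly the same cost.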
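The test change below switches the conjugate Gaussian chain gradient test to the vectorized estimator, with more particles and a tighter tolerance. As a usage sketch (the model, guide, data, and optimizer settings here are hypothetical; only the `TraceGraph_ELBO` keyword arguments mirror the test), the same configuration plugs into an ordinary SVI loop:

```python
import torch
from torch.distributions import constraints

import pyro
import pyro.distributions as dist
from pyro.infer import SVI, TraceGraph_ELBO
from pyro.optim import Adam

x_obs = torch.tensor(1.5)  # made-up observation

def model(x):
    # p(z) = Normal(0, 1), p(x | z) = Normal(z, 1)
    z = pyro.sample("z", dist.Normal(0., 1.))
    pyro.sample("x", dist.Normal(z, 1.), obs=x)

def guide(x):
    # q(z) = Normal(q_loc, q_scale) with learnable parameters
    q_loc = pyro.param("q_loc", torch.tensor(0.))
    q_scale = pyro.param("q_scale", torch.tensor(1.), constraint=constraints.positive)
    pyro.sample("z", dist.Normal(q_loc, q_scale))

# All particles are drawn in one batched trace instead of a Python loop;
# max_iarange_nesting bounds the iarange nesting depth of the model
# (an upper bound is fine).
elbo = TraceGraph_ELBO(num_particles=100, vectorize_particles=True, max_iarange_nesting=1)
svi = SVI(model, guide, Adam({"lr": 0.01}), loss=elbo)
for _ in range(1000):
    svi.step(x_obs)
```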
@@ -5,7 +5,6 @@
 from tests.integration_tests.test_conjugate_gaussian_models import GaussianChain
-

 # TODO increase precision and number of particles once latter is parallelized properly
 class ConjugateChainGradientTests(GaussianChain):

     def test_gradients(self):
@@ -17,15 +16,15 @@ def do_test_gradients(self, N, reparameterized):
         pyro.clear_param_store()
         self.setup_chain(N)

-        elbo = TraceGraph_ELBO(num_particles=1000)
+        elbo = TraceGraph_ELBO(num_particles=10000, vectorize_particles=True, max_iarange_nesting=1)
         elbo.loss_and_grads(self.model, self.guide, reparameterized=reparameterized)

         for i in range(1, N + 1):
             for param_prefix in ["loc_q_%d", "log_sig_q_%d", "kappa_q_%d"]:
                 if i == N and param_prefix == 'kappa_q_%d':
                     continue
                 actual_grad = pyro.param(param_prefix % i).grad
-                assert_equal(actual_grad, 0.0 * actual_grad, prec=0.20, msg="".join([
+                assert_equal(actual_grad, 0.0 * actual_grad, prec=0.10, msg="".join([
Review comment on this line:

- Nice!

                              "parameter %s%d" % (param_prefix[:-2], i),
                              "\nexpected = zero vector",
                              "\n actual = {}".format(actual_grad.detach().cpu().numpy())]))
Review discussion on this change:

- Is it worth checking whether `fn` is already wrapped in `poutine.broadcast` and avoiding wrapping a second time? Alternatively, this avoidance check could be done inside `poutine.broadcast`.
- Good idea. I'll check it in `poutine.broadcast` itself.
- I had to add `fn.__broadcasted__` as an attribute to mark a function that has already been broadcasted.
- Shouldn't `broadcast` be idempotent?
- Yes, `broadcast` should be idempotent; I'm only asking whether it is worth avoiding the double wrapping for efficiency purposes. If it costs more than 5 lines of code, then it's probably not worth avoiding the double wrap.
- Agreed, I'll remove this.
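The pattern under discussion is the usual "mark the wrapper so a second wrap is a no-op" trick. A minimal, generic sketch (this is not Pyro's actual `poutine.broadcast` code; only the `__broadcasted__` attribute name comes from the thread above):

```python
import functools

def broadcast(fn):
    # If fn has already been wrapped, return it unchanged so that applying
    # the decorator twice is a no-op instead of stacking wrappers.
    if getattr(fn, "__broadcasted__", False):
        return fn

    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        # ... the actual broadcasting behavior would go here ...
        return fn(*args, **kwargs)

    wrapped.__broadcasted__ = True
    return wrapped

f = broadcast(lambda x: x + 1)
assert broadcast(f) is f  # second application returns the same wrapper
```

Whether those few extra lines are worth it over simply letting the wrapper be applied twice (behaviorally equivalent, just slightly more call overhead) is exactly the tradeoff weighed in the thread above.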